diff --git a/.cloudbuild.yaml b/.cloudbuild.yaml index d8c770f4729..9acdb403ef5 100644 --- a/.cloudbuild.yaml +++ b/.cloudbuild.yaml @@ -12,6 +12,9 @@ # See the License for the specific language governing permissions and # limitations under the License. +# Test before submit: +# gcloud builds submit --config=.cloudbuild.yaml --substitutions=COMMIT_SHA="$(git rev-parse HEAD)" --project=ml-pipeline-test + steps: # Build the Python SDK @@ -52,14 +55,16 @@ steps: waitFor: ["-"] - name: 'gcr.io/cloud-builders/docker' args: ['build', '-t', 'gcr.io/$PROJECT_ID/frontend:$COMMIT_SHA', - '--build-arg', 'COMMIT_HASH=$COMMIT_SHA', '-f', - '/workspace/frontend/Dockerfile', '/workspace'] + '--build-arg', 'COMMIT_HASH=$COMMIT_SHA', + '--build-arg', 'TAG_NAME=$TAG_NAME', + '-f', '/workspace/frontend/Dockerfile', '/workspace'] id: 'buildFrontend' waitFor: ['prepareFrontend'] - name: 'gcr.io/cloud-builders/docker' args: ['build', '-t', 'gcr.io/$PROJECT_ID/api-server:$COMMIT_SHA', - '--build-arg', 'COMMIT_SHA=$COMMIT_SHA', '-f', - '/workspace/backend/Dockerfile', '/workspace'] + '--build-arg', 'COMMIT_SHA=$COMMIT_SHA', + '--build-arg', 'TAG_NAME=$TAG_NAME', + '-f', '/workspace/backend/Dockerfile', '/workspace'] id: 'buildApiServer' waitFor: ['copyPythonSDK'] @@ -88,6 +93,32 @@ steps: '/workspace/backend/Dockerfile.visualization', '/workspace'] id: 'buildVisualizationServer' waitFor: ["-"] +- id: 'buildMetadataWriter' + name: 'gcr.io/cloud-builders/docker' + args: ['build', '-t', 'gcr.io/$PROJECT_ID/metadata-writer:$COMMIT_SHA', + '--build-arg', 'COMMIT_HASH=$COMMIT_SHA', '-f', + '/workspace/backend/metadata_writer/Dockerfile', '/workspace'] + waitFor: ["-"] +- id: 'buildCacheServer' + name: 'gcr.io/cloud-builders/docker' + args: ['build', '-t', 'gcr.io/$PROJECT_ID/cache-server:$COMMIT_SHA', + '--build-arg', 'COMMIT_HASH=$COMMIT_SHA', '-f', + '/workspace/backend/Dockerfile.cacheserver', '/workspace'] + waitFor: ["-"] +- id: 'buildCacheDeployer' + name: 'gcr.io/cloud-builders/docker' + args: ['build', '-t', 'gcr.io/$PROJECT_ID/cache-deployer:$COMMIT_SHA', + '--build-arg', 'COMMIT_HASH=$COMMIT_SHA', '-f', + '/workspace/backend/src/cache/deployer/Dockerfile', '/workspace'] + waitFor: ["-"] + +# Build marketplace deployer +- id: 'buildMarketplaceDeployer' + name: 'gcr.io/cloud-builders/docker' + args: ['build', '-t', 'gcr.io/$PROJECT_ID/deployer:$COMMIT_SHA', + '--build-arg', 'COMMIT_HASH=$COMMIT_SHA', '-f', + '/workspace/manifests/gcp_marketplace/deployer/Dockerfile', '/workspace/manifests/gcp_marketplace'] + waitFor: ["-"] # Build the Kubeflow-based pipeline component images - name: 'gcr.io/cloud-builders/docker' @@ -100,6 +131,11 @@ steps: args: ['-c', 'cd /workspace/components/kubeflow/launcher && ./build_image.sh -p $PROJECT_ID -t $COMMIT_SHA'] id: 'buildLauncher' waitFor: ["-"] +- id: 'buildCpuTrainer' + name: 'gcr.io/cloud-builders/docker' + entrypoint: '/bin/bash' + args: ['-c', 'cd /workspace/components/kubeflow/dnntrainer && ./build_image.sh -p $PROJECT_ID -t $COMMIT_SHA -l ml-pipeline-kubeflow-tf-trainer -b 1.6.0'] + waitFor: ["-"] - name: 'gcr.io/cloud-builders/docker' entrypoint: '/bin/bash' args: ['-c', 'cd /workspace/components/kubeflow/dnntrainer && ./build_image.sh -p $PROJECT_ID -t $COMMIT_SHA -l ml-pipeline-kubeflow-tf-trainer-gpu -b 1.6.0-gpu'] @@ -163,6 +199,51 @@ steps: '/workspace/third_party/metadata_envoy/Dockerfile', '/workspace'] id: 'buildMetadataEnvoy' +# Pull third_party images +- name: 'gcr.io/cloud-builders/docker' + args: ['pull', 
'gcr.io/tfx-oss-public/ml_metadata_store_server:0.21.1'] + id: 'pullMetadataServer' +- name: 'gcr.io/cloud-builders/docker' + args: ['pull', 'gcr.io/ml-pipeline/minio:RELEASE.2019-08-14T20-37-41Z-license-compliance'] + id: 'pullMinio' +- name: 'gcr.io/cloud-builders/docker' + args: ['pull', 'gcr.io/ml-pipeline/mysql:5.6'] + id: 'pullMysql' +- name: 'gcr.io/cloud-builders/docker' + args: ['pull', 'gcr.io/cloudsql-docker/gce-proxy:1.14'] + id: 'pullCloudsqlProxy' +- name: 'gcr.io/cloud-builders/docker' + args: ['pull', 'gcr.io/ml-pipeline/argoexec:v2.7.5-license-compliance'] + id: 'pullArgoExecutor' +- name: 'gcr.io/cloud-builders/docker' + args: ['pull', 'gcr.io/ml-pipeline/workflow-controller:v2.7.5-license-compliance'] + id: 'pullArgoWorkflowController' + +# Tag for Hosted - SemVersion to Major.Minor parsing +- id: "parseMajorMinorVersion" + waitFor: ["-"] + name: gcr.io/cloud-builders/docker + entrypoint: /bin/bash + args: + - -ceux + - | + # Parse major minor version and save to a file for reusing in other steps. + cat /workspace/VERSION | sed -e "s#[^0-9]*\([0-9]*\)[.]\([0-9]*\)[.]\([0-9]*\)#\1.\2#" > /workspace/mm.ver + +# Tag for Hosted - Tag to hosted folder with MKP friendly name +- id: 'tagForHosted' + waitFor: ['parseMajorMinorVersion', 'buildFrontend', 'buildApiServer', 'buildScheduledWorkflow', + 'buildViewerCrdController', 'buildPersistenceAgent', 'buildInverseProxyAgent', 'buildVisualizationServer', + 'buildMetadataWriter', 'buildCacheServer', 'buildCacheDeployer', 'buildMetadataEnvoy', + 'buildMarketplaceDeployer', 'pullMetadataServer', 'pullMinio', 'pullMysql', 'pullCloudsqlProxy', + 'pullArgoExecutor', 'pullArgoWorkflowController'] + name: 'gcr.io/cloud-builders/docker' + entrypoint: bash + args: + - -ceux + - | + /workspace/test/tag_for_hosted.sh $PROJECT_ID $COMMIT_SHA $(cat /workspace/VERSION) $(cat /workspace/mm.ver) + images: # Images for the pipeline system itself - 'gcr.io/$PROJECT_ID/frontend:$COMMIT_SHA' @@ -172,12 +253,18 @@ images: - 'gcr.io/$PROJECT_ID/viewer-crd-controller:$COMMIT_SHA' - 'gcr.io/$PROJECT_ID/inverse-proxy-agent:$COMMIT_SHA' - 'gcr.io/$PROJECT_ID/visualization-server:$COMMIT_SHA' +- 'gcr.io/$PROJECT_ID/metadata-writer:$COMMIT_SHA' +- 'gcr.io/$PROJECT_ID/cache-server:$COMMIT_SHA' +- 'gcr.io/$PROJECT_ID/cache-deployer:$COMMIT_SHA' + +# Images for Marketplace +- 'gcr.io/$PROJECT_ID/deployer:$COMMIT_SHA' # Images for the Kubeflow-based pipeline components - 'gcr.io/$PROJECT_ID/ml-pipeline-kubeflow-deployer:$COMMIT_SHA' - 'gcr.io/$PROJECT_ID/ml-pipeline-kubeflow-tf-trainer:$COMMIT_SHA' - 'gcr.io/$PROJECT_ID/ml-pipeline-kubeflow-tf-trainer-gpu:$COMMIT_SHA' -- 'gcr.io/$PROJECT_ID/ml-pipeline-kubeflow-tf:$COMMIT_SHA' +- 'gcr.io/$PROJECT_ID/ml-pipeline-kubeflow-tfjob:$COMMIT_SHA' # Images for the Dataproc-based pipeline components - 'gcr.io/$PROJECT_ID/ml-pipeline-dataproc-analyze:$COMMIT_SHA' diff --git a/.github/ISSUE_TEMPLATE/BUG_REPORT.md b/.github/ISSUE_TEMPLATE/BUG_REPORT.md index 259871838e4..d1b1afdf1dc 100644 --- a/.github/ISSUE_TEMPLATE/BUG_REPORT.md +++ b/.github/ISSUE_TEMPLATE/BUG_REPORT.md @@ -4,13 +4,33 @@ about: Report a bug encountered with Kubeflow Pipelines --- -**What happened:** +### What steps did you take: +[A clear and concise list of the steps you took to trigger the bug.] -**What did you expect to happen:** +### What happened: + +### What did you expect to happen: + +### Environment: + + +How did you deploy Kubeflow Pipelines (KFP)? 
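+<!-- e.g. via GCP Marketplace, as part of a full Kubeflow deployment, or with the standalone Kustomize manifests -->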
+ + +KFP version: + +KFP SDK version: -**What steps did you take:** -[A clear and concise description of what the bug is.] -**Anything else you would like to add:** +### Anything else you would like to add: [Miscellaneous information that will assist in solving the issue.] +/kind bug + + diff --git a/.gitignore b/.gitignore index 588872d177c..b1d84f8597a 100644 --- a/.gitignore +++ b/.gitignore @@ -14,7 +14,7 @@ bower_components/ dist # Web server -frontend/server/*.js +frontend/server/dist # Python built package *.egg-info @@ -39,6 +39,7 @@ frontend/test/ui/visual-regression/screenshots/screen *.pyc .DS_Store +build .ipynb_checkpoints *.egg-info @@ -46,6 +47,9 @@ frontend/test/ui/visual-regression/screenshots/screen # go vendor vendor +# Go module cache +backend/pkg/mod/cache + # Bazel output artifacts bazel-* @@ -61,3 +65,4 @@ _artifacts # Generated Python SDK documentation docs/_build + diff --git a/.pylintrc b/.pylintrc new file mode 100644 index 00000000000..c915d10042a --- /dev/null +++ b/.pylintrc @@ -0,0 +1,410 @@ +[MASTER] + +# Specify a configuration file. +#rcfile= + +# Python code to execute, usually for sys.path manipulation such as +# pygtk.require(). +#init-hook= + +# Add files or directories to the blacklist. They should be base names, not +# paths. +ignore=CVS + +# Add files or directories matching the regex patterns to the blacklist. The +# regex matches against base names, not paths. +ignore-patterns= + +# Pickle collected data for later comparisons. +persistent=yes + +# List of plugins (as comma separated values of python modules names) to load, +# usually to register additional checkers. +load-plugins= + +# Use multiple processes to speed up Pylint. +jobs=1 + +# Allow loading of arbitrary C extensions. Extensions are imported into the +# active Python interpreter and may run arbitrary code. +unsafe-load-any-extension=no + +# A comma-separated list of package or module names from where C extensions may +# be loaded. Extensions are loading into the active Python interpreter and may +# run arbitrary code +extension-pkg-whitelist=numpy + +# Allow optimization of some AST trees. This will activate a peephole AST +# optimizer, which will apply various small optimizations. For instance, it can +# be used to obtain the result of joining multiple strings with the addition +# operator. Joining a lot of strings can lead to a maximum recursion error in +# Pylint and this flag can prevent that. It has one side effect, the resulting +# AST will be different than the one from reality. This option is deprecated +# and it will be removed in Pylint 2.0. +optimize-ast=no + + +[MESSAGES CONTROL] + +# Only show warnings with the listed confidence levels. Leave empty to show +# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED +confidence= + +# Enable the message, report, category or checker with the given id(s). You can +# either give multiple identifier separated by comma (,) or put this option +# multiple time (only on the command line, not in the configuration file where +# it should appear only once). See also the "--disable" option for examples. +#enable= + +# Disable the message, report, category or checker with the given id(s). You +# can either give multiple identifiers separated by comma (,) or put this +# option multiple times (only on the command line, not in the configuration +# file where it should appear only once).You can also use "--disable=all" to +# disable everything first and then reenable specific checks. 
For example, if +# you want to run only the similarities checker, you can use "--disable=all +# --enable=similarities". If you want to run only the classes checker, but have +# no Warning level messages displayed, use"--disable=all --enable=classes +# --disable=W" +# TODO(numerology): enable missing-module-docstring after finish the effort. +disable=missing-module-docstring + + +[REPORTS] + +# Set the output format. Available formats are text, parseable, colorized, msvs +# (visual studio) and html. You can also give a reporter class, eg +# mypackage.mymodule.MyReporterClass. +output-format=text + +# Put messages in a separate file for each module / package specified on the +# command line instead of printing them on stdout. Reports (if any) will be +# written in a file name "pylint_global.[txt|html]". This option is deprecated +# and it will be removed in Pylint 2.0. +files-output=no + +# Tells whether to display a full report or only the messages +reports=yes + +# Python expression which should return a note less than 10 (10 is the highest +# note). You have access to the variables errors warning, statement which +# respectively contain the number of errors / warnings messages and the total +# number of statements analyzed. This is used by the global evaluation report +# (RP0004). +evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) + +# Template used to display messages. This is a python new-style format string +# used to format the message information. See doc for all details +#msg-template= + + +[BASIC] + +# Good variable names which should always be accepted, separated by a comma +# s3 is whitelisted for its special meaning. +good-names=i,j,k,ex,Run,_,s3 + +# Bad variable names which should always be refused, separated by a comma +bad-names=foo,bar,baz,toto,tutu,tata + +# Colon-delimited sets of names that determine each other's naming style when +# the name regexes allow several styles. +name-group= + +# Include a hint for the correct naming format with invalid-name +include-naming-hint=no + +# List of decorators that produce properties, such as abc.abstractproperty. Add +# to this list to register other decorators that produce valid properties. 
+property-classes=abc.abstractproperty + +# Regular expression matching correct variable names +variable-rgx=[a-z_][a-z0-9_]{2,30}$ + +# Naming hint for variable names +variable-name-hint=[a-z_][a-z0-9_]{2,30}$ + +# Regular expression matching correct class attribute names +class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$ + +# Naming hint for class attribute names +class-attribute-name-hint=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$ + +# Regular expression matching correct argument names +argument-rgx=[a-z_][a-z0-9_]{2,30}$ + +# Naming hint for argument names +argument-name-hint=[a-z_][a-z0-9_]{2,30}$ + +# Regular expression matching correct module names +module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ + +# Naming hint for module names +module-name-hint=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ + +# Regular expression matching correct constant names +const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$ + +# Naming hint for constant names +const-name-hint=(([A-Z_][A-Z0-9_]*)|(__.*__))$ + +# Regular expression matching correct inline iteration names +inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$ + +# Naming hint for inline iteration names +inlinevar-name-hint=[A-Za-z_][A-Za-z0-9_]*$ + +# Regular expression matching correct method names +method-rgx=[a-z_][a-z0-9_]{2,30}$ + +# Naming hint for method names +method-name-hint=[a-z_][a-z0-9_]{2,30}$ + +# Regular expression matching correct function names +function-rgx=[a-z_][a-z0-9_]{2,30}$ + +# Naming hint for function names +function-name-hint=[a-z_][a-z0-9_]{2,30}$ + +# Regular expression matching correct attribute names +attr-rgx=[a-z_][a-z0-9_]{2,30}$ + +# Naming hint for attribute names +attr-name-hint=[a-z_][a-z0-9_]{2,30}$ + +# Regular expression matching correct class names +class-rgx=[A-Z_][a-zA-Z0-9]+$ + +# Naming hint for class names +class-name-hint=[A-Z_][a-zA-Z0-9]+$ + +# Regular expression which should only match function or class names that do +# not require a docstring. +no-docstring-rgx=^test_ + +# Minimum line length for functions/classes that require docstrings, shorter +# ones are exempt. +docstring-min-length=-1 + + +[ELIF] + +# Maximum number of nested blocks for function / method body +max-nested-blocks=5 + + +[FORMAT] + +# Maximum number of characters on a single line. +max-line-length=80 + +# Regexp for a line that is allowed to be longer than the limit. +# Ignoring 1) links in comment, 2) component yaml specs and 3) import statements. +ignore-long-lines=(^\s*(# )??$|^.*'https://raw\.githubusercontent\.com/kubeflow/pipelines\S*$|^\s*(import|from).*$) + +# Allow the body of an if to be on the same line as the test if there is no +# else. +single-line-if-stmt=y + +# List of optional constructs for which whitespace checking is disabled. `dict- +# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}. +# `trailing-comma` allows a space between comma and closing bracket: (a, ). +# `empty-line` allows space-only lines. +no-space-check=trailing-comma,dict-separator + +# Maximum number of lines in a module +max-module-lines=1000 + +# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 +# tab). +indent-string=' ' + +# Number of spaces of indent required inside a hanging or continued line. +indent-after-paren=4 + +# Expected format of line ending, e.g. empty (any line ending), LF or CRLF. 
+expected-line-ending-format= + + +[LOGGING] + +# Logging modules to check that the string format arguments are in logging +# function parameter format +logging-modules=logging + + +[MISCELLANEOUS] + +# List of note tags to take in consideration, separated by a comma. +notes=FIXME,XXX,TODO + + +[SIMILARITIES] + +# Minimum lines number of a similarity. +min-similarity-lines=10 + +# Ignore comments when computing similarities. +ignore-comments=yes + +# Ignore docstrings when computing similarities. +ignore-docstrings=yes + +# Ignore imports when computing similarities. +ignore-imports=no + + +[SPELLING] + +# Spelling dictionary name. Available dictionaries: none. To make it working +# install python-enchant package. +spelling-dict= + +# List of comma separated words that should not be checked. +spelling-ignore-words= + +# A path to a file that contains private dictionary; one word per line. +spelling-private-dict-file= + +# Tells whether to store unknown words to indicated private dictionary in +# --spelling-private-dict-file option instead of raising a message. +spelling-store-unknown-words=no + + +[TYPECHECK] + +# Tells whether missing members accessed in mixin class should be ignored. A +# mixin class is detected if its name ends with "mixin" (case insensitive). +ignore-mixin-members=yes + +# List of module names for which member attributes should not be checked +# (useful for modules/projects where namespaces are manipulated during runtime +# and thus existing member attributes cannot be deduced by static analysis. It +# supports qualified module names, as well as Unix pattern matching. +ignored-modules= + +# List of class names for which member attributes should not be checked (useful +# for classes with dynamically set attributes). This supports the use of +# qualified names. +ignored-classes=optparse.Values,thread._local,_thread._local,matplotlib.cm,tensorflow.python,tensorflow,tensorflow.train.Example,RunOptions + +# List of members which are set dynamically and missed by pylint inference +# system, and so shouldn't trigger E1101 when accessed. Python regular +# expressions are accepted. +generated-members=set_shape,np.float32 + +# List of decorators that produce context managers, such as +# contextlib.contextmanager. Add to this list to register other decorators that +# produce valid context managers. +contextmanager-decorators=contextlib.contextmanager + + +[VARIABLES] + +# Tells whether we should check for unused import in __init__ files. +init-import=no + +# A regular expression matching the name of dummy variables (i.e. expectedly +# not used). +dummy-variables-rgx=(_+[a-zA-Z0-9_]*?$)|dummy + +# List of additional names supposed to be defined in builtins. Remember that +# you should avoid to define new builtins when possible. +additional-builtins= + +# List of strings which can identify a callback function by name. A callback +# name must start or end with one of those strings. +callbacks=cb_,_cb + +# List of qualified module names which can have objects that can redefine +# builtins. +redefining-builtins-modules=six.moves,future.builtins + + +[CLASSES] + +# List of method names used to declare (i.e. assign) instance attributes. +defining-attr-methods=__init__,__new__,setUp + +# List of valid names for the first argument in a class method. +valid-classmethod-first-arg=cls + +# List of valid names for the first argument in a metaclass class method. +valid-metaclass-classmethod-first-arg=mcs + +# List of member names, which should be excluded from the protected access +# warning. 
+exclude-protected=_asdict,_fields,_replace,_source,_make + + +[DESIGN] + +# Maximum number of arguments for function / method +max-args=10 + +# Argument names that match this expression will be ignored. Default to name +# with leading underscore +ignored-argument-names=_.* + +# Maximum number of locals for function / method body +max-locals=30 + +# Maximum number of return / yield for function / method body +max-returns=6 + +# Maximum number of branch for function / method body +max-branches=12 + +# Maximum number of statements in function / method body +max-statements=100 + +# Maximum number of parents for a class (see R0901). +max-parents=7 + +# Maximum number of attributes for a class (see R0902). +max-attributes=10 + +# Minimum number of public methods for a class (see R0903). +min-public-methods=0 + +# Maximum number of public methods for a class (see R0904). +max-public-methods=20 + +# Maximum number of boolean expressions in a if statement +max-bool-expr=5 + + +[IMPORTS] + +# Deprecated modules which should not be used, separated by a comma +deprecated-modules=optparse + +# Create a graph of every (i.e. internal and external) dependencies in the +# given file (report RP0402 must not be disabled) +import-graph= + +# Create a graph of external dependencies in the given file (report RP0402 must +# not be disabled) +ext-import-graph= + +# Create a graph of internal dependencies in the given file (report RP0402 must +# not be disabled) +int-import-graph= + +# Force import order to recognize a module as part of the standard +# compatibility libraries. +known-standard-library= + +# Force import order to recognize a module as part of a third party library. +known-third-party=enchant + +# Analyse import fallback blocks. This can be used to support both Python 2 and +# 3 compatible code, which means that the block might have code that exists +# only in one or another interpreter, leading to false positives when analysed. +analyse-fallback-blocks=no + + +[EXCEPTIONS] + +# Exceptions that will emit a warning when being caught. Defaults to +# "Exception" +overgeneral-exceptions=Exception \ No newline at end of file diff --git a/.release.cloudbuild.yaml b/.release.cloudbuild.yaml index 9a36d3487c6..886109c2028 100644 --- a/.release.cloudbuild.yaml +++ b/.release.cloudbuild.yaml @@ -13,38 +13,63 @@ # limitations under the License. steps: + +# Marketplace Major.Minor parsing +- id: "parseMajorMinorVersion" + name: gcr.io/cloud-builders/docker + entrypoint: /bin/bash + args: + - -ceux + - | + # Parse major minor version and save to a file for reusing in other steps. 
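+      # e.g. a TAG_NAME of "0.5.1" becomes "0.5" in /workspace/mm.ver (the patch version is dropped).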
+ echo $TAG_NAME | sed -e "s#[^0-9]*\([0-9]*\)[.]\([0-9]*\)[.]\([0-9]*\)#\1.\2#" > /workspace/mm.ver + # Pull and retag the images for the pipeline system - name: 'gcr.io/cloud-builders/docker' args: ['pull', 'gcr.io/$PROJECT_ID/frontend:$COMMIT_SHA'] id: 'pullFrontend' -- name: 'gcr.io/cloud-builders/docker' - args: ['tag', 'gcr.io/$PROJECT_ID/frontend:$COMMIT_SHA', 'gcr.io/ml-pipeline/frontend:$TAG_NAME'] - id: 'tagFrontendVersionNumber' - waitFor: ['pullFrontend'] -- name: 'gcr.io/cloud-builders/docker' - args: ['tag', 'gcr.io/$PROJECT_ID/frontend:$COMMIT_SHA', 'gcr.io/ml-pipeline/frontend:$COMMIT_SHA'] - id: 'tagFrontendCommitSHA' - waitFor: ['pullFrontend'] -- name: 'gcr.io/cloud-builders/docker' - args: ['tag', 'gcr.io/$PROJECT_ID/frontend:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines/frontend:$TAG_NAME'] - id: 'tagFrontendForMarketplace' - waitFor: ['pullFrontend'] +- id: 'tagFrontendForMarketplaceMajorMin' + waitFor: ['pullFrontend', 'parseMajorMinorVersion'] + name: 'gcr.io/cloud-builders/docker' + entrypoint: bash + args: + - -ceux + - | + docker tag gcr.io/$PROJECT_ID/frontend:$COMMIT_SHA gcr.io/ml-pipeline/frontend:$TAG_NAME + docker tag gcr.io/$PROJECT_ID/frontend:$COMMIT_SHA gcr.io/ml-pipeline/frontend:$COMMIT_SHA + docker tag gcr.io/$PROJECT_ID/frontend:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines/frontend:$TAG_NAME + docker tag gcr.io/$PROJECT_ID/frontend:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines-test/frontend:$TAG_NAME + docker tag gcr.io/$PROJECT_ID/frontend:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines/frontend:$(cat /workspace/mm.ver) + docker tag gcr.io/$PROJECT_ID/frontend:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines-test/frontend:$(cat /workspace/mm.ver) + docker push gcr.io/ml-pipeline/frontend:$TAG_NAME + docker push gcr.io/ml-pipeline/frontend:$COMMIT_SHA + docker push gcr.io/ml-pipeline/google/pipelines/frontend:$TAG_NAME + docker push gcr.io/ml-pipeline/google/pipelines-test/frontend:$TAG_NAME + docker push gcr.io/ml-pipeline/google/pipelines/frontend:$(cat /workspace/mm.ver) + docker push gcr.io/ml-pipeline/google/pipelines-test/frontend:$(cat /workspace/mm.ver) - name: 'gcr.io/cloud-builders/docker' args: ['pull', 'gcr.io/$PROJECT_ID/api-server:$COMMIT_SHA'] id: 'pullAPIServer' -- name: 'gcr.io/cloud-builders/docker' - args: ['tag', 'gcr.io/$PROJECT_ID/api-server:$COMMIT_SHA', 'gcr.io/ml-pipeline/api-server:$TAG_NAME'] - id: 'tagAPIServerVersionNumber' - waitFor: ['pullAPIServer'] -- name: 'gcr.io/cloud-builders/docker' - args: ['tag', 'gcr.io/$PROJECT_ID/api-server:$COMMIT_SHA', 'gcr.io/ml-pipeline/api-server:$COMMIT_SHA'] - id: 'tagAPIServerCommitSHA' - waitFor: ['pullAPIServer'] -- name: 'gcr.io/cloud-builders/docker' - args: ['tag', 'gcr.io/$PROJECT_ID/api-server:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines/apiserver:$TAG_NAME'] - id: 'tagAPIServerForMarketplace' - waitFor: ['pullAPIServer'] +- id: 'tagAPIServerForMarketplaceMajorMinor' + waitFor: ['pullAPIServer', 'parseMajorMinorVersion'] + name: 'gcr.io/cloud-builders/docker' + entrypoint: bash + args: + - -ceux + - | + docker tag gcr.io/$PROJECT_ID/api-server:$COMMIT_SHA 'gcr.io/ml-pipeline/api-server:$TAG_NAME' + docker tag gcr.io/$PROJECT_ID/api-server:$COMMIT_SHA 'gcr.io/ml-pipeline/api-server:$COMMIT_SHA' + docker tag gcr.io/$PROJECT_ID/api-server:$COMMIT_SHA 'gcr.io/ml-pipeline/google/pipelines/apiserver:$TAG_NAME' + docker tag gcr.io/$PROJECT_ID/api-server:$COMMIT_SHA 'gcr.io/ml-pipeline/google/pipelines-test/apiserver:$TAG_NAME' + docker tag 
gcr.io/$PROJECT_ID/api-server:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines/apiserver:$(cat /workspace/mm.ver) + docker tag gcr.io/$PROJECT_ID/api-server:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines-test/apiserver:$(cat /workspace/mm.ver) + docker push 'gcr.io/ml-pipeline/api-server:$TAG_NAME' + docker push 'gcr.io/ml-pipeline/api-server:$COMMIT_SHA' + docker push 'gcr.io/ml-pipeline/google/pipelines/apiserver:$TAG_NAME' + docker push 'gcr.io/ml-pipeline/google/pipelines-test/apiserver:$TAG_NAME' + docker push gcr.io/ml-pipeline/google/pipelines/apiserver:$(cat /workspace/mm.ver) + docker push gcr.io/ml-pipeline/google/pipelines-test/apiserver:$(cat /workspace/mm.ver) - name: 'gcr.io/cloud-builders/docker' args: ['pull', 'gcr.io/$PROJECT_ID/scheduledworkflow:$COMMIT_SHA'] @@ -61,6 +86,21 @@ steps: args: ['tag', 'gcr.io/$PROJECT_ID/scheduledworkflow:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines/scheduledworkflow:$TAG_NAME'] id: 'tagScheduledworkflowForMarketplace' waitFor: ['pullScheduledworkflow'] +- name: 'gcr.io/cloud-builders/docker' + args: ['tag', 'gcr.io/$PROJECT_ID/scheduledworkflow:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines-test/scheduledworkflow:$TAG_NAME'] + id: 'tagScheduledworkflowForMarketplaceTest' + waitFor: ['pullScheduledworkflow'] +- id: 'tagScheduledworkflowForMarketplaceMajorMinor' + waitFor: ['pullScheduledworkflow', 'parseMajorMinorVersion'] + name: 'gcr.io/cloud-builders/docker' + entrypoint: bash + args: + - -ceux + - | + docker tag gcr.io/$PROJECT_ID/scheduledworkflow:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines/scheduledworkflow:$(cat /workspace/mm.ver) + docker tag gcr.io/$PROJECT_ID/scheduledworkflow:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines-test/scheduledworkflow:$(cat /workspace/mm.ver) + docker push gcr.io/ml-pipeline/google/pipelines/scheduledworkflow:$(cat /workspace/mm.ver) + docker push gcr.io/ml-pipeline/google/pipelines-test/scheduledworkflow:$(cat /workspace/mm.ver) - name: 'gcr.io/cloud-builders/docker' args: ['pull', 'gcr.io/$PROJECT_ID/viewer-crd-controller:$COMMIT_SHA'] @@ -77,6 +117,21 @@ steps: args: ['tag', 'gcr.io/$PROJECT_ID/viewer-crd-controller:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines/viewercrd:$TAG_NAME'] id: 'tagViewerCrdControllerForMarketplace' waitFor: ['pullViewerCrdController'] +- name: 'gcr.io/cloud-builders/docker' + args: ['tag', 'gcr.io/$PROJECT_ID/viewer-crd-controller:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines-test/viewercrd:$TAG_NAME'] + id: 'tagViewerCrdControllerForMarketplaceTest' + waitFor: ['pullViewerCrdController'] +- id: 'tagViewerCrdControllerForMarketplaceMajorMinor' + waitFor: ['pullViewerCrdController', 'parseMajorMinorVersion'] + name: 'gcr.io/cloud-builders/docker' + entrypoint: bash + args: + - -ceux + - | + docker tag gcr.io/$PROJECT_ID/viewer-crd-controller:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines/viewercrd:$(cat /workspace/mm.ver) + docker tag gcr.io/$PROJECT_ID/viewer-crd-controller:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines-test/viewercrd:$(cat /workspace/mm.ver) + docker push gcr.io/ml-pipeline/google/pipelines/viewercrd:$(cat /workspace/mm.ver) + docker push gcr.io/ml-pipeline/google/pipelines-test/viewercrd:$(cat /workspace/mm.ver) - name: 'gcr.io/cloud-builders/docker' args: ['pull', 'gcr.io/$PROJECT_ID/persistenceagent:$COMMIT_SHA'] @@ -93,6 +148,21 @@ steps: args: ['tag', 'gcr.io/$PROJECT_ID/persistenceagent:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines/persistenceagent:$TAG_NAME'] id: 'tagPersistenceagentForMarketplace' waitFor: 
['pullPersistenceagent'] +- name: 'gcr.io/cloud-builders/docker' + args: ['tag', 'gcr.io/$PROJECT_ID/persistenceagent:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines-test/persistenceagent:$TAG_NAME'] + id: 'tagPersistenceagentForMarketplaceTest' + waitFor: ['pullPersistenceagent'] +- id: 'tagPersistenceagentForMarketplaceMajorMinor' + waitFor: ['pullPersistenceagent', 'parseMajorMinorVersion'] + name: 'gcr.io/cloud-builders/docker' + entrypoint: bash + args: + - -ceux + - | + docker tag gcr.io/$PROJECT_ID/persistenceagent:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines/persistenceagent:$(cat /workspace/mm.ver) + docker tag gcr.io/$PROJECT_ID/persistenceagent:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines-test/persistenceagent:$(cat /workspace/mm.ver) + docker push gcr.io/ml-pipeline/google/pipelines/persistenceagent:$(cat /workspace/mm.ver) + docker push gcr.io/ml-pipeline/google/pipelines-test/persistenceagent:$(cat /workspace/mm.ver) - name: 'gcr.io/cloud-builders/docker' args: ['pull', 'gcr.io/$PROJECT_ID/inverse-proxy-agent:$COMMIT_SHA'] @@ -109,6 +179,21 @@ steps: args: ['tag', 'gcr.io/$PROJECT_ID/inverse-proxy-agent:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines/proxyagent:$TAG_NAME'] id: 'tagInverseProxyAgentForMarketplace' waitFor: ['pullInverseProxyAgent'] +- name: 'gcr.io/cloud-builders/docker' + args: ['tag', 'gcr.io/$PROJECT_ID/inverse-proxy-agent:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines-test/proxyagent:$TAG_NAME'] + id: 'tagInverseProxyAgentForMarketplaceTest' + waitFor: ['pullInverseProxyAgent'] +- id: 'tagInverseProxyAgentForMarketplaceMajorMinor' + waitFor: ['pullInverseProxyAgent', 'parseMajorMinorVersion'] + name: 'gcr.io/cloud-builders/docker' + entrypoint: bash + args: + - -ceux + - | + docker tag gcr.io/$PROJECT_ID/inverse-proxy-agent:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines/proxyagent:$(cat /workspace/mm.ver) + docker tag gcr.io/$PROJECT_ID/inverse-proxy-agent:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines-test/proxyagent:$(cat /workspace/mm.ver) + docker push gcr.io/ml-pipeline/google/pipelines/proxyagent:$(cat /workspace/mm.ver) + docker push gcr.io/ml-pipeline/google/pipelines-test/proxyagent:$(cat /workspace/mm.ver) - name: 'gcr.io/cloud-builders/docker' args: ['pull', 'gcr.io/$PROJECT_ID/visualization-server:$COMMIT_SHA'] @@ -125,55 +210,323 @@ steps: args: ['tag', 'gcr.io/$PROJECT_ID/visualization-server:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines/visualizationserver:$TAG_NAME'] id: 'tagVisualizationServerForMarketplace' waitFor: ['pullVisualizationServer'] - - name: 'gcr.io/cloud-builders/docker' - args: ['pull', 'gcr.io/tfx-oss-public/ml_metadata_store_server:0.14.0'] - id: 'PullMetadataServer' + args: ['tag', 'gcr.io/$PROJECT_ID/visualization-server:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines-test/visualizationserver:$TAG_NAME'] + id: 'tagVisualizationServerForMarketplaceTest' + waitFor: ['pullVisualizationServer'] +- id: 'tagVisualizationServerForMarketplaceMajorMinor' + waitFor: ['pullVisualizationServer', 'parseMajorMinorVersion'] + name: 'gcr.io/cloud-builders/docker' + entrypoint: bash + args: + - -ceux + - | + docker tag gcr.io/$PROJECT_ID/visualization-server:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines/visualizationserver:$(cat /workspace/mm.ver) + docker tag gcr.io/$PROJECT_ID/visualization-server:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines-test/visualizationserver:$(cat /workspace/mm.ver) + docker push gcr.io/ml-pipeline/google/pipelines/visualizationserver:$(cat /workspace/mm.ver) + docker push 
gcr.io/ml-pipeline/google/pipelines-test/visualizationserver:$(cat /workspace/mm.ver) + - name: 'gcr.io/cloud-builders/docker' - args: ['tag', 'gcr.io/tfx-oss-public/ml_metadata_store_server:0.14.0', 'gcr.io/ml-pipeline/google/pipelines/metadataserver:$TAG_NAME'] - waitFor: ['PullMetadataServer'] + args: ['pull', 'gcr.io/tfx-oss-public/ml_metadata_store_server:0.21.1'] + id: 'pullMetadataServer' +- name: 'gcr.io/cloud-builders/docker' + args: ['tag', 'gcr.io/tfx-oss-public/ml_metadata_store_server:0.21.1', 'gcr.io/ml-pipeline/google/pipelines/metadataserver:$TAG_NAME'] + id: 'tagMetadataServerForMarketplace' + waitFor: ['pullMetadataServer'] +- name: 'gcr.io/cloud-builders/docker' + args: ['tag', 'gcr.io/tfx-oss-public/ml_metadata_store_server:0.21.1', 'gcr.io/ml-pipeline/google/pipelines-test/metadataserver:$TAG_NAME'] + id: 'tagMetadataServerForMarketplaceTest' + waitFor: ['pullMetadataServer'] +- id: 'tagMetadataServerForMarketplaceMajorMinor' + waitFor: ['pullMetadataServer', 'parseMajorMinorVersion'] + name: 'gcr.io/cloud-builders/docker' + entrypoint: bash + args: + - -ceux + - | + docker tag gcr.io/tfx-oss-public/ml_metadata_store_server:0.21.1 gcr.io/ml-pipeline/google/pipelines/metadataserver:$(cat /workspace/mm.ver) + docker tag gcr.io/tfx-oss-public/ml_metadata_store_server:0.21.1 gcr.io/ml-pipeline/google/pipelines-test/metadataserver:$(cat /workspace/mm.ver) + docker push gcr.io/ml-pipeline/google/pipelines/metadataserver:$(cat /workspace/mm.ver) + docker push gcr.io/ml-pipeline/google/pipelines-test/metadataserver:$(cat /workspace/mm.ver) + +- id: 'pullMetadataWriter' + name: 'gcr.io/cloud-builders/docker' + args: ['pull', 'gcr.io/$PROJECT_ID/metadata-writer:$COMMIT_SHA'] + waitFor: ['-'] +- id: 'tagMetadataWriterVersionNumber' + name: 'gcr.io/cloud-builders/docker' + args: ['tag', 'gcr.io/$PROJECT_ID/metadata-writer:$COMMIT_SHA', 'gcr.io/ml-pipeline/metadata-writer:$TAG_NAME'] + waitFor: ['pullMetadataWriter'] +- id: 'tagMetadataWriterCommitSHA' + name: 'gcr.io/cloud-builders/docker' + args: ['tag', 'gcr.io/$PROJECT_ID/metadata-writer:$COMMIT_SHA', 'gcr.io/ml-pipeline/metadata-writer:$COMMIT_SHA'] + waitFor: ['pullMetadataWriter'] +- id: 'tagMetadataWriterForMarketplace' + name: 'gcr.io/cloud-builders/docker' + args: ['tag', 'gcr.io/$PROJECT_ID/metadata-writer:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines/metadatawriter:$TAG_NAME'] + waitFor: ['pullMetadataWriter'] +- id: 'tagMetadataWriterForMarketplaceTest' + name: 'gcr.io/cloud-builders/docker' + args: ['tag', 'gcr.io/$PROJECT_ID/metadata-writer:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines-test/metadatawriter:$TAG_NAME'] + waitFor: ['pullMetadataWriter'] +- id: 'tagMetadataWriterForMarketplaceMajorMinor' + waitFor: ['pullMetadataWriter', 'parseMajorMinorVersion'] + name: 'gcr.io/cloud-builders/docker' + entrypoint: bash + args: + - -ceux + - | + docker tag gcr.io/$PROJECT_ID/metadata-writer:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines/metadatawriter:$(cat /workspace/mm.ver) + docker tag gcr.io/$PROJECT_ID/metadata-writer:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines-test/metadatawriter:$(cat /workspace/mm.ver) + docker push gcr.io/ml-pipeline/google/pipelines/metadatawriter:$(cat /workspace/mm.ver) + docker push gcr.io/ml-pipeline/google/pipelines-test/metadatawriter:$(cat /workspace/mm.ver) + +- id: 'pullCacheServer' + name: 'gcr.io/cloud-builders/docker' + args: ['pull', 'gcr.io/$PROJECT_ID/cache-server:$COMMIT_SHA'] + waitFor: ['-'] +- id: 'tagCacheServerVersionNumber' + name: 
'gcr.io/cloud-builders/docker' + args: ['tag', 'gcr.io/$PROJECT_ID/cache-server:$COMMIT_SHA', 'gcr.io/ml-pipeline/cache-server:$TAG_NAME'] + waitFor: ['pullCacheServer'] +- id: 'tagCacheServerCommitSHA' + name: 'gcr.io/cloud-builders/docker' + args: ['tag', 'gcr.io/$PROJECT_ID/cache-server:$COMMIT_SHA', 'gcr.io/ml-pipeline/cache-server:$COMMIT_SHA'] + waitFor: ['pullCacheServer'] +- id: 'tagCacheServerForMarketplace' + name: 'gcr.io/cloud-builders/docker' + args: ['tag', 'gcr.io/$PROJECT_ID/cache-server:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines/cacheserver:$TAG_NAME'] + waitFor: ['pullCacheServer'] +- id: 'tagCacheServerForMarketplaceTest' + name: 'gcr.io/cloud-builders/docker' + args: ['tag', 'gcr.io/$PROJECT_ID/cache-server:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines-test/cacheserver:$TAG_NAME'] + waitFor: ['pullCacheServer'] +- id: 'tagCacheServerForMarketplaceMajorMinor' + waitFor: ['pullCacheServer', 'parseMajorMinorVersion'] + name: 'gcr.io/cloud-builders/docker' + entrypoint: bash + args: + - -ceux + - | + docker tag gcr.io/$PROJECT_ID/cache-server:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines/cacheserver:$(cat /workspace/mm.ver) + docker tag gcr.io/$PROJECT_ID/cache-server:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines-test/cacheserver:$(cat /workspace/mm.ver) + docker push gcr.io/ml-pipeline/google/pipelines/cacheserver:$(cat /workspace/mm.ver) + docker push gcr.io/ml-pipeline/google/pipelines-test/cacheserver:$(cat /workspace/mm.ver) + +- id: 'pullCacheDeployer' + name: 'gcr.io/cloud-builders/docker' + args: ['pull', 'gcr.io/$PROJECT_ID/cache-deployer:$COMMIT_SHA'] + waitFor: ['-'] +- id: 'tagCacheDeployerVersionNumber' + name: 'gcr.io/cloud-builders/docker' + args: ['tag', 'gcr.io/$PROJECT_ID/cache-deployer:$COMMIT_SHA', 'gcr.io/ml-pipeline/cache-deployer:$TAG_NAME'] + waitFor: ['pullCacheDeployer'] +- id: 'tagCacheDeployerCommitSHA' + name: 'gcr.io/cloud-builders/docker' + args: ['tag', 'gcr.io/$PROJECT_ID/cache-deployer:$COMMIT_SHA', 'gcr.io/ml-pipeline/cache-deployer:$COMMIT_SHA'] + waitFor: ['pullCacheDeployer'] +- id: 'tagCacheDeployerForMarketplace' + name: 'gcr.io/cloud-builders/docker' + args: ['tag', 'gcr.io/$PROJECT_ID/cache-deployer:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines/cachedeployer:$TAG_NAME'] + waitFor: ['pullCacheDeployer'] +- id: 'tagCacheDeployerForMarketplaceTest' + name: 'gcr.io/cloud-builders/docker' + args: ['tag', 'gcr.io/$PROJECT_ID/cache-deployer:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines-test/cachedeployer:$TAG_NAME'] + waitFor: ['pullCacheDeployer'] +- id: 'tagCacheDeployerForMarketplaceMajorMinor' + waitFor: ['pullCacheDeployer', 'parseMajorMinorVersion'] + name: 'gcr.io/cloud-builders/docker' + entrypoint: bash + args: + - -ceux + - | + docker tag gcr.io/$PROJECT_ID/cache-deployer:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines/cachedeployer:$(cat /workspace/mm.ver) + docker tag gcr.io/$PROJECT_ID/cache-deployer:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines-test/cachedeployer:$(cat /workspace/mm.ver) + docker push gcr.io/ml-pipeline/google/pipelines/cachedeployer:$(cat /workspace/mm.ver) + docker push gcr.io/ml-pipeline/google/pipelines-test/cachedeployer:$(cat /workspace/mm.ver) - name: 'gcr.io/cloud-builders/docker' args: ['pull', 'gcr.io/$PROJECT_ID/metadata-envoy:$COMMIT_SHA'] - id: 'PullMetadataEnvoy' + id: 'pullMetadataEnvoy' - name: 'gcr.io/cloud-builders/docker' args: ['tag', 'gcr.io/$PROJECT_ID/metadata-envoy:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines/metadataenvoy:$TAG_NAME'] - waitFor: 
['PullMetadataEnvoy'] + id: 'tagMetadataEnvoyForMarketplace' + waitFor: ['pullMetadataEnvoy'] +- name: 'gcr.io/cloud-builders/docker' + args: ['tag', 'gcr.io/$PROJECT_ID/metadata-envoy:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines-test/metadataenvoy:$TAG_NAME'] + id: 'tagMetadataEnvoyForMarketplaceTest' + waitFor: ['pullMetadataEnvoy'] +- id: 'tagMetadataEnvoyForMarketplaceMajorMinor' + waitFor: ['pullMetadataEnvoy', 'parseMajorMinorVersion'] + name: 'gcr.io/cloud-builders/docker' + entrypoint: bash + args: + - -ceux + - | + docker tag gcr.io/$PROJECT_ID/metadata-envoy:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines/metadataenvoy:$(cat /workspace/mm.ver) + docker tag gcr.io/$PROJECT_ID/metadata-envoy:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines-test/metadataenvoy:$(cat /workspace/mm.ver) + docker push gcr.io/ml-pipeline/google/pipelines/metadataenvoy:$(cat /workspace/mm.ver) + docker push gcr.io/ml-pipeline/google/pipelines-test/metadataenvoy:$(cat /workspace/mm.ver) - name: 'gcr.io/cloud-builders/docker' args: ['pull', 'gcr.io/ml-pipeline/minio:RELEASE.2019-08-14T20-37-41Z-license-compliance'] - id: 'PullMinio' + id: 'pullMinio' - name: 'gcr.io/cloud-builders/docker' args: ['tag', 'gcr.io/ml-pipeline/minio:RELEASE.2019-08-14T20-37-41Z-license-compliance', 'gcr.io/ml-pipeline/google/pipelines/minio:$TAG_NAME'] - waitFor: ['PullMinio'] + id: 'tagMinioForMarketplace' + waitFor: ['pullMinio'] +- name: 'gcr.io/cloud-builders/docker' + args: ['tag', 'gcr.io/ml-pipeline/minio:RELEASE.2019-08-14T20-37-41Z-license-compliance', 'gcr.io/ml-pipeline/google/pipelines-test/minio:$TAG_NAME'] + id: 'tagMinioForMarketplaceTest' + waitFor: ['pullMinio'] +- id: 'tagMinioForMarketplaceMajorMinor' + waitFor: ['pullMinio', 'parseMajorMinorVersion'] + name: 'gcr.io/cloud-builders/docker' + entrypoint: bash + args: + - -ceux + - | + docker tag gcr.io/ml-pipeline/minio:RELEASE.2019-08-14T20-37-41Z-license-compliance gcr.io/ml-pipeline/google/pipelines/minio:$(cat /workspace/mm.ver) + docker tag gcr.io/ml-pipeline/minio:RELEASE.2019-08-14T20-37-41Z-license-compliance gcr.io/ml-pipeline/google/pipelines-test/minio:$(cat /workspace/mm.ver) + docker push gcr.io/ml-pipeline/google/pipelines/minio:$(cat /workspace/mm.ver) + docker push gcr.io/ml-pipeline/google/pipelines-test/minio:$(cat /workspace/mm.ver) - name: 'gcr.io/cloud-builders/docker' args: ['pull', 'gcr.io/ml-pipeline/mysql:5.6'] - id: 'PullMysql' + id: 'pullMysql' - name: 'gcr.io/cloud-builders/docker' args: ['tag', 'gcr.io/ml-pipeline/mysql:5.6', 'gcr.io/ml-pipeline/google/pipelines/mysql:$TAG_NAME'] - waitFor: ['PullMysql'] + id: 'tagMySqlForMarketplace' + waitFor: ['pullMysql'] +- name: 'gcr.io/cloud-builders/docker' + args: ['tag', 'gcr.io/ml-pipeline/mysql:5.6', 'gcr.io/ml-pipeline/google/pipelines-test/mysql:$TAG_NAME'] + id: 'tagMySqlForMarketplaceTest' + waitFor: ['pullMysql'] +- id: 'tagMySqlForMarketplaceMajorMinor' + waitFor: ['pullMysql', 'parseMajorMinorVersion'] + name: 'gcr.io/cloud-builders/docker' + entrypoint: bash + args: + - -ceux + - | + docker tag gcr.io/ml-pipeline/mysql:5.6 gcr.io/ml-pipeline/google/pipelines/mysql:$(cat /workspace/mm.ver) + docker tag gcr.io/ml-pipeline/mysql:5.6 gcr.io/ml-pipeline/google/pipelines-test/mysql:$(cat /workspace/mm.ver) + docker push gcr.io/ml-pipeline/google/pipelines/mysql:$(cat /workspace/mm.ver) + docker push gcr.io/ml-pipeline/google/pipelines-test/mysql:$(cat /workspace/mm.ver) - name: 'gcr.io/cloud-builders/docker' args: ['pull', 'gcr.io/cloudsql-docker/gce-proxy:1.14'] - id: 
'PullCloudsqlProxy' + id: 'pullCloudsqlProxy' - name: 'gcr.io/cloud-builders/docker' args: ['tag', 'gcr.io/cloudsql-docker/gce-proxy:1.14', 'gcr.io/ml-pipeline/google/pipelines/cloudsqlproxy:$TAG_NAME'] - waitFor: ['PullCloudsqlProxy'] + id: 'tagCloudSqlProxyForMarketplace' + waitFor: ['pullCloudsqlProxy'] +- name: 'gcr.io/cloud-builders/docker' + args: ['tag', 'gcr.io/cloudsql-docker/gce-proxy:1.14', 'gcr.io/ml-pipeline/google/pipelines-test/cloudsqlproxy:$TAG_NAME'] + id: 'tagCloudSqlProxyForMarketplaceTest' + waitFor: ['pullCloudsqlProxy'] +- id: 'tagCloudSqlProxyForMarketplaceMajorMinor' + waitFor: ['pullCloudsqlProxy', 'parseMajorMinorVersion'] + name: 'gcr.io/cloud-builders/docker' + entrypoint: bash + args: + - -ceux + - | + docker tag gcr.io/cloudsql-docker/gce-proxy:1.14 gcr.io/ml-pipeline/google/pipelines/cloudsqlproxy:$(cat /workspace/mm.ver) + docker tag gcr.io/cloudsql-docker/gce-proxy:1.14 gcr.io/ml-pipeline/google/pipelines-test/cloudsqlproxy:$(cat /workspace/mm.ver) + docker push gcr.io/ml-pipeline/google/pipelines/cloudsqlproxy:$(cat /workspace/mm.ver) + docker push gcr.io/ml-pipeline/google/pipelines-test/cloudsqlproxy:$(cat /workspace/mm.ver) - name: 'gcr.io/cloud-builders/docker' - args: ['pull', 'gcr.io/ml-pipeline/argoexec:v2.3.0-license-compliance'] - id: 'PullArgoExecutor' -- name: 'gcr.io/cloud-builders/docker' - args: ['tag', 'gcr.io/ml-pipeline/argoexec:v2.3.0-license-compliance', 'gcr.io/ml-pipeline/google/pipelines/argoexecutor:$TAG_NAME'] - waitFor: ['PullArgoExecutor'] + args: ['pull', 'gcr.io/ml-pipeline/argoexec:v2.7.5-license-compliance'] + id: 'pullArgoExecutor' +- name: 'gcr.io/cloud-builders/docker' + args: ['tag', 'gcr.io/ml-pipeline/argoexec:v2.7.5-license-compliance', 'gcr.io/ml-pipeline/google/pipelines/argoexecutor:$TAG_NAME'] + id: 'tagArgoExecutorForMarketplace' + waitFor: ['pullArgoExecutor'] +- name: 'gcr.io/cloud-builders/docker' + args: ['tag', 'gcr.io/ml-pipeline/argoexec:v2.7.5-license-compliance', 'gcr.io/ml-pipeline/google/pipelines-test/argoexecutor:$TAG_NAME'] + id: 'tagArgoExecutorForMarketplaceTest' + waitFor: ['pullArgoExecutor'] +- id: 'tagArgoExecutorForMarketplaceMajorMinor' + waitFor: ['pullArgoExecutor', 'parseMajorMinorVersion'] + name: 'gcr.io/cloud-builders/docker' + entrypoint: bash + args: + - -ceux + - | + docker tag gcr.io/ml-pipeline/argoexec:v2.7.5-license-compliance gcr.io/ml-pipeline/google/pipelines/argoexecutor:$(cat /workspace/mm.ver) + docker tag gcr.io/ml-pipeline/argoexec:v2.7.5-license-compliance gcr.io/ml-pipeline/google/pipelines-test/argoexecutor:$(cat /workspace/mm.ver) + docker push gcr.io/ml-pipeline/google/pipelines/argoexecutor:$(cat /workspace/mm.ver) + docker push gcr.io/ml-pipeline/google/pipelines-test/argoexecutor:$(cat /workspace/mm.ver) - name: 'gcr.io/cloud-builders/docker' - args: ['pull', 'gcr.io/ml-pipeline/workflow-controller:v2.3.0-license-compliance'] - id: 'PullArgoWorkflowController' -- name: 'gcr.io/cloud-builders/docker' - args: ['tag', 'gcr.io/ml-pipeline/workflow-controller:v2.3.0-license-compliance', 'gcr.io/ml-pipeline/google/pipelines/argoworkflowcontroller:$TAG_NAME'] - waitFor: ['PullArgoWorkflowController'] + args: ['pull', 'gcr.io/ml-pipeline/workflow-controller:v2.7.5-license-compliance'] + id: 'pullArgoWorkflowController' +- name: 'gcr.io/cloud-builders/docker' + args: ['tag', 'gcr.io/ml-pipeline/workflow-controller:v2.7.5-license-compliance', 'gcr.io/ml-pipeline/google/pipelines/argoworkflowcontroller:$TAG_NAME'] + id: 'tagArgoWorkflowControllerForMarketplace' + waitFor: 
['pullArgoWorkflowController'] +- name: 'gcr.io/cloud-builders/docker' + args: ['tag', 'gcr.io/ml-pipeline/workflow-controller:v2.7.5-license-compliance', 'gcr.io/ml-pipeline/google/pipelines-test/argoworkflowcontroller:$TAG_NAME'] + id: 'tagArgoWorkflowControllerForMarketplaceTest' + waitFor: ['pullArgoWorkflowController'] +- id: 'tagArgoWorkflowControllerForMarketplaceMajorMinor' + waitFor: ['pullArgoWorkflowController', 'parseMajorMinorVersion'] + name: 'gcr.io/cloud-builders/docker' + entrypoint: bash + args: + - -ceux + - | + docker tag gcr.io/ml-pipeline/workflow-controller:v2.7.5-license-compliance gcr.io/ml-pipeline/google/pipelines/argoworkflowcontroller:$(cat /workspace/mm.ver) + docker tag gcr.io/ml-pipeline/workflow-controller:v2.7.5-license-compliance gcr.io/ml-pipeline/google/pipelines-test/argoworkflowcontroller:$(cat /workspace/mm.ver) + docker push gcr.io/ml-pipeline/google/pipelines/argoworkflowcontroller:$(cat /workspace/mm.ver) + docker push gcr.io/ml-pipeline/google/pipelines-test/argoworkflowcontroller:$(cat /workspace/mm.ver) + +# Marketplace specific deployer and specific primary image +- id: 'pullMarketplaceDeployer' + name: 'gcr.io/cloud-builders/docker' + args: ['pull', 'gcr.io/$PROJECT_ID/deployer:$COMMIT_SHA'] + waitFor: ['-'] +- id: 'tagMarketplaceDeployerVersionNumber' + name: 'gcr.io/cloud-builders/docker' + args: ['tag', 'gcr.io/$PROJECT_ID/deployer:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines/deployer:$TAG_NAME'] + waitFor: ['pullMarketplaceDeployer'] +- id: 'tagMarketplaceDeployerVersionNumberTest' + name: 'gcr.io/cloud-builders/docker' + args: ['tag', 'gcr.io/$PROJECT_ID/deployer:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines-test/deployer:$TAG_NAME'] + waitFor: ['pullMarketplaceDeployer'] +- id: 'tagMarketplaceDeployerVersionNumberMajorMinor' + waitFor: ['pullMarketplaceDeployer', 'parseMajorMinorVersion'] + name: 'gcr.io/cloud-builders/docker' + entrypoint: bash + args: + - -ceux + - | + docker tag gcr.io/$PROJECT_ID/deployer:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines/deployer:$(cat /workspace/mm.ver) + docker tag gcr.io/$PROJECT_ID/deployer:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines-test/deployer:$(cat /workspace/mm.ver) + docker push gcr.io/ml-pipeline/google/pipelines/deployer:$(cat /workspace/mm.ver) + docker push gcr.io/ml-pipeline/google/pipelines-test/deployer:$(cat /workspace/mm.ver) + +- id: 'tagMarketplacePrimaryVersionNumber' + name: 'gcr.io/cloud-builders/docker' + args: ['tag', 'gcr.io/$PROJECT_ID/deployer:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines:$TAG_NAME'] + waitFor: ['pullMarketplaceDeployer'] +- id: 'tagMarketplacePrimaryVersionNumberTest' + name: 'gcr.io/cloud-builders/docker' + args: ['tag', 'gcr.io/$PROJECT_ID/deployer:$COMMIT_SHA', 'gcr.io/ml-pipeline/google/pipelines-test:$TAG_NAME'] + waitFor: ['pullMarketplaceDeployer'] +- id: 'tagMarketplacePrimaryVersionNumberMajorMinor' + waitFor: ['pullMarketplaceDeployer', 'parseMajorMinorVersion'] + name: 'gcr.io/cloud-builders/docker' + entrypoint: bash + args: + - -ceux + - | + docker tag gcr.io/$PROJECT_ID/deployer:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines:$(cat /workspace/mm.ver) + docker tag gcr.io/$PROJECT_ID/deployer:$COMMIT_SHA gcr.io/ml-pipeline/google/pipelines-test:$(cat /workspace/mm.ver) + docker push gcr.io/ml-pipeline/google/pipelines:$(cat /workspace/mm.ver) + docker push gcr.io/ml-pipeline/google/pipelines-test:$(cat /workspace/mm.ver) # Copy the Python SDK - name: 'gcr.io/cloud-builders/gsutil' @@ -201,21 +554,7 @@ steps: id: 
'copyPythonComponentSDKToLatest' waitFor: ['copyPythonComponentSDKLocal'] -# Generate and copy the pipeline-lite deployment YAML -- name: 'google/cloud-sdk' - args: ['bash', '-c', 'kubectl kustomize /workspace/manifests/kustomize/env/dev > namespaced-install.yaml'] - id: 'generateDeploymentYaml' -- name: 'gcr.io/cloud-builders/gsutil' - args: ['cp', 'namespaced-install.yaml', 'gs://ml-pipeline/pipeline-lite/$TAG_NAME/namespaced-install.yaml'] - id: 'copyDeploymentYaml' - waitFor: ['generateDeploymentYaml'] - - images: -- 'gcr.io/ml-pipeline/frontend:$TAG_NAME' -- 'gcr.io/ml-pipeline/frontend:$COMMIT_SHA' -- 'gcr.io/ml-pipeline/api-server:$TAG_NAME' -- 'gcr.io/ml-pipeline/api-server:$COMMIT_SHA' - 'gcr.io/ml-pipeline/scheduledworkflow:$TAG_NAME' - 'gcr.io/ml-pipeline/scheduledworkflow:$COMMIT_SHA' - 'gcr.io/ml-pipeline/persistenceagent:$TAG_NAME' @@ -226,8 +565,12 @@ images: - 'gcr.io/ml-pipeline/inverse-proxy-agent:$COMMIT_SHA' - 'gcr.io/ml-pipeline/visualization-server:$TAG_NAME' - 'gcr.io/ml-pipeline/visualization-server:$COMMIT_SHA' -- 'gcr.io/ml-pipeline/google/pipelines/frontend:$TAG_NAME' -- 'gcr.io/ml-pipeline/google/pipelines/apiserver:$TAG_NAME' +- 'gcr.io/ml-pipeline/metadata-writer:$TAG_NAME' +- 'gcr.io/ml-pipeline/metadata-writer:$COMMIT_SHA' +- 'gcr.io/ml-pipeline/cache-server:$TAG_NAME' +- 'gcr.io/ml-pipeline/cache-server:$COMMIT_SHA' +- 'gcr.io/ml-pipeline/cache-deployer:$TAG_NAME' +- 'gcr.io/ml-pipeline/cache-deployer:$COMMIT_SHA' - 'gcr.io/ml-pipeline/google/pipelines/scheduledworkflow:$TAG_NAME' - 'gcr.io/ml-pipeline/google/pipelines/viewercrd:$TAG_NAME' - 'gcr.io/ml-pipeline/google/pipelines/persistenceagent:$TAG_NAME' @@ -240,6 +583,26 @@ images: - 'gcr.io/ml-pipeline/google/pipelines/argoexecutor:$TAG_NAME' - 'gcr.io/ml-pipeline/google/pipelines/argoworkflowcontroller:$TAG_NAME' - 'gcr.io/ml-pipeline/google/pipelines/metadataenvoy:$TAG_NAME' - +- 'gcr.io/ml-pipeline/google/pipelines/metadatawriter:$TAG_NAME' +- 'gcr.io/ml-pipeline/google/pipelines/deployer:$TAG_NAME' +- 'gcr.io/ml-pipeline/google/pipelines/cacheserver:$TAG_NAME' +- 'gcr.io/ml-pipeline/google/pipelines/cachedeployer:$TAG_NAME' +- 'gcr.io/ml-pipeline/google/pipelines:$TAG_NAME' +- 'gcr.io/ml-pipeline/google/pipelines-test/scheduledworkflow:$TAG_NAME' +- 'gcr.io/ml-pipeline/google/pipelines-test/viewercrd:$TAG_NAME' +- 'gcr.io/ml-pipeline/google/pipelines-test/persistenceagent:$TAG_NAME' +- 'gcr.io/ml-pipeline/google/pipelines-test/proxyagent:$TAG_NAME' +- 'gcr.io/ml-pipeline/google/pipelines-test/visualizationserver:$TAG_NAME' +- 'gcr.io/ml-pipeline/google/pipelines-test/metadataserver:$TAG_NAME' +- 'gcr.io/ml-pipeline/google/pipelines-test/minio:$TAG_NAME' +- 'gcr.io/ml-pipeline/google/pipelines-test/mysql:$TAG_NAME' +- 'gcr.io/ml-pipeline/google/pipelines-test/cloudsqlproxy:$TAG_NAME' +- 'gcr.io/ml-pipeline/google/pipelines-test/argoexecutor:$TAG_NAME' +- 'gcr.io/ml-pipeline/google/pipelines-test/argoworkflowcontroller:$TAG_NAME' +- 'gcr.io/ml-pipeline/google/pipelines-test/metadataenvoy:$TAG_NAME' +- 'gcr.io/ml-pipeline/google/pipelines-test/metadatawriter:$TAG_NAME' +- 'gcr.io/ml-pipeline/google/pipelines-test/cacheserver:$TAG_NAME' +- 'gcr.io/ml-pipeline/google/pipelines-test/cachedeployer:$TAG_NAME' +- 'gcr.io/ml-pipeline/google/pipelines-test/deployer:$TAG_NAME' +- 'gcr.io/ml-pipeline/google/pipelines-test:$TAG_NAME' timeout: '1200s' - diff --git a/.style.yapf b/.style.yapf new file mode 100644 index 00000000000..74248ecfe6e --- /dev/null +++ b/.style.yapf @@ -0,0 +1,9 @@ +[style] +based_on_style 
= google +blank_line_before_nested_class_or_def = true +column_limit = 80 +continuation_indent_width = 4 +dedent_closing_brackets = true +coalesce_brackets = true +indent_width = 2 +split_before_first_argument = true \ No newline at end of file diff --git a/.travis.yml b/.travis.yml index 6962de1e829..8d0c6b98947 100644 --- a/.travis.yml +++ b/.travis.yml @@ -14,75 +14,66 @@ matrix: include: - - language: node_js - node_js: "11.8.0" - addons: - chrome: stable - before_install: npm i -g npm@latest - script: # Frontend tests - cd $TRAVIS_BUILD_DIR/frontend - node -v - npm i # Comment out next line if coverall has an ongoing outage to unblock. - npm run test:coveralls - language: generic - env: - BAZEL_URL="https://github.com/bazelbuild/bazel/releases/download/0.23.0/bazel-0.23.0-installer-linux-x86_64.sh" - before_install: # Install Bazel - | wget --no-verbose -O install.sh "${BAZEL_URL}" chmod +x install.sh ./install.sh --user rm -f install.sh - script: # Backend unit tests - cd $TRAVIS_BUILD_DIR/backend/src - gimme -f 1.11.4 - | bazel --host_jvm_args=-Xmx500m --host_jvm_args=-Xms500m \ build --noshow_progress --noshow_loading_progress --define=grpc_no_ares=true //backend/... - | bazel --host_jvm_args=-Xmx500m --host_jvm_args=-Xms500m \ test --noshow_progress --noshow_loading_progress --define=grpc_no_ares=true //backend/... - source ~/.gimme/envs/go1.11.4.env - go vet -all -shadow ./agent/... - go vet -all -shadow ./common/... - go vet -all -shadow ./crd/... - go test ./agent/... - go test ./common/... - go test ./crd/... - language: python python: "2.7" env: TOXENV=py27 script: + - python -m pip --version + - pip install 'six>=1.13.0' # Component SDK tests - cd $TRAVIS_BUILD_DIR/components/gcp/container/component_sdk/python - ./run_test.sh - language: python python: "3.5" env: TOXENV=py35 + before_install: + - export PYTHONPATH=$PYTHONPATH:/home/travis/.local/lib/python3.5/site-packages/ install: &0 + - python3 -m pip install --upgrade pip + - python3 -m pip install -r $TRAVIS_BUILD_DIR/sdk/python/requirements.txt # Additional dependencies - - pip3 install coverage coveralls jsonschema==3.0.1 + - pip3 install coverage==4.5.4 coveralls==1.9.2 'six>=1.13.0' # Sample test infra dependencies - pip3 install minio - pip3 install junit_xml # Visualization test dependencies - cd $TRAVIS_BUILD_DIR/backend/src/apiserver/visualization - pip3 install -r requirements-test.txt - script: &1 - - # DSL tests + script: &1 # DSL tests - cd $TRAVIS_BUILD_DIR/sdk/python - - python3 setup.py develop + - python3 -m pip install -e . 
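+    # The editable install above lets the coverage runs below measure the checked-out kfp sources.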
- cd $TRAVIS_BUILD_DIR # Changing the current directory to the repo root for correct coverall paths - coverage run --source=kfp --append sdk/python/tests/dsl/main.py - coverage run --source=kfp --append sdk/python/tests/compiler/main.py - coverage run --source=kfp --append -m unittest discover --verbose --start-dir sdk/python/tests --top-level-directory=sdk/python - - coveralls + #- coveralls + + # Test against TFX + # Compile and setup protobuf + - PROTOC_ZIP=protoc-3.7.1-linux-x86_64.zip + - curl -OL https://github.com/protocolbuffers/protobuf/releases/download/v3.7.1/$PROTOC_ZIP + - sudo unzip -o $PROTOC_ZIP -d /usr/local bin/protoc + - sudo unzip -o $PROTOC_ZIP -d /usr/local 'include/*' + - rm -f $PROTOC_ZIP + # Install TFX from head + - cd $TRAVIS_BUILD_DIR + - git clone https://github.com/tensorflow/tfx.git + - cd $TRAVIS_BUILD_DIR/tfx + - pip3 install --upgrade pip + - pip3 install --upgrade 'numpy>=1.16,<1.17' + - set -x + - set -e + - python3 setup.py bdist_wheel + - WHEEL_PATH=$(find dist -name "tfx-*.whl") + - python3 -m pip install "${WHEEL_PATH}" --upgrade + - set +e + - set +x + # Three KFP-related unittests + - cd $TRAVIS_BUILD_DIR/tfx/tfx/orchestration/kubeflow + - python3 kubeflow_dag_runner_test.py + - cd $TRAVIS_BUILD_DIR/tfx/tfx/examples/chicago_taxi_pipeline + - python3 taxi_pipeline_kubeflow_gcp_test.py + - python3 taxi_pipeline_kubeflow_local_test.py # Visualization test - cd $TRAVIS_BUILD_DIR/backend/src/apiserver/visualization @@ -102,15 +93,19 @@ matrix: - language: python python: "3.6" env: TOXENV=py36 + before_install: + - export PYTHONPATH=$PYTHONPATH:/home/travis/.local/lib/python3.6/site-packages/ install: *0 script: *1 - language: python python: "3.7" env: TOXENV=py37 + before_install: + - export PYTHONPATH=$PYTHONPATH:/home/travis/.local/lib/python3.7/site-packages/ install: *0 script: *1 - name: "Lint Python code with flake8" language: python python: "3.7" install: pip install flake8 - script: flake8 . --count --exclude=backend/src/apiserver/visualization/types/*.py --select=E9,F63,F7,F82 --show-source --statistics + script: flake8 . 
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 4295c1474c5..6548bf76f6e 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,4 +1,774 @@
-# Change Log
+# Changelog
+
+## [0.5.1](https://github.com/kubeflow/pipelines/tree/0.5.1) (2020-05-07)
+
+[Full Changelog](https://github.com/kubeflow/pipelines/compare/0.4.0...0.5.1)
+
+Detailed PRs can be found [here](https://github.com/kubeflow/pipelines/commits)
+
+## [0.4.0](https://github.com/kubeflow/pipelines/tree/0.4.0) (2020-04-07)
+
+[Full Changelog](https://github.com/kubeflow/pipelines/compare/0.3.0...0.4.0)
+
+**Merged pull requests:**
+
+- \[Release\] update version [\#3467](https://github.com/kubeflow/pipelines/pull/3467) ([numerology](https://github.com/numerology))
+- Release eb69a6b8ae2d82cd8574ed11f04af4607756061c [\#3466](https://github.com/kubeflow/pipelines/pull/3466) ([numerology](https://github.com/numerology))
+- \[Cache\] Change cache-deployer deployment to strategy recreate [\#3456](https://github.com/kubeflow/pipelines/pull/3456) ([Bobgy](https://github.com/Bobgy))
+- SDK - Components - Fixed bug in loading input-less graph components [\#3446](https://github.com/kubeflow/pipelines/pull/3446) ([Ark-kun](https://github.com/Ark-kun))
+- AWS Sagemaker : Updated documents [\#3440](https://github.com/kubeflow/pipelines/pull/3440) ([akartsky](https://github.com/akartsky))
+- Refactor the legacy way of using pipeline id to create run in KFP backend [\#3437](https://github.com/kubeflow/pipelines/pull/3437) ([jingzhang36](https://github.com/jingzhang36))
+- \[Backend\] Keep workflow service account when not default or empty [\#3435](https://github.com/kubeflow/pipelines/pull/3435) ([Bobgy](https://github.com/Bobgy))
+- \[Fix\]Cache - Revert objectSelector in mutatingwebhookconfiguration [\#3433](https://github.com/kubeflow/pipelines/pull/3433) ([rui5i](https://github.com/rui5i))
+- \[Manifest\] Cache - MKP deployment [\#3430](https://github.com/kubeflow/pipelines/pull/3430) ([rui5i](https://github.com/rui5i))
+- \[Backend\]Cache - Fix flag parse [\#3429](https://github.com/kubeflow/pipelines/pull/3429) ([rui5i](https://github.com/rui5i))
+- \[SDK\] Skip host name preprocess for the IAP case [\#3427](https://github.com/kubeflow/pipelines/pull/3427) ([chensun](https://github.com/chensun))
+- Update deployment resource version from /v1beta1 -\> /v1 [\#3421](https://github.com/kubeflow/pipelines/pull/3421) ([NikeNano](https://github.com/NikeNano))
+- \[SDK\]\[Multi-user\] refine sdk for multi-user support [\#3417](https://github.com/kubeflow/pipelines/pull/3417) ([chensun](https://github.com/chensun))
+- revert kfp-cache from Hosted/MKP [\#3416](https://github.com/kubeflow/pipelines/pull/3416) ([rmgogogo](https://github.com/rmgogogo))
+- \[Manifest\]Fix - Cache mkp deployment [\#3414](https://github.com/kubeflow/pipelines/pull/3414) ([rui5i](https://github.com/rui5i))
+- quick fix envoy proxy service name [\#3413](https://github.com/kubeflow/pipelines/pull/3413) ([rmgogogo](https://github.com/rmgogogo))
+- \[Backend\]Cache - Max cache staleness support [\#3411](https://github.com/kubeflow/pipelines/pull/3411) ([rui5i](https://github.com/rui5i))
+- enable native Keras + TFMA [\#3409](https://github.com/kubeflow/pipelines/pull/3409) ([rmgogogo](https://github.com/rmgogogo))
+- Fix presubmit failure by avoiding license downloading when building image [\#3406](https://github.com/kubeflow/pipelines/pull/3406)
([Bobgy](https://github.com/Bobgy)) +- Testing - Upgraded GKE master version to fix tests [\#3404](https://github.com/kubeflow/pipelines/pull/3404) ([Ark-kun](https://github.com/Ark-kun)) +- \[UI\] No longer pass namespace to createRun api [\#3403](https://github.com/kubeflow/pipelines/pull/3403) ([Bobgy](https://github.com/Bobgy)) +- Add new instructions to ensure compatibility for managed ai platform … [\#3400](https://github.com/kubeflow/pipelines/pull/3400) ([luotigerlsx](https://github.com/luotigerlsx)) +- AWS sagemaker : Added license files and updated Dockerfile to use AmazonLinux [\#3397](https://github.com/kubeflow/pipelines/pull/3397) ([akartsky](https://github.com/akartsky)) +- Fix lstrip with regex bug in the KFP client [\#3396](https://github.com/kubeflow/pipelines/pull/3396) ([sngahane](https://github.com/sngahane)) +- \[Sample\] Change preloaded TFX samples to use GCR images [\#3395](https://github.com/kubeflow/pipelines/pull/3395) ([numerology](https://github.com/numerology)) +- \[Backend\]\[Multi-user\] support multi-user mode for job APIs [\#3384](https://github.com/kubeflow/pipelines/pull/3384) ([chensun](https://github.com/chensun)) +- enable CloudSQL+GCSObjStore without default credential [\#3378](https://github.com/kubeflow/pipelines/pull/3378) ([rmgogogo](https://github.com/rmgogogo)) +- \[Manifest\] Cache - Enable cache and cache deployer in base kustomization file [\#3376](https://github.com/kubeflow/pipelines/pull/3376) ([rui5i](https://github.com/rui5i)) +- AWS sagemaker : Make 'endpoint\_url' argument default to None instead of empty string [\#3374](https://github.com/kubeflow/pipelines/pull/3374) ([akartsky](https://github.com/akartsky)) +- \[Backend\]Cache - KFP pod filter logic looking for cache\_enabled = true label selector [\#3368](https://github.com/kubeflow/pipelines/pull/3368) ([rui5i](https://github.com/rui5i)) +- \[Test\] Add argo retry in sample/integration tests to reduce flakiness. [\#3365](https://github.com/kubeflow/pipelines/pull/3365) ([numerology](https://github.com/numerology)) +- pass token outside of SDK for server-to-server case [\#3363](https://github.com/kubeflow/pipelines/pull/3363) ([rmgogogo](https://github.com/rmgogogo)) +- Integration test fix [\#3357](https://github.com/kubeflow/pipelines/pull/3357) ([rmgogogo](https://github.com/rmgogogo)) +- Fix Tensorboard viewer image parsing [\#3356](https://github.com/kubeflow/pipelines/pull/3356) ([rafaelbarreto87](https://github.com/rafaelbarreto87)) +- \[UI\] Tensorboard support for multi user [\#3355](https://github.com/kubeflow/pipelines/pull/3355) ([Bobgy](https://github.com/Bobgy)) +- \[Backend\] Cache - Add cache\_enabled label for cache filtering [\#3352](https://github.com/kubeflow/pipelines/pull/3352) ([rui5i](https://github.com/rui5i)) +- \[UI\] Add namespace filter for All and Archived Runs page [\#3351](https://github.com/kubeflow/pipelines/pull/3351) ([Bobgy](https://github.com/Bobgy)) +- \[UI\] Stops experiment list from leaking previous error message [\#3350](https://github.com/kubeflow/pipelines/pull/3350) ([Bobgy](https://github.com/Bobgy)) +- Update CHANGELOG for 0.3.0 [\#3349](https://github.com/kubeflow/pipelines/pull/3349) ([neuromage](https://github.com/neuromage)) +- Fix README formatting. 
[\#3348](https://github.com/kubeflow/pipelines/pull/3348) ([neuromage](https://github.com/neuromage)) +- \[Manifest\]Cache - mkp deployment [\#3343](https://github.com/kubeflow/pipelines/pull/3343) ([rui5i](https://github.com/rui5i)) +- \[Backend\]\[Multi-user\] Add authorization check on ListExperiment [\#3341](https://github.com/kubeflow/pipelines/pull/3341) ([chensun](https://github.com/chensun)) +- \[SDK/CLI\] Add version param to run\_pipeline [\#3339](https://github.com/kubeflow/pipelines/pull/3339) ([pselden](https://github.com/pselden)) +- \[Manifest\] Cache - Fix upgrade manifest [\#3338](https://github.com/kubeflow/pipelines/pull/3338) ([rui5i](https://github.com/rui5i)) +- \[Backend\]\[Multi-user\] Adjust/implement run api for multiuser support [\#3337](https://github.com/kubeflow/pipelines/pull/3337) ([chensun](https://github.com/chensun)) +- \[UI Server\] Blocks non public KFP report APIs [\#3334](https://github.com/kubeflow/pipelines/pull/3334) ([Bobgy](https://github.com/Bobgy)) +- \[Testing\] Disable cache-deployer temporarily because it blocks upgrade tests [\#3333](https://github.com/kubeflow/pipelines/pull/3333) ([Bobgy](https://github.com/Bobgy)) +- Add hostNetwork for marketplace proxy-agent manifest [\#3330](https://github.com/kubeflow/pipelines/pull/3330) ([Bobgy](https://github.com/Bobgy)) +- SDK - Tests - Improved tests for serializing lists containing objects [\#3326](https://github.com/kubeflow/pipelines/pull/3326) ([Ark-kun](https://github.com/Ark-kun)) +- SDK - Added pinned dependency snapshot [\#3303](https://github.com/kubeflow/pipelines/pull/3303) ([Ark-kun](https://github.com/Ark-kun)) +- \[SDK\]\[Multi-user\] Add namespace to experiment SDK calls [\#3272](https://github.com/kubeflow/pipelines/pull/3272) ([chensun](https://github.com/chensun)) +- Post-submit test for Hosted/MKP \(mpdev verify\) [\#3193](https://github.com/kubeflow/pipelines/pull/3193) ([rmgogogo](https://github.com/rmgogogo)) +- apiserver: Handle BucketExists\(\) error [\#3132](https://github.com/kubeflow/pipelines/pull/3132) ([discordianfish](https://github.com/discordianfish)) +- Manifests: Rename metadata gRPC server's resources to metadata-grpc-\* [\#3108](https://github.com/kubeflow/pipelines/pull/3108) ([elikatsis](https://github.com/elikatsis)) +- \[Sample\] CI Sample: Kaggle [\#3021](https://github.com/kubeflow/pipelines/pull/3021) ([dldaisy](https://github.com/dldaisy)) +- \[Frontend\] Node server artifact handler supports preview and handles both archived and unarchived \(gzip/tarball\) artifacts \#2172 [\#2992](https://github.com/kubeflow/pipelines/pull/2992) ([eterna2](https://github.com/eterna2)) + +## [0.3.0](https://github.com/kubeflow/pipelines/tree/0.3.0) (2020-03-23) +[Full Changelog](https://github.com/kubeflow/pipelines/compare/0.2.5...0.3.0) + +**Merged pull requests:** + +- Reduce steps for release cloud build yaml [\#3331](https://github.com/kubeflow/pipelines/pull/3331) ([IronPan](https://github.com/IronPan)) +- Bump version to 0.3.0 [\#3329](https://github.com/kubeflow/pipelines/pull/3329) ([neuromage](https://github.com/neuromage)) +- Release be497983cda7a1d17f3883c67e39a969cf0868a9 [\#3327](https://github.com/kubeflow/pipelines/pull/3327) ([neuromage](https://github.com/neuromage)) +- SDK - Tests - Moved imports to the start of the file [\#3325](https://github.com/kubeflow/pipelines/pull/3325) ([Ark-kun](https://github.com/Ark-kun)) +- SDK - Moved the @python\_component decorator test to dsl tests [\#3324](https://github.com/kubeflow/pipelines/pull/3324) 
([Ark-kun](https://github.com/Ark-kun)) +- SDK - Moved python op pipeline compilation test to bridge tests [\#3323](https://github.com/kubeflow/pipelines/pull/3323) ([Ark-kun](https://github.com/Ark-kun)) +- \[UI\] Delete manifest.json [\#3320](https://github.com/kubeflow/pipelines/pull/3320) ([Bobgy](https://github.com/Bobgy)) +- SDK - Support kubernetes client v11 [\#3319](https://github.com/kubeflow/pipelines/pull/3319) ([Ark-kun](https://github.com/Ark-kun)) +- SDK - Added support for maxCacheStaleness [\#3318](https://github.com/kubeflow/pipelines/pull/3318) ([Ark-kun](https://github.com/Ark-kun)) +- Update Watson ML example to take output param path [\#3316](https://github.com/kubeflow/pipelines/pull/3316) ([Tomcli](https://github.com/Tomcli)) +- \[Test\] expose integration test flag [\#3307](https://github.com/kubeflow/pipelines/pull/3307) ([numerology](https://github.com/numerology)) +- Remove unused parameter [\#3306](https://github.com/kubeflow/pipelines/pull/3306) ([claytonlemons](https://github.com/claytonlemons)) +- \[Backend\] Always disable istio sidecar injection [\#3305](https://github.com/kubeflow/pipelines/pull/3305) ([Bobgy](https://github.com/Bobgy)) +- \[UI\] Show step pod yaml and events in RunDetails page [\#3304](https://github.com/kubeflow/pipelines/pull/3304) ([Bobgy](https://github.com/Bobgy)) +- Enable archiving logs [\#3301](https://github.com/kubeflow/pipelines/pull/3301) ([Ark-kun](https://github.com/Ark-kun)) +- \[Test\] Switch Travis dependency to TF2 [\#3299](https://github.com/kubeflow/pipelines/pull/3299) ([numerology](https://github.com/numerology)) +- \[Test\] Use a flag to separate integration test from sample test [\#3298](https://github.com/kubeflow/pipelines/pull/3298) ([numerology](https://github.com/numerology)) +- \[UI\] Auto refresh/redirect experiment list/detail page when user selects a different namespace [\#3297](https://github.com/kubeflow/pipelines/pull/3297) ([Bobgy](https://github.com/Bobgy)) +- \[SDK\] Improve host name handling [\#3287](https://github.com/kubeflow/pipelines/pull/3287) ([numerology](https://github.com/numerology)) +- Add Deletion button to archived runs [\#3285](https://github.com/kubeflow/pipelines/pull/3285) ([jingzhang36](https://github.com/jingzhang36)) +- \[Testing\] Recycle upgrade- clusters and reduce time lapse [\#3282](https://github.com/kubeflow/pipelines/pull/3282) ([Bobgy](https://github.com/Bobgy)) +- \[Frontend\] Fix reported vulnerabilities [\#3281](https://github.com/kubeflow/pipelines/pull/3281) ([Bobgy](https://github.com/Bobgy)) +- Update IBM Watson ML sample to use V4 api [\#3274](https://github.com/kubeflow/pipelines/pull/3274) ([kevinyu98](https://github.com/kevinyu98)) +- Add pipeline version to job/run integration test so that job/run is c… [\#3270](https://github.com/kubeflow/pipelines/pull/3270) ([jingzhang36](https://github.com/jingzhang36)) +- SDK - Moved \_dsl\_bridge to dsl [\#3267](https://github.com/kubeflow/pipelines/pull/3267) ([Ark-kun](https://github.com/Ark-kun)) +- \[Backend\]Cache - Cache logic with db interaction [\#3266](https://github.com/kubeflow/pipelines/pull/3266) ([rui5i](https://github.com/rui5i)) +- Remove backend unit tests from Travis config [\#3264](https://github.com/kubeflow/pipelines/pull/3264) ([jingzhang36](https://github.com/jingzhang36)) +- SDK - Components - Fixed handling collection return values [\#3263](https://github.com/kubeflow/pipelines/pull/3263) ([Ark-kun](https://github.com/Ark-kun)) +- add presto pipeline component 
[\#3261](https://github.com/kubeflow/pipelines/pull/3261) ([mertkavi](https://github.com/mertkavi)) +- Update ai\_platform.ipynb [\#3260](https://github.com/kubeflow/pipelines/pull/3260) ([gogasca](https://github.com/gogasca)) +- SDK - Components - Make component structures hashable [\#3258](https://github.com/kubeflow/pipelines/pull/3258) ([Ark-kun](https://github.com/Ark-kun)) +- SDK - Tests - Testing command-line resolving explicitly [\#3257](https://github.com/kubeflow/pipelines/pull/3257) ([Ark-kun](https://github.com/Ark-kun)) +- Bump TFX to 0.21.2 [\#3255](https://github.com/kubeflow/pipelines/pull/3255) ([numerology](https://github.com/numerology)) +- Added 'workgroup' as optional argument to athena component [\#3254](https://github.com/kubeflow/pipelines/pull/3254) ([LeonardAukea](https://github.com/LeonardAukea)) +- \[UI\] Pass namespace to experiment api calls [\#3252](https://github.com/kubeflow/pipelines/pull/3252) ([Bobgy](https://github.com/Bobgy)) +- Fix ill-formed Cloud Build config [\#3245](https://github.com/kubeflow/pipelines/pull/3245) ([numerology](https://github.com/numerology)) +- \[Backend\]\[Multi-user\] Add namespace to Experiment API implementation [\#3243](https://github.com/kubeflow/pipelines/pull/3243) ([chensun](https://github.com/chensun)) +- Clean up kataras dependency [\#3240](https://github.com/kubeflow/pipelines/pull/3240) ([jingzhang36](https://github.com/jingzhang36)) +- \[UI\] Fix SideNav too tall when collapsed [\#3239](https://github.com/kubeflow/pipelines/pull/3239) ([Bobgy](https://github.com/Bobgy)) +- \[Manifest\] fix version for MLMD store [\#3237](https://github.com/kubeflow/pipelines/pull/3237) ([numerology](https://github.com/numerology)) +- Parametrize the image used for the Tensorboard viewer [\#3235](https://github.com/kubeflow/pipelines/pull/3235) ([rafaelbarreto87](https://github.com/rafaelbarreto87)) +- retry pipeline always fail \(\#3230\) [\#3231](https://github.com/kubeflow/pipelines/pull/3231) ([xiaohanhuang](https://github.com/xiaohanhuang)) +- \[Frontend\] Fix the TFMA/TFDV viz behavior when caching is used. 
[\#3228](https://github.com/kubeflow/pipelines/pull/3228) ([numerology](https://github.com/numerology))
+- \[UI\] Human readable workflow task display names [\#3225](https://github.com/kubeflow/pipelines/pull/3225) ([Bobgy](https://github.com/Bobgy))
+- \[SDK\] Use new released kfp-server-api package [\#3224](https://github.com/kubeflow/pipelines/pull/3224) ([Bobgy](https://github.com/Bobgy))
+- Bump TFX to 0.21.1 [\#3220](https://github.com/kubeflow/pipelines/pull/3220) ([numerology](https://github.com/numerology))
+- \[Frontend\] GKE metadata endpoint should fail when fetched response is not ok [\#3218](https://github.com/kubeflow/pipelines/pull/3218) ([Bobgy](https://github.com/Bobgy))
+- \[Deployment\] Add secure=false explicitly in manifests for better observability [\#3217](https://github.com/kubeflow/pipelines/pull/3217) ([Bobgy](https://github.com/Bobgy))
+- SDK/DSL: Enable the deletion of a resource via ResourceOp method [\#3213](https://github.com/kubeflow/pipelines/pull/3213) ([elikatsis](https://github.com/elikatsis))
+- add an optional name for dsl.Condition \(kubeflow\#3210\) [\#3212](https://github.com/kubeflow/pipelines/pull/3212) ([xiaohanhuang](https://github.com/xiaohanhuang)) (see the example after this list)
+- Pin TFT to 0.21.0 [\#3209](https://github.com/kubeflow/pipelines/pull/3209) ([numerology](https://github.com/numerology))
+- \[SDK\] New fields for schedule pipeline [\#3201](https://github.com/kubeflow/pipelines/pull/3201) ([Bobgy](https://github.com/Bobgy))
+- Fix presubmit sample tests base image [\#3200](https://github.com/kubeflow/pipelines/pull/3200) ([rui5i](https://github.com/rui5i))
+- \[Backend\]\[Multi-user\] Add resource reference to Experiment API [\#3198](https://github.com/kubeflow/pipelines/pull/3198) ([chensun](https://github.com/chensun))
+- \[UI\] Better logging in UI server [\#3197](https://github.com/kubeflow/pipelines/pull/3197) ([Bobgy](https://github.com/Bobgy))
+- \[UI\] Get execution name from name field [\#3196](https://github.com/kubeflow/pipelines/pull/3196) ([Bobgy](https://github.com/Bobgy))
+- revert \#3168 as it doesn't work for MKP, put myself to reviewer [\#3191](https://github.com/kubeflow/pipelines/pull/3191) ([rmgogogo](https://github.com/rmgogogo))
+- Check Tensorboard Instance [\#3189](https://github.com/kubeflow/pipelines/pull/3189) ([jingzhang36](https://github.com/jingzhang36))
+- \[Tutorial\] - Fixed lightweight component tutorial with bad metadata usage [\#3186](https://github.com/kubeflow/pipelines/pull/3186) ([luotigerlsx](https://github.com/luotigerlsx))
+- backend: Pin python dependencies [\#3161](https://github.com/kubeflow/pipelines/pull/3161) ([discordianfish](https://github.com/discordianfish))
+- Backend - Validate compiled samples [\#3120](https://github.com/kubeflow/pipelines/pull/3120) ([Ark-kun](https://github.com/Ark-kun))
+- TFX Iris sample [\#3119](https://github.com/kubeflow/pipelines/pull/3119) ([numerology](https://github.com/numerology))
+- Fix config fieldname `PipelineFolder` to `PipelinePath` [\#3056](https://github.com/kubeflow/pipelines/pull/3056) ([eterna2](https://github.com/eterna2))
+- SDK/CLI: Implement CLI delete pipeline command [\#2879](https://github.com/kubeflow/pipelines/pull/2879) ([sh-tatsuno](https://github.com/sh-tatsuno))
+- \[Testing\] KFP standalone test infra for upgradability [\#1971](https://github.com/kubeflow/pipelines/pull/1971) ([Bobgy](https://github.com/Bobgy))
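One SDK-facing item in the 0.3.0 list above, \#3212, adds an optional `name` argument to `dsl.Condition`. A minimal sketch of the usage, assuming a v1-era `kfp` SDK (the image, commands, and file paths are illustrative):

```python
import kfp
from kfp import dsl


def flip_coin_op():
    # Illustrative op: writes "heads" or "tails" to /tmp/output.
    return dsl.ContainerOp(
        name='flip-coin',
        image='python:3.7-alpine',
        command=['sh', '-c',
                 'python -c "import random; print(random.choice([\'heads\', \'tails\']))"'
                 ' | tee /tmp/output'],
        file_outputs={'output': '/tmp/output'})


@dsl.pipeline(name='condition-demo')
def condition_demo():
    flip = flip_coin_op()
    # name= (added in #3212) labels the conditional branch in the compiled workflow.
    with dsl.Condition(flip.output == 'heads', name='when-heads'):
        dsl.ContainerOp(name='celebrate', image='alpine', command=['echo', 'got heads'])


if __name__ == '__main__':
    kfp.compiler.Compiler().compile(condition_demo, 'condition_demo.yaml')
```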
+
+## [0.2.5](https://github.com/kubeflow/pipelines/tree/0.2.5) (2020-02-27)
+
+[Full Changelog](https://github.com/kubeflow/pipelines/compare/0.2.4...0.2.5)
+
+**Merged pull requests:**
+
+- Release 0.2.5 - e66dcb18607406330f953bf99b04fe7c3ed1a4a8 [\#3179](https://github.com/kubeflow/pipelines/pull/3179) ([Bobgy](https://github.com/Bobgy))
+- tiny changes to let user aware should use TF 2.1 Notebook instance [\#3178](https://github.com/kubeflow/pipelines/pull/3178) ([rmgogogo](https://github.com/rmgogogo))
+- \[UI\] Supports storage: "inline" for all types of visualizations [\#3177](https://github.com/kubeflow/pipelines/pull/3177) ([Bobgy](https://github.com/Bobgy))
+- Bump to latest kubeflow/frontend hash with many-to-many execution fix [\#3176](https://github.com/kubeflow/pipelines/pull/3176) ([kwasi](https://github.com/kwasi))
+- start page updates according latest feedback [\#3175](https://github.com/kubeflow/pipelines/pull/3175) ([rmgogogo](https://github.com/rmgogogo))
+- Add integration tests for version related methods [\#3174](https://github.com/kubeflow/pipelines/pull/3174) ([jingzhang36](https://github.com/jingzhang36))
+- Upload pipeline version return a response with enum as string [\#3171](https://github.com/kubeflow/pipelines/pull/3171) ([jingzhang36](https://github.com/jingzhang36))
+- \[Sample\] Disable multi-threading for TFX demo notebook [\#3170](https://github.com/kubeflow/pipelines/pull/3170) ([numerology](https://github.com/numerology))
+- \[UI\] Always show stackdriver link in GKE [\#3167](https://github.com/kubeflow/pipelines/pull/3167) ([Bobgy](https://github.com/Bobgy))
+- \[Frontend\] snapshot diff setup [\#3166](https://github.com/kubeflow/pipelines/pull/3166) ([Bobgy](https://github.com/Bobgy))
+- Update frontend swagger generated code [\#3163](https://github.com/kubeflow/pipelines/pull/3163) ([jingzhang36](https://github.com/jingzhang36))
+- Fix a missing query parameter in swagger json file for version upload api [\#3160](https://github.com/kubeflow/pipelines/pull/3160) ([jingzhang36](https://github.com/jingzhang36))
+- \[Frontend\] Prepare snapshot updates for cra migration [\#3159](https://github.com/kubeflow/pipelines/pull/3159) ([Bobgy](https://github.com/Bobgy))
+- \[Frontend\] Migrate to create-react-app [\#3156](https://github.com/kubeflow/pipelines/pull/3156) ([Bobgy](https://github.com/Bobgy))
+- \[Frontend\] Update deps to bring optional chaining js language feature [\#3154](https://github.com/kubeflow/pipelines/pull/3154) ([Bobgy](https://github.com/Bobgy))
+- \[UI\] Update frontend/README.md [\#3153](https://github.com/kubeflow/pipelines/pull/3153) ([Bobgy](https://github.com/Bobgy))
+- Add a message in 'Get Started Page' [\#3152](https://github.com/kubeflow/pipelines/pull/3152) ([chensun](https://github.com/chensun))
+- \[Test\] Update doc about image position [\#3151](https://github.com/kubeflow/pipelines/pull/3151) ([numerology](https://github.com/numerology))
+- Update 'TFX demo link' [\#3150](https://github.com/kubeflow/pipelines/pull/3150) ([chensun](https://github.com/chensun))
+- \[Sample\] Update doc in taxi pipeline demo [\#3149](https://github.com/kubeflow/pipelines/pull/3149) ([numerology](https://github.com/numerology))
+- \[Frontend\] Pin links in getting started page when release [\#3147](https://github.com/kubeflow/pipelines/pull/3147) ([numerology](https://github.com/numerology))
+- \[Test\] Turn back on TFX unit test [\#3146](https://github.com/kubeflow/pipelines/pull/3146) ([numerology](https://github.com/numerology))
+- \[Backend\] Fix parameter patching
[\#3145](https://github.com/kubeflow/pipelines/pull/3145) ([numerology](https://github.com/numerology)) +- Fix broken doc link [\#3144](https://github.com/kubeflow/pipelines/pull/3144) ([numerology](https://github.com/numerology)) +- \[Frontend\] Update contribution doc with start proxy instructions and npm daily practices [\#3142](https://github.com/kubeflow/pipelines/pull/3142) ([Bobgy](https://github.com/Bobgy)) +- \[MLMD\]\[Lineage\] Navigate to ArtifactDetails Overview on row click \[long term\] [\#3141](https://github.com/kubeflow/pipelines/pull/3141) ([Bobgy](https://github.com/Bobgy)) +- \[frontend\]: Use Promise.all to fetch clusterName and projectId [\#3140](https://github.com/kubeflow/pipelines/pull/3140) ([Kriyszig](https://github.com/Kriyszig)) +- \[Backend\] swf catchup option integration tests [\#3139](https://github.com/kubeflow/pipelines/pull/3139) ([Bobgy](https://github.com/Bobgy)) +- \[Backend\] Refactor integration tests, facilitate local testing [\#3138](https://github.com/kubeflow/pipelines/pull/3138) ([Bobgy](https://github.com/Bobgy)) +- Update link to TFX demo in getting start page [\#3136](https://github.com/kubeflow/pipelines/pull/3136) ([numerology](https://github.com/numerology)) +- \[Test\] Temporarily bypass TFX unittest [\#3135](https://github.com/kubeflow/pipelines/pull/3135) ([numerology](https://github.com/numerology)) +- \[UI\] Scheduled workflow catchup=false option [\#3131](https://github.com/kubeflow/pipelines/pull/3131) ([Bobgy](https://github.com/Bobgy)) +- \[DB\] Add NoCatchup field to Job db model [\#3129](https://github.com/kubeflow/pipelines/pull/3129) ([Bobgy](https://github.com/Bobgy)) +- SDK - Fix SDK on Python 3.8 [\#3126](https://github.com/kubeflow/pipelines/pull/3126) ([Ark-kun](https://github.com/Ark-kun)) +- SDK - Compiler - Fix incompatibility with python3.5 [\#3122](https://github.com/kubeflow/pipelines/pull/3122) ([Ark-kun](https://github.com/Ark-kun)) +- Bump TFX dependency [\#3121](https://github.com/kubeflow/pipelines/pull/3121) ([numerology](https://github.com/numerology)) +- \[UIServer\] Add DISABLE\_GKE\_METADATA env flag to skip metadata retrieval. 
[\#3118](https://github.com/kubeflow/pipelines/pull/3118) ([Bobgy](https://github.com/Bobgy)) +- SDK - Reduce python component limitations - no import errors for cust… [\#3106](https://github.com/kubeflow/pipelines/pull/3106) ([Ark-kun](https://github.com/Ark-kun)) +- apiserver: Close rows before reusing tx [\#3099](https://github.com/kubeflow/pipelines/pull/3099) ([discordianfish](https://github.com/discordianfish)) +- nit: Add BigQuery role [\#3097](https://github.com/kubeflow/pipelines/pull/3097) ([gogasca](https://github.com/gogasca)) +- \[Sample\] CI Sample: mnist [\#3013](https://github.com/kubeflow/pipelines/pull/3013) ([dldaisy](https://github.com/dldaisy)) + +## [0.2.4](https://github.com/kubeflow/pipelines/tree/0.2.4) (2020-02-20) + +[Full Changelog](https://github.com/kubeflow/pipelines/compare/0.2.3...0.2.4) + +**Merged pull requests:** + +- \[Sample\] update README [\#3127](https://github.com/kubeflow/pipelines/pull/3127) ([numerology](https://github.com/numerology)) +- \[Sample\] typo fix [\#3125](https://github.com/kubeflow/pipelines/pull/3125) ([numerology](https://github.com/numerology)) +- \[UI\] Deduplicate requests to gke metadata [\#3117](https://github.com/kubeflow/pipelines/pull/3117) ([Bobgy](https://github.com/Bobgy)) +- \[Visualization\] Clean up full.tpl to improve vis loading time [\#3116](https://github.com/kubeflow/pipelines/pull/3116) ([Bobgy](https://github.com/Bobgy)) +- bump version to 0.2.4 and tiny BTW doc fix [\#3115](https://github.com/kubeflow/pipelines/pull/3115) ([rmgogogo](https://github.com/rmgogogo)) +- \[UI\] Clean up backstop js usages [\#3113](https://github.com/kubeflow/pipelines/pull/3113) ([Bobgy](https://github.com/Bobgy)) +- TFMA auto-visualization for TFX components in KFP [\#3111](https://github.com/kubeflow/pipelines/pull/3111) ([jingzhang36](https://github.com/jingzhang36)) +- SDK - Components - Fixed dict-style type annotations [\#3107](https://github.com/kubeflow/pipelines/pull/3107) ([Ark-kun](https://github.com/Ark-kun)) +- ml\_engine component READMEs incorrect [\#3103](https://github.com/kubeflow/pipelines/pull/3103) ([TheMichaelHu](https://github.com/TheMichaelHu)) +- \[Lineage\] Update kubeflow/frontend to latest hash to pick up bug bash fixes [\#3102](https://github.com/kubeflow/pipelines/pull/3102) ([kwasi](https://github.com/kwasi)) +- Remove pinning avro-python3, because it is already fixed [\#3096](https://github.com/kubeflow/pipelines/pull/3096) ([Bobgy](https://github.com/Bobgy)) +- SDK - Compiler - Add optional Argo validation [\#3094](https://github.com/kubeflow/pipelines/pull/3094) ([Ark-kun](https://github.com/Ark-kun)) +- XGBoost demo - change quota calculation [\#3093](https://github.com/kubeflow/pipelines/pull/3093) ([SinaChavoshi](https://github.com/SinaChavoshi)) +- \[Doc\] Call out quota requirement in XGBoost sample [\#3092](https://github.com/kubeflow/pipelines/pull/3092) ([numerology](https://github.com/numerology)) +- SDK - Fixed the broken kfp.gcp.use\_preemptible\_nodepool extension [\#3091](https://github.com/kubeflow/pipelines/pull/3091) ([Ark-kun](https://github.com/Ark-kun)) +- Fix api generation script [\#3090](https://github.com/kubeflow/pipelines/pull/3090) ([chensun](https://github.com/chensun)) +- enable tfdv, remove hardcode as a sample for following PRs [\#3089](https://github.com/kubeflow/pipelines/pull/3089) ([rmgogogo](https://github.com/rmgogogo)) +- \[UI\] deep links to pipeline details page from start page [\#3086](https://github.com/kubeflow/pipelines/pull/3086) 
([Bobgy](https://github.com/Bobgy)) +- Pin Pyarrow version to 0.15.1 after we upgrade to use tfdv 0.21.1 [\#3083](https://github.com/kubeflow/pipelines/pull/3083) ([jingzhang36](https://github.com/jingzhang36)) +- \[UI\] Visualization only loads on completed nodes [\#3082](https://github.com/kubeflow/pipelines/pull/3082) ([Bobgy](https://github.com/Bobgy)) +- \[Community\] Improve BUG\_REPORT.md [\#3081](https://github.com/kubeflow/pipelines/pull/3081) ([Bobgy](https://github.com/Bobgy)) +- Samples - Add explanation about pipeline failing intentionally [\#3076](https://github.com/kubeflow/pipelines/pull/3076) ([Ark-kun](https://github.com/Ark-kun)) +- \[Controller\] Scheduled workflow supports catch up false [\#3073](https://github.com/kubeflow/pipelines/pull/3073) ([Bobgy](https://github.com/Bobgy)) +- update version from 0.2.2 to 0.2.3 for kfp [\#3072](https://github.com/kubeflow/pipelines/pull/3072) ([jingzhang36](https://github.com/jingzhang36)) +- \[Component\] Add quota check in diagnose me component. [\#3062](https://github.com/kubeflow/pipelines/pull/3062) ([numerology](https://github.com/numerology)) +- XGBoost demo - Removing unnecessary parameters and adding quota check. [\#3049](https://github.com/kubeflow/pipelines/pull/3049) ([SinaChavoshi](https://github.com/SinaChavoshi)) +- Backend - Removed Tensorflow from backend WORKSPACE [\#2856](https://github.com/kubeflow/pipelines/pull/2856) ([Ark-kun](https://github.com/Ark-kun)) + +## [0.2.3](https://github.com/kubeflow/pipelines/tree/0.2.3) (2020-02-13) + +[Full Changelog](https://github.com/kubeflow/pipelines/compare/0.2.2...0.2.3) + +**Merged pull requests:** + +- quick fix for quota list [\#3075](https://github.com/kubeflow/pipelines/pull/3075) ([SinaChavoshi](https://github.com/SinaChavoshi)) +- \[Fix\] Pin avro==1.9.1 [\#3067](https://github.com/kubeflow/pipelines/pull/3067) ([numerology](https://github.com/numerology)) +- \[Doc\] Make it explicit that hosted beta does not require GCP SA [\#3066](https://github.com/kubeflow/pipelines/pull/3066) ([numerology](https://github.com/numerology)) +- Fix Pipeline/Workspace field in ArtifactList table [\#3063](https://github.com/kubeflow/pipelines/pull/3063) ([kwasi](https://github.com/kwasi)) +- update TFDV to 0.21.1 which can work for Chrome 80 [\#3060](https://github.com/kubeflow/pipelines/pull/3060) ([rmgogogo](https://github.com/rmgogogo)) +- \[UI\] Upload pipeline version from local package [\#3059](https://github.com/kubeflow/pipelines/pull/3059) ([Bobgy](https://github.com/Bobgy)) +- \[Test\] Add docker daemon start in test base image [\#3053](https://github.com/kubeflow/pipelines/pull/3053) ([rui5i](https://github.com/rui5i)) +- Adding auth list support for diagnose\_me [\#3052](https://github.com/kubeflow/pipelines/pull/3052) ([SinaChavoshi](https://github.com/SinaChavoshi)) +- updating links and service account message [\#3044](https://github.com/kubeflow/pipelines/pull/3044) ([SinaChavoshi](https://github.com/SinaChavoshi)) +- \[UI\] Circular progress for TFX visualization [\#3043](https://github.com/kubeflow/pipelines/pull/3043) ([Bobgy](https://github.com/Bobgy)) +- Set auto-added artifacts optional in recurring run \(job\) [\#3041](https://github.com/kubeflow/pipelines/pull/3041) ([jingzhang36](https://github.com/jingzhang36)) +- Implement filtering in ListPipelines [\#3040](https://github.com/kubeflow/pipelines/pull/3040) ([jiezhang](https://github.com/jiezhang)) +- Updates for getting started page [\#3038](https://github.com/kubeflow/pipelines/pull/3038) 
([SinaChavoshi](https://github.com/SinaChavoshi)) +- Fix build failure [\#3035](https://github.com/kubeflow/pipelines/pull/3035) ([rui5i](https://github.com/rui5i)) +- \[Samples\] Update preloaded samples' descriptions [\#3034](https://github.com/kubeflow/pipelines/pull/3034) ([numerology](https://github.com/numerology)) +- add link to allow user easily report issue [\#3030](https://github.com/kubeflow/pipelines/pull/3030) ([rmgogogo](https://github.com/rmgogogo)) +- SDK - Compiler - Fixed ParallelFor argument resolving [\#3029](https://github.com/kubeflow/pipelines/pull/3029) ([Ark-kun](https://github.com/Ark-kun)) +- place holder for diagnose entry [\#3028](https://github.com/kubeflow/pipelines/pull/3028) ([rmgogogo](https://github.com/rmgogogo)) +- add myself to manifest/sample/test owner [\#3027](https://github.com/kubeflow/pipelines/pull/3027) ([rmgogogo](https://github.com/rmgogogo)) +- Add visualizations for Schema and ExampleAnomalies [\#3026](https://github.com/kubeflow/pipelines/pull/3026) ([Realsen](https://github.com/Realsen)) +- \[Testing\] Use google/cloud-sdk:279.0.0 to resolve workload identity flakiness [\#3019](https://github.com/kubeflow/pipelines/pull/3019) ([Bobgy](https://github.com/Bobgy)) +- \[Testing\] Use full scope cluster for testing to reduce flakiness [\#3018](https://github.com/kubeflow/pipelines/pull/3018) ([Bobgy](https://github.com/Bobgy)) +- \[Samples\] Update TFX module file [\#3017](https://github.com/kubeflow/pipelines/pull/3017) ([numerology](https://github.com/numerology)) +- \[Backend\] Improve parameter patching [\#3016](https://github.com/kubeflow/pipelines/pull/3016) ([numerology](https://github.com/numerology)) +- \[UI\] Fix side nav text alignment after adding start page [\#3015](https://github.com/kubeflow/pipelines/pull/3015) ([Bobgy](https://github.com/Bobgy)) +- \[UI\] Fix starting page casing [\#3012](https://github.com/kubeflow/pipelines/pull/3012) ([Bobgy](https://github.com/Bobgy)) +- Upload local file as new pipeline version Step 2 - Swagger generated code for UploadPipelineVersion method [\#3010](https://github.com/kubeflow/pipelines/pull/3010) ([jingzhang36](https://github.com/jingzhang36)) +- \[Backend\] Use pod namespace as default when not in multi user mode [\#3009](https://github.com/kubeflow/pipelines/pull/3009) ([Bobgy](https://github.com/Bobgy)) +- \[Testing\] Reduce flakiness caused by iam bindings [\#3008](https://github.com/kubeflow/pipelines/pull/3008) ([Bobgy](https://github.com/Bobgy)) +- Add numerology to SDK owner [\#3006](https://github.com/kubeflow/pipelines/pull/3006) ([numerology](https://github.com/numerology)) +- Fix issue with "grpc: received message larger than max ..." 
[\#3004](https://github.com/kubeflow/pipelines/pull/3004) ([chensun](https://github.com/chensun)) +- Deduce proxy type from the presence of client\_id [\#3003](https://github.com/kubeflow/pipelines/pull/3003) ([chensun](https://github.com/chensun)) +- \[Sample\] Update notebook sample link [\#3002](https://github.com/kubeflow/pipelines/pull/3002) ([numerology](https://github.com/numerology)) +- Upload local file as new pipeline version Step 1 [\#3001](https://github.com/kubeflow/pipelines/pull/3001) ([jingzhang36](https://github.com/jingzhang36)) +- Update auto-generated files \(auto-generated by swagger-gen\) [\#2998](https://github.com/kubeflow/pipelines/pull/2998) ([jingzhang36](https://github.com/jingzhang36)) +- Allow blank port for minio client to work with S3 [\#2996](https://github.com/kubeflow/pipelines/pull/2996) ([pavanaiyar](https://github.com/pavanaiyar)) +- \[Testing\]Add Dockerfile for KFP e2e test [\#2994](https://github.com/kubeflow/pipelines/pull/2994) ([rui5i](https://github.com/rui5i)) +- update changelog and document [\#2990](https://github.com/kubeflow/pipelines/pull/2990) ([rmgogogo](https://github.com/rmgogogo)) +- better default bucket name, less resource request [\#2989](https://github.com/kubeflow/pipelines/pull/2989) ([rmgogogo](https://github.com/rmgogogo)) +- fix for Init Action [\#2980](https://github.com/kubeflow/pipelines/pull/2980) ([rmgogogo](https://github.com/rmgogogo)) +- SDK - Components - create\_graph\_component\_from\_pipeline\_func now returns a function [\#2971](https://github.com/kubeflow/pipelines/pull/2971) ([Ark-kun](https://github.com/Ark-kun)) +- Include experiment ID as part of run table [\#2929](https://github.com/kubeflow/pipelines/pull/2929) ([frozeNinK](https://github.com/frozeNinK)) +- Bump tensorflow from 1.15.0 to 1.15.2 in /components/kubeflow/dnntrainer/src [\#2923](https://github.com/kubeflow/pipelines/pull/2923) ([dependabot[bot]](https://github.com/apps/dependabot)) +- Updated the API reference comments [\#2916](https://github.com/kubeflow/pipelines/pull/2916) ([OfficePop](https://github.com/OfficePop)) +- Samples - Updated the Data passing in python tutorial [\#2868](https://github.com/kubeflow/pipelines/pull/2868) ([Ark-kun](https://github.com/Ark-kun)) +- \[Sample\] CI Sample: helloworld [\#2833](https://github.com/kubeflow/pipelines/pull/2833) ([dldaisy](https://github.com/dldaisy)) +- Fix a typo in Tensorboard.tsx. Add a default TB version. 
[\#2832](https://github.com/kubeflow/pipelines/pull/2832) ([jingzhang36](https://github.com/jingzhang36)) +- \[api-server\] Object store folder path is configurable and can work with AWS \(secure and region flag, and IAM credentials\) [\#2080](https://github.com/kubeflow/pipelines/pull/2080) ([eterna2](https://github.com/eterna2)) + + +## [0.2.2](https://github.com/kubeflow/pipelines/tree/0.2.2) (2020-02-05) + +[Full Changelog](https://github.com/kubeflow/pipelines/compare/0.2.1...0.2.2) + +**Merged pull requests:** + +- better default bucket name, less resource request [\#2989](https://github.com/kubeflow/pipelines/pull/2989) ([rmgogogo](https://github.com/rmgogogo)) +- bump version to 0.2.2 [\#2988](https://github.com/kubeflow/pipelines/pull/2988) ([rmgogogo](https://github.com/rmgogogo)) +- \[SDK\] Fix CLI authentication [\#2986](https://github.com/kubeflow/pipelines/pull/2986) ([numerology](https://github.com/numerology)) +- Another fix of licenses [\#2984](https://github.com/kubeflow/pipelines/pull/2984) ([numerology](https://github.com/numerology)) +- Another license fix [\#2982](https://github.com/kubeflow/pipelines/pull/2982) ([numerology](https://github.com/numerology)) +- fix for Init Action [\#2980](https://github.com/kubeflow/pipelines/pull/2980) ([rmgogogo](https://github.com/rmgogogo)) +- \[Frontend\] Use latest kf frontend with bug fixes [\#2976](https://github.com/kubeflow/pipelines/pull/2976) ([Bobgy](https://github.com/Bobgy)) +- \[License\] Another license fix [\#2975](https://github.com/kubeflow/pipelines/pull/2975) ([numerology](https://github.com/numerology)) +- Fix third party license [\#2972](https://github.com/kubeflow/pipelines/pull/2972) ([numerology](https://github.com/numerology)) +- pin envoy [\#2968](https://github.com/kubeflow/pipelines/pull/2968) ([rmgogogo](https://github.com/rmgogogo)) +- Clean up metric columns [\#2964](https://github.com/kubeflow/pipelines/pull/2964) ([jingzhang36](https://github.com/jingzhang36)) +- Update changelog for 0.2.1 release. 
[\#2962](https://github.com/kubeflow/pipelines/pull/2962) ([neuromage](https://github.com/neuromage)) +- Bump version for release 0.2.1 of Pipelines [\#2961](https://github.com/kubeflow/pipelines/pull/2961) ([neuromage](https://github.com/neuromage)) +- Add support for automatic visualization of TFX artifacts [\#2957](https://github.com/kubeflow/pipelines/pull/2957) ([Realsen](https://github.com/Realsen)) + +## [0.2.1](https://github.com/kubeflow/pipelines/tree/0.2.1) (2020-02-02) +[Full Changelog](https://github.com/kubeflow/pipelines/compare/0.2.0...0.2.1) + +**Merged pull requests:** + +- Bump version for release 0.2.1 of Pipelines [\#2961](https://github.com/kubeflow/pipelines/pull/2961) ([neuromage](https://github.com/neuromage)) +- Release 85945e10920757efab999217c836403c024b09e3 [\#2960](https://github.com/kubeflow/pipelines/pull/2960) ([neuromage](https://github.com/neuromage)) +- \[Doc\] Improve vscode settings documentation [\#2959](https://github.com/kubeflow/pipelines/pull/2959) ([Bobgy](https://github.com/Bobgy)) +- \[UI\] Updating starting page UI links [\#2955](https://github.com/kubeflow/pipelines/pull/2955) ([SinaChavoshi](https://github.com/SinaChavoshi)) +- Adding diagnose me component to XGboost sample [\#2953](https://github.com/kubeflow/pipelines/pull/2953) ([SinaChavoshi](https://github.com/SinaChavoshi)) +- \[Frontend\] Change Tab name from source to YAML [\#2951](https://github.com/kubeflow/pipelines/pull/2951) ([numerology](https://github.com/numerology)) +- Bump TFX dependency to 0.21.0rc0 [\#2949](https://github.com/kubeflow/pipelines/pull/2949) ([numerology](https://github.com/numerology)) +- diagnose me component [\#2948](https://github.com/kubeflow/pipelines/pull/2948) ([SinaChavoshi](https://github.com/SinaChavoshi)) +- Visualizations - Updated the TFMA and TFDV versions [\#2946](https://github.com/kubeflow/pipelines/pull/2946) ([Ark-kun](https://github.com/Ark-kun)) +- Samples - Add and change preloaded samples [\#2944](https://github.com/kubeflow/pipelines/pull/2944) ([Ark-kun](https://github.com/Ark-kun)) +- Adding support for getting all configmaps [\#2941](https://github.com/kubeflow/pipelines/pull/2941) ([SinaChavoshi](https://github.com/SinaChavoshi)) +- \[Fix\]\[Test\] Fix post submit test on mkp deployment [\#2939](https://github.com/kubeflow/pipelines/pull/2939) ([rui5i](https://github.com/rui5i)) +- \[Backend\] Patch default bucket name and project ID [\#2938](https://github.com/kubeflow/pipelines/pull/2938) ([numerology](https://github.com/numerology)) +- \[UI\] Getting started page for hosted pipelines [\#2935](https://github.com/kubeflow/pipelines/pull/2935) ([Bobgy](https://github.com/Bobgy)) +- \[MKP\] Add initializaion action for MKP deployer [\#2932](https://github.com/kubeflow/pipelines/pull/2932) ([numerology](https://github.com/numerology)) +- Metadata: Update Metadata server version to v0.21.1 [\#2931](https://github.com/kubeflow/pipelines/pull/2931) ([dushyanthsc](https://github.com/dushyanthsc)) +- \[UI\] Adding Troubleshooting guide link to Error / Warning Banner [\#2930](https://github.com/kubeflow/pipelines/pull/2930) ([SinaChavoshi](https://github.com/SinaChavoshi)) +- \[MLMD\]\[Lineage\] Integrate Lineage View into KFP [\#2918](https://github.com/kubeflow/pipelines/pull/2918) ([kwasi](https://github.com/kwasi)) +- Tests - Fixed presubmit tests [\#2912](https://github.com/kubeflow/pipelines/pull/2912) ([Ark-kun](https://github.com/Ark-kun)) +- SDK - Components - Added the create\_component\_from\_func alias 
[\#2911](https://github.com/kubeflow/pipelines/pull/2911) ([Ark-kun](https://github.com/Ark-kun)) +- \[Fix\] Temporarily pin beam version to 2.17 [\#2909](https://github.com/kubeflow/pipelines/pull/2909) ([numerology](https://github.com/numerology)) +- Backend - Annotate Workflow with the run name [\#2902](https://github.com/kubeflow/pipelines/pull/2902) ([Ark-kun](https://github.com/Ark-kun)) +- Metadata: Updating metadata server version from 0.14.0 to v0.21.0 [\#2901](https://github.com/kubeflow/pipelines/pull/2901) ([dushyanthsc](https://github.com/dushyanthsc)) +- \[Test\] Read mkp tools image from manifest docker file [\#2900](https://github.com/kubeflow/pipelines/pull/2900) ([rui5i](https://github.com/rui5i)) +- \[SDK\] kfp client should not silently fail using gcloud [\#2896](https://github.com/kubeflow/pipelines/pull/2896) ([Bobgy](https://github.com/Bobgy)) +- \[UI\] Add tooltips to table name columns so they can be read fully [\#2894](https://github.com/kubeflow/pipelines/pull/2894) ([Bobgy](https://github.com/Bobgy)) +- Have custom visualization on by default [\#2891](https://github.com/kubeflow/pipelines/pull/2891) ([jingzhang36](https://github.com/jingzhang36)) +- Add numerology to test OWNERS [\#2890](https://github.com/kubeflow/pipelines/pull/2890) ([numerology](https://github.com/numerology)) +- Manifests/Kustomize: Add metadata-writer to images [\#2882](https://github.com/kubeflow/pipelines/pull/2882) ([elikatsis](https://github.com/elikatsis)) +- changelog for 0.2.0 [\#2881](https://github.com/kubeflow/pipelines/pull/2881) ([rmgogogo](https://github.com/rmgogogo)) +- Pin sample doc links during backend image build [\#2880](https://github.com/kubeflow/pipelines/pull/2880) ([Bobgy](https://github.com/Bobgy)) +- Make created\_at value follows RFC3339 time format. 
[\#2876](https://github.com/kubeflow/pipelines/pull/2876) ([jiyongjung0](https://github.com/jiyongjung0)) +- SDK - Components refactoring [\#2865](https://github.com/kubeflow/pipelines/pull/2865) ([Ark-kun](https://github.com/Ark-kun)) +- SDK - Components - Made it easier to access component spec classes [\#2860](https://github.com/kubeflow/pipelines/pull/2860) ([Ark-kun](https://github.com/Ark-kun)) +- \[Test\] - Add kfp deployment method in test post submit script [\#2859](https://github.com/kubeflow/pipelines/pull/2859) ([rui5i](https://github.com/rui5i)) +- Kfp diagnose\_me pipeline [\#2843](https://github.com/kubeflow/pipelines/pull/2843) ([SinaChavoshi](https://github.com/SinaChavoshi)) +- \[Windows\] PowerShell script added to start proxies for front-end UI [\#2835](https://github.com/kubeflow/pipelines/pull/2835) ([avdaredevil](https://github.com/avdaredevil)) +- Add new Ops to Azure Databricks for KFP: secretscope, workspaceitem & dbfsblock [\#2817](https://github.com/kubeflow/pipelines/pull/2817) ([magencio](https://github.com/magencio)) + +## [0.2.0](https://github.com/kubeflow/pipelines/tree/0.2.0) (2020-01-20) + +[Full Changelog](https://github.com/kubeflow/pipelines/compare/0.1.40...0.2.0) + +**Merged pull requests:** + +- fix the metadata writer manifest [\#2878](https://github.com/kubeflow/pipelines/pull/2878) ([rmgogogo](https://github.com/rmgogogo)) +- Coverage reporting for frontend server [\#2877](https://github.com/kubeflow/pipelines/pull/2877) ([Bobgy](https://github.com/Bobgy)) +- enlarge timeout to 45m to avoid flakyness [\#2875](https://github.com/kubeflow/pipelines/pull/2875) ([rmgogogo](https://github.com/rmgogogo)) +- Better Hosted Release - automate things as possible as we can [\#2874](https://github.com/kubeflow/pipelines/pull/2874) ([rmgogogo](https://github.com/rmgogogo)) +- Build deployer for each post-submit to avoid manual work [\#2873](https://github.com/kubeflow/pipelines/pull/2873) ([rmgogogo](https://github.com/rmgogogo)) +- from 0.1.40 to 0.2.0 [\#2872](https://github.com/kubeflow/pipelines/pull/2872) ([rmgogogo](https://github.com/rmgogogo)) +- enable full scope for testing version [\#2871](https://github.com/kubeflow/pipelines/pull/2871) ([rmgogogo](https://github.com/rmgogogo)) +- Samples - Updated the TFX-KFP pipeline [\#2867](https://github.com/kubeflow/pipelines/pull/2867) ([Ark-kun](https://github.com/Ark-kun)) +- Frontend - Metadata - Use custom properties in addition to plain properties [\#2854](https://github.com/kubeflow/pipelines/pull/2854) ([Ark-kun](https://github.com/Ark-kun)) +- fix panic in Viewer cleanup [\#2853](https://github.com/kubeflow/pipelines/pull/2853) ([mattnworb](https://github.com/mattnworb)) +- \[Proxy\] Split domain name [\#2851](https://github.com/kubeflow/pipelines/pull/2851) ([numerology](https://github.com/numerology)) +- \[Test\] Add KFP MKP deployment for e2e tests [\#2850](https://github.com/kubeflow/pipelines/pull/2850) ([rui5i](https://github.com/rui5i)) +- Opt out proxy agent configmap [\#2849](https://github.com/kubeflow/pipelines/pull/2849) ([IronPan](https://github.com/IronPan)) +- \[Doc\] Update link of preload TFX sample [\#2839](https://github.com/kubeflow/pipelines/pull/2839) ([numerology](https://github.com/numerology)) +- Fix wrong example with container\_op [\#2838](https://github.com/kubeflow/pipelines/pull/2838) ([kim-sardine](https://github.com/kim-sardine)) +- \[UI\] Simplify start server script and fix local cluster name endpoint [\#2836](https://github.com/kubeflow/pipelines/pull/2836) 
([Bobgy](https://github.com/Bobgy)) +- Fix a typo in Tensorboard.tsx. Add a default TB version. [\#2832](https://github.com/kubeflow/pipelines/pull/2832) ([jingzhang36](https://github.com/jingzhang36)) +- \[UI\] Update node to 12 [\#2830](https://github.com/kubeflow/pipelines/pull/2830) ([Bobgy](https://github.com/Bobgy)) +- visualization server wants kubernetes serivce account too [\#2822](https://github.com/kubeflow/pipelines/pull/2822) ([jingzhang36](https://github.com/jingzhang36)) +- \[testing\] Fix e2e tests timed out, but there's empty test log [\#2810](https://github.com/kubeflow/pipelines/pull/2810) ([Bobgy](https://github.com/Bobgy)) +- \[Sample\] Update the Doc for TFX sample [\#2798](https://github.com/kubeflow/pipelines/pull/2798) ([numerology](https://github.com/numerology)) +- \[UI\] Add editor for json parameters in new run page [\#2747](https://github.com/kubeflow/pipelines/pull/2747) ([drewbutlerbb4](https://github.com/drewbutlerbb4)) +- Bump tensorflow from 1.12.1 to 1.15.0 in /components/kubeflow/dnntrainer/src [\#2743](https://github.com/kubeflow/pipelines/pull/2743) ([dependabot[bot]](https://github.com/apps/dependabot)) +- Update deployer base to 0.1.40 [\#2696](https://github.com/kubeflow/pipelines/pull/2696) ([rmgogogo](https://github.com/rmgogogo)) +- Service - Metadata writer [\#2674](https://github.com/kubeflow/pipelines/pull/2674) ([Ark-kun](https://github.com/Ark-kun)) +- Install application CRD and add pipeline application CR to pipeline standalone [\#2585](https://github.com/kubeflow/pipelines/pull/2585) ([IronPan](https://github.com/IronPan)) +- better handle inverse-proxy configmap for better GC [\#2391](https://github.com/kubeflow/pipelines/pull/2391) ([rmgogogo](https://github.com/rmgogogo)) + +## [0.1.40](https://github.com/kubeflow/pipelines/tree/0.1.40) (2020-01-09) + +[Full Changelog](https://github.com/kubeflow/pipelines/compare/0.1.39...0.1.40) + +**Merged pull requests:** + +- \[Backend\] Fix report workflow error messages [\#2815](https://github.com/kubeflow/pipelines/pull/2815) ([Bobgy](https://github.com/Bobgy)) +- Pin pyarrow version [\#2813](https://github.com/kubeflow/pipelines/pull/2813) ([numerology](https://github.com/numerology)) +- \[UI\] fix artifact handler query parameter key [\#2809](https://github.com/kubeflow/pipelines/pull/2809) ([Bobgy](https://github.com/Bobgy)) +- Fix node server typing problems [\#2807](https://github.com/kubeflow/pipelines/pull/2807) ([Bobgy](https://github.com/Bobgy)) +- add jiaxiao to the component owners [\#2804](https://github.com/kubeflow/pipelines/pull/2804) ([gaoning777](https://github.com/gaoning777)) +- \[Component\] Fix the component arguments [\#2803](https://github.com/kubeflow/pipelines/pull/2803) ([numerology](https://github.com/numerology)) +- update changelog [\#2801](https://github.com/kubeflow/pipelines/pull/2801) ([gaoning777](https://github.com/gaoning777)) +- \[Manifest/Doc\] Another version bumping [\#2797](https://github.com/kubeflow/pipelines/pull/2797) ([numerology](https://github.com/numerology)) +- Making json output format easier to read [\#2792](https://github.com/kubeflow/pipelines/pull/2792) ([SinaChavoshi](https://github.com/SinaChavoshi)) +- \[Doc\] Change sample/component/sdk documentation to not use `use\_gcp\_secret` [\#2782](https://github.com/kubeflow/pipelines/pull/2782) ([Bobgy](https://github.com/Bobgy)) +- SDK - Bumping the upper version of Kubernetes package [\#2780](https://github.com/kubeflow/pipelines/pull/2780) ([Ark-kun](https://github.com/Ark-kun)) +- Delete 
namespaced-install.yaml [\#2603](https://github.com/kubeflow/pipelines/pull/2603) ([IronPan](https://github.com/IronPan)) + +## [0.1.39](https://github.com/kubeflow/pipelines/tree/0.1.39) (2020-01-06) + +[Full Changelog](https://github.com/kubeflow/pipelines/compare/0.1.38...0.1.39) + +**Merged pull requests:** + +- release 0.1.39 [\#2799](https://github.com/kubeflow/pipelines/pull/2799) ([gaoning777](https://github.com/gaoning777)) +- \[Testing\] Reduce distracting command log [\#2796](https://github.com/kubeflow/pipelines/pull/2796) ([Bobgy](https://github.com/Bobgy)) +- SKL - DSL - Stabilized the PipelineVolume names [\#2794](https://github.com/kubeflow/pipelines/pull/2794) ([Ark-kun](https://github.com/Ark-kun)) +- \[Testing\] Save pod desc text and stackdriver link as artifacts [\#2791](https://github.com/kubeflow/pipelines/pull/2791) ([Bobgy](https://github.com/Bobgy)) +- \[UI\] Show corresponding GKE cluster name [\#2789](https://github.com/kubeflow/pipelines/pull/2789) ([Bobgy](https://github.com/Bobgy)) +- \[Clean-up\] Remove accidentally merged yaml file [\#2788](https://github.com/kubeflow/pipelines/pull/2788) ([numerology](https://github.com/numerology)) +- \[Sample tests\] Upgrade GCP sdk to 272 in sample test [\#2786](https://github.com/kubeflow/pipelines/pull/2786) ([numerology](https://github.com/numerology)) +- \[Manifest\] Bump standalone deployment version in doc [\#2785](https://github.com/kubeflow/pipelines/pull/2785) ([numerology](https://github.com/numerology)) +- \[Components\] Roll forward of \#2697 for real. [\#2779](https://github.com/kubeflow/pipelines/pull/2779) ([numerology](https://github.com/numerology)) +- \[Manifest\] fix metadata configmap [\#2778](https://github.com/kubeflow/pipelines/pull/2778) ([numerology](https://github.com/numerology)) +- Use new K8s version to improve workload identity stability [\#2777](https://github.com/kubeflow/pipelines/pull/2777) ([Bobgy](https://github.com/Bobgy)) +- Fix vscode README of prettier integration [\#2776](https://github.com/kubeflow/pipelines/pull/2776) ([Bobgy](https://github.com/Bobgy)) +- \[Sample\] Clean up accidentally committed file [\#2775](https://github.com/kubeflow/pipelines/pull/2775) ([Bobgy](https://github.com/Bobgy)) +- \[UI\] Fix UI crash when invalid pipeline uploaded [\#2774](https://github.com/kubeflow/pipelines/pull/2774) ([Bobgy](https://github.com/Bobgy)) +- \[Standalone\] Fix GCP cloudsql manifest [\#2772](https://github.com/kubeflow/pipelines/pull/2772) ([Bobgy](https://github.com/Bobgy)) +- Update version in generate\_api.sh [\#2770](https://github.com/kubeflow/pipelines/pull/2770) ([jingzhang36](https://github.com/jingzhang36)) +- \[UI\] Succeed snackbar shouldn't show when there is an error [\#2766](https://github.com/kubeflow/pipelines/pull/2766) ([Bobgy](https://github.com/Bobgy)) +- Adjust the namespace for Delete/Terminate/Retry run APIs [\#2765](https://github.com/kubeflow/pipelines/pull/2765) ([gaoning777](https://github.com/gaoning777)) +- update release changelog [\#2763](https://github.com/kubeflow/pipelines/pull/2763) ([gaoning777](https://github.com/gaoning777)) +- \[Component\] Rollforward of PR\#2697 [\#2760](https://github.com/kubeflow/pipelines/pull/2760) ([numerology](https://github.com/numerology)) +- Add min and max lines for displaying json editor [\#2752](https://github.com/kubeflow/pipelines/pull/2752) ([fenglixa](https://github.com/fenglixa)) +- \[Frontend\] unit tests for node server [\#2745](https://github.com/kubeflow/pipelines/pull/2745) 
([eterna2](https://github.com/eterna2)) +- Add step by step tutorial using mnist as use case [\#2716](https://github.com/kubeflow/pipelines/pull/2716) ([luotigerlsx](https://github.com/luotigerlsx)) +- \[SDK/compiler\] Sanitize op name for PipelineParam [\#2711](https://github.com/kubeflow/pipelines/pull/2711) ([numerology](https://github.com/numerology)) +- Support choosing tensorboard version from UI [\#2690](https://github.com/kubeflow/pipelines/pull/2690) ([dldaisy](https://github.com/dldaisy)) +- SDK - Unified the function signature parsing implementations [\#2689](https://github.com/kubeflow/pipelines/pull/2689) ([Ark-kun](https://github.com/Ark-kun)) +- support extra parameters for mysql connection [\#2668](https://github.com/kubeflow/pipelines/pull/2668) ([xaniasd](https://github.com/xaniasd)) +- End to end mnist pipeline use case [\#2628](https://github.com/kubeflow/pipelines/pull/2628) ([hougangliu](https://github.com/hougangliu)) + +## [0.1.38](https://github.com/kubeflow/pipelines/tree/0.1.38) (2019-12-19) + +[Full Changelog](https://github.com/kubeflow/pipelines/compare/0.1.37...0.1.38) + +**Merged pull requests:** + +- update manifest [\#2762](https://github.com/kubeflow/pipelines/pull/2762) ([gaoning777](https://github.com/gaoning777)) +- Release a8fcec5f702fc2528c87ed6fd698b9cfca8b509e [\#2761](https://github.com/kubeflow/pipelines/pull/2761) ([gaoning777](https://github.com/gaoning777)) +- Revert "Components - De-hardcoded the UI metadata file path in GCP co… [\#2759](https://github.com/kubeflow/pipelines/pull/2759) ([numerology](https://github.com/numerology)) +- Added alpha notice to READMEs for KFP Marketplace [\#2749](https://github.com/kubeflow/pipelines/pull/2749) ([sarahmaddox](https://github.com/sarahmaddox)) +- Move go license tools [\#2748](https://github.com/kubeflow/pipelines/pull/2748) ([Bobgy](https://github.com/Bobgy)) +- \[Test\] Pin the versions of coverage and coveralls [\#2740](https://github.com/kubeflow/pipelines/pull/2740) ([numerology](https://github.com/numerology)) +- SDK/DSL: Fix PipelineVolume name length [\#2739](https://github.com/kubeflow/pipelines/pull/2739) ([elikatsis](https://github.com/elikatsis)) +- Fix obsolete image cache when the same PR commit is tested with a new master [\#2738](https://github.com/kubeflow/pipelines/pull/2738) ([Bobgy](https://github.com/Bobgy)) +- Build - Fix building TF images [\#2736](https://github.com/kubeflow/pipelines/pull/2736) ([Ark-kun](https://github.com/Ark-kun)) +- Authorize other run api [\#2735](https://github.com/kubeflow/pipelines/pull/2735) ([gaoning777](https://github.com/gaoning777)) +- SDK - Bump version to 0.1.38 [\#2734](https://github.com/kubeflow/pipelines/pull/2734) ([Ark-kun](https://github.com/Ark-kun)) +- temporarily disable the jobservice in the multi-user mode [\#2733](https://github.com/kubeflow/pipelines/pull/2733) ([gaoning777](https://github.com/gaoning777)) +- Suggest run name based on the pipeline version used to create run [\#2731](https://github.com/kubeflow/pipelines/pull/2731) ([jingzhang36](https://github.com/jingzhang36)) +- \[Sample\] Add a notebook sample under parameterized\_tfx\_oss [\#2729](https://github.com/kubeflow/pipelines/pull/2729) ([numerology](https://github.com/numerology)) +- \[Doc\] Fix link validation complaint. 
+- \[UI\] Fix npm vulnerabilities [\#2724](https://github.com/kubeflow/pipelines/pull/2724) ([Bobgy](https://github.com/Bobgy))
+- Metadata: Adding metadata-grpc config map [\#2723](https://github.com/kubeflow/pipelines/pull/2723) ([dushyanthsc](https://github.com/dushyanthsc))
+- \[Doc\] Fix links in preload sample [\#2722](https://github.com/kubeflow/pipelines/pull/2722) ([numerology](https://github.com/numerology))
+- Format other frontend code using prettier [\#2717](https://github.com/kubeflow/pipelines/pull/2717) ([Bobgy](https://github.com/Bobgy))
+- update kfam service host env variable [\#2715](https://github.com/kubeflow/pipelines/pull/2715) ([gaoning777](https://github.com/gaoning777))
+- Fix combined unique keys [\#2712](https://github.com/kubeflow/pipelines/pull/2712) ([jingzhang36](https://github.com/jingzhang36))
+- Add samples to manage Azure Databricks in Kubeflow Pipelines [\#2709](https://github.com/kubeflow/pipelines/pull/2709) ([magencio](https://github.com/magencio))
+- Improve golang license CLI tools README [\#2707](https://github.com/kubeflow/pipelines/pull/2707) ([Bobgy](https://github.com/Bobgy))
+- Improve test cloudbuild yaml readability [\#2703](https://github.com/kubeflow/pipelines/pull/2703) ([Bobgy](https://github.com/Bobgy))
+- Components - De-hardcoded the UI metadata file path in GCP components [\#2697](https://github.com/kubeflow/pipelines/pull/2697) ([Ark-kun](https://github.com/Ark-kun))
+- \[Sample test\] Add parameterized\_tfx\_oss to 'normal' sample test corpus [\#2695](https://github.com/kubeflow/pipelines/pull/2695) ([numerology](https://github.com/numerology))
+- Separate run resources in namespaces [\#2694](https://github.com/kubeflow/pipelines/pull/2694) ([gaoning777](https://github.com/gaoning777))
+- SDK - Fixed the capitalization in \_python\_function\_name\_to\_component\_name [\#2688](https://github.com/kubeflow/pipelines/pull/2688) ([Ark-kun](https://github.com/Ark-kun))
+- 2682: Support a custom kube config location in K8sJobHelper [\#2683](https://github.com/kubeflow/pipelines/pull/2683) ([pahask8](https://github.com/pahask8))
+- TFjob v1 launcher [\#2677](https://github.com/kubeflow/pipelines/pull/2677) ([hougangliu](https://github.com/hougangliu))
+- \[UI\] Pass namespace to APIs [\#2676](https://github.com/kubeflow/pipelines/pull/2676) ([Bobgy](https://github.com/Bobgy))
+- Add pipeline version support to frontend [\#2667](https://github.com/kubeflow/pipelines/pull/2667) ([jingzhang36](https://github.com/jingzhang36))
+- Add ns in sdk [\#2665](https://github.com/kubeflow/pipelines/pull/2665) ([gaoning777](https://github.com/gaoning777))
+- Authorize create run requests [\#2663](https://github.com/kubeflow/pipelines/pull/2663) ([gaoning777](https://github.com/gaoning777))
+- \[manifest\] Added manifest for deploying on aws using s3 [\#2633](https://github.com/kubeflow/pipelines/pull/2633) ([eterna2](https://github.com/eterna2))
+- Migrate standalone deployment to workload identity on GCP [\#2619](https://github.com/kubeflow/pipelines/pull/2619) ([Bobgy](https://github.com/Bobgy))
+- SDK/Compiler - Preventing pipeline entrypoint template name from clashing with other template names [\#1555](https://github.com/kubeflow/pipelines/pull/1555) ([Ark-kun](https://github.com/Ark-kun))
+
+## [0.1.37](https://github.com/kubeflow/pipelines/tree/0.1.37) (2019-12-05)
+
+[Full Changelog](https://github.com/kubeflow/pipelines/compare/0.1.36...0.1.37)
+
+**Merged pull requests:**
+
+- update server and sdk version to 0.1.37 [\#2701](https://github.com/kubeflow/pipelines/pull/2701) ([hongye-sun](https://github.com/hongye-sun))
+- release-b63472062bd80737c7f39e0eda901db0fe23a5e0 [\#2700](https://github.com/kubeflow/pipelines/pull/2700) ([hongye-sun](https://github.com/hongye-sun))
+- Store ns in db [\#2698](https://github.com/kubeflow/pipelines/pull/2698) ([gaoning777](https://github.com/gaoning777))
+- fix visualization-server doc link [\#2681](https://github.com/kubeflow/pipelines/pull/2681) ([rmgogogo](https://github.com/rmgogogo))
+- Add readable step id for test cloudbuild steps [\#2673](https://github.com/kubeflow/pipelines/pull/2673) ([Bobgy](https://github.com/Bobgy))
+- Configure jest to hide noisy message about no coverage data on .d.ts files [\#2672](https://github.com/kubeflow/pipelines/pull/2672) ([Bobgy](https://github.com/Bobgy))
+- Components - TFX [\#2671](https://github.com/kubeflow/pipelines/pull/2671) ([Ark-kun](https://github.com/Ark-kun))
+- \[Release\] update changelog [\#2666](https://github.com/kubeflow/pipelines/pull/2666) ([numerology](https://github.com/numerology))
+- Components - Filesystem [\#2659](https://github.com/kubeflow/pipelines/pull/2659) ([Ark-kun](https://github.com/Ark-kun))
+- Components - Git clone [\#2658](https://github.com/kubeflow/pipelines/pull/2658) ([Ark-kun](https://github.com/Ark-kun))
+- \[UI\] Get kubeflow namespace from kfp UI [\#2655](https://github.com/kubeflow/pipelines/pull/2655) ([Bobgy](https://github.com/Bobgy))
+- add namespace to the resourcereference [\#2651](https://github.com/kubeflow/pipelines/pull/2651) ([gaoning777](https://github.com/gaoning777))
+- Remove travis CI frontend tests [\#2647](https://github.com/kubeflow/pipelines/pull/2647) ([Bobgy](https://github.com/Bobgy))
+- Frontend - Add support for Minio artifact URIs [\#2645](https://github.com/kubeflow/pipelines/pull/2645) ([Ark-kun](https://github.com/Ark-kun))
+- SDK - Python support for arbitrary secret, similar to ".use\_gcp\_secret\('user-gcp-sa'\)" [\#2639](https://github.com/kubeflow/pipelines/pull/2639) ([NikeNano](https://github.com/NikeNano))
+- Open version api in BE proto and auto-generate BE/FE api methods [\#2620](https://github.com/kubeflow/pipelines/pull/2620) ([jingzhang36](https://github.com/jingzhang36))
+
+## [0.1.36](https://github.com/kubeflow/pipelines/tree/0.1.36) (2019-11-26)
+
+[Full Changelog](https://github.com/kubeflow/pipelines/compare/0.1.35...0.1.36)
+
+**Merged pull requests:**
+
+- Bump version to 0.1.36 [\#2661](https://github.com/kubeflow/pipelines/pull/2661) ([numerology](https://github.com/numerology))
+- Release 9ad7d7dd9776ce75a83712f5723db2ef93ba5c26 [\#2660](https://github.com/kubeflow/pipelines/pull/2660) ([numerology](https://github.com/numerology))
+- Mkp better release - SemVersion [\#2656](https://github.com/kubeflow/pipelines/pull/2656) ([rmgogogo](https://github.com/rmgogogo))
+- api resource type conversion needs to convert pipeline version type [\#2648](https://github.com/kubeflow/pipelines/pull/2648) ([jingzhang36](https://github.com/jingzhang36))
+- SDK - Client - Fixed client on Windows [\#2646](https://github.com/kubeflow/pipelines/pull/2646) ([Ark-kun](https://github.com/Ark-kun))
+- Move frontend unit tests to prow [\#2637](https://github.com/kubeflow/pipelines/pull/2637) ([Bobgy](https://github.com/Bobgy))
+- SDK - Protobuf version of the component schema [\#2636](https://github.com/kubeflow/pipelines/pull/2636) ([Ark-kun](https://github.com/Ark-kun))
+- MDez patch 2 [\#2635](https://github.com/kubeflow/pipelines/pull/2635) ([OfficePop](https://github.com/OfficePop))
+- \[SDK\] Relax k8s sanitization [\#2634](https://github.com/kubeflow/pipelines/pull/2634) ([numerology](https://github.com/numerology))
+- Frontend server: add feature flag for kubeflow deployment [\#2631](https://github.com/kubeflow/pipelines/pull/2631) ([Bobgy](https://github.com/Bobgy))
+- \[UI\] Trim pipeline upload url before sending to backend [\#2630](https://github.com/kubeflow/pipelines/pull/2630) ([Bobgy](https://github.com/Bobgy))
+- Add support for user account auth with IAP [\#2626](https://github.com/kubeflow/pipelines/pull/2626) ([gaoning777](https://github.com/gaoning777))
+- \[UI\] Separate page state for each page instance [\#2622](https://github.com/kubeflow/pipelines/pull/2622) ([Bobgy](https://github.com/Bobgy))
+- Modernize samples/core/tfx-oss Notebook [\#2618](https://github.com/kubeflow/pipelines/pull/2618) ([ucdmkt](https://github.com/ucdmkt))
+- casting job\_config to QueryJobConfig if supplied by user [\#2616](https://github.com/kubeflow/pipelines/pull/2616) ([parthmishra](https://github.com/parthmishra))
+- Enable inverse proxy to access host network [\#2614](https://github.com/kubeflow/pipelines/pull/2614) ([IronPan](https://github.com/IronPan))
+- add community meeting/slack onto README [\#2613](https://github.com/kubeflow/pipelines/pull/2613) ([jessiezcc](https://github.com/jessiezcc))
+- Fix a few bugs in delete pipeline version that are discovered in integration test [\#2611](https://github.com/kubeflow/pipelines/pull/2611) ([jingzhang36](https://github.com/jingzhang36))
+- Adding missing kfp.cli.diagnose\_me package to setup.py Fixes \#2609 [\#2610](https://github.com/kubeflow/pipelines/pull/2610) ([stephenhenderson](https://github.com/stephenhenderson))
+- Changelog of 0.1.35 release [\#2608](https://github.com/kubeflow/pipelines/pull/2608) ([Bobgy](https://github.com/Bobgy))
+- Add Bobgy to OWNERS of kustomize manifests [\#2600](https://github.com/kubeflow/pipelines/pull/2600) ([Bobgy](https://github.com/Bobgy))
+- Fix error tfjob definition [\#2597](https://github.com/kubeflow/pipelines/pull/2597) ([hougangliu](https://github.com/hougangliu))
+- \[fix\] default yaml.dump to block style [\#2591](https://github.com/kubeflow/pipelines/pull/2591) ([l1990790120](https://github.com/l1990790120))
+- Katib experiment launcher [\#2577](https://github.com/kubeflow/pipelines/pull/2577) ([hougangliu](https://github.com/hougangliu))
+- Add a new field "TensorflowImage" to KFP viewer CRD file template. [\#2544](https://github.com/kubeflow/pipelines/pull/2544) ([jingzhang36](https://github.com/jingzhang36))
+
+## [0.1.35](https://github.com/kubeflow/pipelines/tree/0.1.35) (2019-11-13)
+
+[Full Changelog](https://github.com/kubeflow/pipelines/compare/0.1.34...0.1.35)
+
+**Merged pull requests:**
+
+- Release 9670cc1aadfbbed9c52b84ea859ea97aa81213ad [\#2599](https://github.com/kubeflow/pipelines/pull/2599) ([Bobgy](https://github.com/Bobgy))
+- Bump version to 0.1.35 [\#2598](https://github.com/kubeflow/pipelines/pull/2598) ([Bobgy](https://github.com/Bobgy))
+- \[Sample\] Lint and clean up parameterized TFX sample [\#2594](https://github.com/kubeflow/pipelines/pull/2594) ([numerology](https://github.com/numerology))
+- Fix transformer sample message [\#2592](https://github.com/kubeflow/pipelines/pull/2592) ([hougangliu](https://github.com/hougangliu))
+- \[UI\] Simplify firefox fix in pull/2580 [\#2587](https://github.com/kubeflow/pipelines/pull/2587) ([Bobgy](https://github.com/Bobgy))
+- 0.1.34 changelog [\#2586](https://github.com/kubeflow/pipelines/pull/2586) ([jingzhang36](https://github.com/jingzhang36))
+- Fix missing run and pipeline id when buttons are clicked before content load [\#2584](https://github.com/kubeflow/pipelines/pull/2584) ([drewbutlerbb4](https://github.com/drewbutlerbb4))
+- Add a minimum sample to print hello world [\#2583](https://github.com/kubeflow/pipelines/pull/2583) ([IronPan](https://github.com/IronPan))
+- \[UI\] Fix firefox compatibility [\#2580](https://github.com/kubeflow/pipelines/pull/2580) ([Bobgy](https://github.com/Bobgy))
+- \[Doc\] Minor fix to MKP secret setup guide [\#2576](https://github.com/kubeflow/pipelines/pull/2576) ([numerology](https://github.com/numerology))
+- \[SDK\] Fix withItem loop [\#2572](https://github.com/kubeflow/pipelines/pull/2572) ([numerology](https://github.com/numerology))
+- fix slow query when get runs [\#2559](https://github.com/kubeflow/pipelines/pull/2559) ([wstian](https://github.com/wstian))
+- MDez patch 1 [\#2550](https://github.com/kubeflow/pipelines/pull/2550) ([OfficePop](https://github.com/OfficePop))
+- Components - Google Cloud Storage [\#2532](https://github.com/kubeflow/pipelines/pull/2532) ([Ark-kun](https://github.com/Ark-kun))
+- SDK - Components - Fixed YAML formatting for some components [\#2529](https://github.com/kubeflow/pipelines/pull/2529) ([Ark-kun](https://github.com/Ark-kun))
+- Add arm64 support for ml-pipeline [\#2507](https://github.com/kubeflow/pipelines/pull/2507) ([MrXinWang](https://github.com/MrXinWang))
+- SDK - Containers - Renamed constructor parameter in the private ContainerBuilder class [\#2261](https://github.com/kubeflow/pipelines/pull/2261) ([Ark-kun](https://github.com/Ark-kun))
+- SDK - Containers - Getting rid of \_get\_default\_image\_builder [\#2255](https://github.com/kubeflow/pipelines/pull/2255) ([Ark-kun](https://github.com/Ark-kun))
+- \[pipeline-ui\] Retrieve pod logs from argo archive [\#2081](https://github.com/kubeflow/pipelines/pull/2081) ([eterna2](https://github.com/eterna2))
+- SDK/Components - Added Json Schema spec for the component format [\#669](https://github.com/kubeflow/pipelines/pull/669) ([Ark-kun](https://github.com/Ark-kun))
+
+## [0.1.34](https://github.com/kubeflow/pipelines/tree/0.1.34) (2019-11-07)
+
+[Full Changelog](https://github.com/kubeflow/pipelines/compare/0.1.33...0.1.34)
+
+**Merged pull requests:**
+
+- Fix inverse-proxy test cloudbuild yaml [\#2568](https://github.com/kubeflow/pipelines/pull/2568) ([Bobgy](https://github.com/Bobgy))
+- Increase kfp version in kustomize and sdk init [\#2567](https://github.com/kubeflow/pipelines/pull/2567) ([jingzhang36](https://github.com/jingzhang36))
+- Fix inverse proxy agent in e2e tests [\#2566](https://github.com/kubeflow/pipelines/pull/2566) ([Bobgy](https://github.com/Bobgy))
+- Increase kfp version in component sdk [\#2565](https://github.com/kubeflow/pipelines/pull/2565) ([jingzhang36](https://github.com/jingzhang36))
+- Fixes test flakiness caused by "unsupported operand type\(s\) for -=: 'Retry' and 'int'" [\#2563](https://github.com/kubeflow/pipelines/pull/2563) ([Bobgy](https://github.com/Bobgy))
+- Fix postsubmit test which times out [\#2557](https://github.com/kubeflow/pipelines/pull/2557) ([Bobgy](https://github.com/Bobgy))
+- \[Testing\] Describe unhealthy pods when tests clean up. [\#2554](https://github.com/kubeflow/pipelines/pull/2554) ([Bobgy](https://github.com/Bobgy))
+- Release d6d9d8da19f7110fff3a5ba713710402edaeee65 [\#2553](https://github.com/kubeflow/pipelines/pull/2553) ([jingzhang36](https://github.com/jingzhang36))
+- Fix more notebook license issues [\#2552](https://github.com/kubeflow/pipelines/pull/2552) ([Bobgy](https://github.com/Bobgy))
+- Fix wrong kfserving url [\#2551](https://github.com/kubeflow/pipelines/pull/2551) ([hougangliu](https://github.com/hougangliu))
+- \[Misc\] Add releasing note [\#2549](https://github.com/kubeflow/pipelines/pull/2549) ([numerology](https://github.com/numerology))
+- \[Frontend\] Fix run id not populated in NewRun page when clicked too fast bug [\#2547](https://github.com/kubeflow/pipelines/pull/2547) ([Bobgy](https://github.com/Bobgy))
+- Fix broken license link for jupyter notebook [\#2546](https://github.com/kubeflow/pipelines/pull/2546) ([Bobgy](https://github.com/Bobgy))
+- Adding diagnose\_me to CLI tool [\#2543](https://github.com/kubeflow/pipelines/pull/2543) ([SinaChavoshi](https://github.com/SinaChavoshi))
+- Update CHANGELOG.md [\#2541](https://github.com/kubeflow/pipelines/pull/2541) ([IronPan](https://github.com/IronPan))
+- Frontend format check should be before linting. [\#2525](https://github.com/kubeflow/pipelines/pull/2525) ([Bobgy](https://github.com/Bobgy))
+- SDK - Client - Fixed client on Windows [\#2524](https://github.com/kubeflow/pipelines/pull/2524) ([Ark-kun](https://github.com/Ark-kun))
+- Set DB GroupConcatMaxLen config to default of 4MB [\#2497](https://github.com/kubeflow/pipelines/pull/2497) ([krajasek](https://github.com/krajasek))
+- SDK - Client - Added API models to the generated API [\#2418](https://github.com/kubeflow/pipelines/pull/2418) ([Ark-kun](https://github.com/Ark-kun))
+- SDK - DSL - Deprecated ArtifactLocation [\#2326](https://github.com/kubeflow/pipelines/pull/2326) ([Ark-kun](https://github.com/Ark-kun))
+- Add external links that make some content easier to discover [\#2068](https://github.com/kubeflow/pipelines/pull/2068) ([Bobgy](https://github.com/Bobgy))
+
+## [0.1.33](https://github.com/kubeflow/pipelines/tree/0.1.33) (2019-11-02)
+
+[Full Changelog](https://github.com/kubeflow/pipelines/compare/0.1.32...0.1.33)
+
+**Merged pull requests:**
+
+- Update kustomization.yaml [\#2530](https://github.com/kubeflow/pipelines/pull/2530) ([IronPan](https://github.com/IronPan))
+- Update setup.py [\#2528](https://github.com/kubeflow/pipelines/pull/2528) ([IronPan](https://github.com/IronPan))
+- Update \_\_init\_\_.py [\#2527](https://github.com/kubeflow/pipelines/pull/2527) ([IronPan](https://github.com/IronPan))
+- \[Sample\] Align preload TFX sample with TFX head [\#2526](https://github.com/kubeflow/pipelines/pull/2526) ([numerology](https://github.com/numerology))
+- SDK - Compiler - Fixed failures on Jinja placeholders [\#2522](https://github.com/kubeflow/pipelines/pull/2522) ([Ark-kun](https://github.com/Ark-kun))
+- \[Test\] Fix TFX related Travis tests [\#2521](https://github.com/kubeflow/pipelines/pull/2521) ([numerology](https://github.com/numerology))
+- update location for logo [\#2520](https://github.com/kubeflow/pipelines/pull/2520) ([IronPan](https://github.com/IronPan))
+- \[MKP\] Reduce the logo image size. [\#2519](https://github.com/kubeflow/pipelines/pull/2519) ([numerology](https://github.com/numerology))
+- Frontend - Added support for https artifact links [\#2517](https://github.com/kubeflow/pipelines/pull/2517) ([Ark-kun](https://github.com/Ark-kun))
+- Pin tensorboard version to 1.13.2 [\#2513](https://github.com/kubeflow/pipelines/pull/2513) ([IronPan](https://github.com/IronPan))
+- clean up viewer crd spec [\#2511](https://github.com/kubeflow/pipelines/pull/2511) ([IronPan](https://github.com/IronPan))
+- remove unnecessary namespace in UI rolebinding spec [\#2510](https://github.com/kubeflow/pipelines/pull/2510) ([IronPan](https://github.com/IronPan))
+- \[UI\] Fix metadata tabs loading state [\#2508](https://github.com/kubeflow/pipelines/pull/2508) ([Bobgy](https://github.com/Bobgy))
+- Regenerate api since https://github.com/kubeflow/pipelines/pull/2445 changed api proto [\#2506](https://github.com/kubeflow/pipelines/pull/2506) ([jingzhang36](https://github.com/jingzhang36))
+- \[Sample\] Replace deprecated KubeflowRunner in TFX sample [\#2499](https://github.com/kubeflow/pipelines/pull/2499) ([numerology](https://github.com/numerology))
+- Samples - Renamed component build to container build [\#2496](https://github.com/kubeflow/pipelines/pull/2496) ([Ark-kun](https://github.com/Ark-kun))
+- \[Frontend\] Enlarge choose pipeline dialog to show more description [\#2494](https://github.com/kubeflow/pipelines/pull/2494) ([Bobgy](https://github.com/Bobgy))
+- update inverse proxy custom permission setting and role setting [\#2493](https://github.com/kubeflow/pipelines/pull/2493) ([rmgogogo](https://github.com/rmgogogo))
+- Fix pipeline description 255 characters length limit [\#2492](https://github.com/kubeflow/pipelines/pull/2492) ([Bobgy](https://github.com/Bobgy))
+- \[MKP\] Fix metadata DB configmap [\#2491](https://github.com/kubeflow/pipelines/pull/2491) ([numerology](https://github.com/numerology))
+- \[Sample\] Add the run ID place holder to TFX sample, fix metadb config in preload sample as well [\#2487](https://github.com/kubeflow/pipelines/pull/2487) ([numerology](https://github.com/numerology))
+- Fix inverse proxy matching regex [\#2486](https://github.com/kubeflow/pipelines/pull/2486) ([IronPan](https://github.com/IronPan))
+- update base image to fix the GKE GC issue for marketplace deployment [\#2484](https://github.com/kubeflow/pipelines/pull/2484) ([IronPan](https://github.com/IronPan))
+- better doc for MKP-KFP deployment [\#2481](https://github.com/kubeflow/pipelines/pull/2481) ([rmgogogo](https://github.com/rmgogogo))
+- \[Frontend\] Fix log viewer cannot scroll horizontally + other minor issues [\#2480](https://github.com/kubeflow/pipelines/pull/2480) ([Bobgy](https://github.com/Bobgy))
+- \[Sample\] Update pre-load TFX::OSS sample [\#2476](https://github.com/kubeflow/pipelines/pull/2476) ([numerology](https://github.com/numerology))
+- SDK - Python components - Fixed bug when mixing file outputs with return value outputs [\#2473](https://github.com/kubeflow/pipelines/pull/2473) ([Ark-kun](https://github.com/Ark-kun))
+- Update samples/core/ai\_platform pipeline to follow data dependency [\#2472](https://github.com/kubeflow/pipelines/pull/2472) ([ucdmkt](https://github.com/ucdmkt))
+- Add option to hide tensorboard artifact [\#2466](https://github.com/kubeflow/pipelines/pull/2466) ([hlu09](https://github.com/hlu09))
+- Release notes for 0.1.32 [\#2465](https://github.com/kubeflow/pipelines/pull/2465) ([hongye-sun](https://github.com/hongye-sun))
+- \[Frontend\] Update CONTRIBUTING.md with frontend code style info [\#2464](https://github.com/kubeflow/pipelines/pull/2464) ([Bobgy](https://github.com/Bobgy))
+- \[Frontend\] Check format in travis CI [\#2463](https://github.com/kubeflow/pipelines/pull/2463) ([Bobgy](https://github.com/Bobgy))
+- Format all source files under frontend/src using prettier [\#2462](https://github.com/kubeflow/pipelines/pull/2462) ([Bobgy](https://github.com/Bobgy))
+- \[Frontend\] UI shows specified task display name in PipelineDetail page [\#2459](https://github.com/kubeflow/pipelines/pull/2459) ([Bobgy](https://github.com/Bobgy))
+- clusterrole for pipeline-runner with seldondeployments [\#2458](https://github.com/kubeflow/pipelines/pull/2458) ([MingfeiPan](https://github.com/MingfeiPan))
+- Use string literals for reference resource relation and for reference resource type in frontend [\#2453](https://github.com/kubeflow/pipelines/pull/2453) ([jingzhang36](https://github.com/jingzhang36))
+- SDK - Components - Added type to graph input references [\#2451](https://github.com/kubeflow/pipelines/pull/2451) ([Ark-kun](https://github.com/Ark-kun))
+- Fix documentation for filter.proto [\#2447](https://github.com/kubeflow/pipelines/pull/2447) ([neuromage](https://github.com/neuromage))
+- \[Request for comments\] Add config for yapf and pylintrc [\#2446](https://github.com/kubeflow/pipelines/pull/2446) ([numerology](https://github.com/numerology))
+- Runs and jobs can be created from pipeline version [\#2445](https://github.com/kubeflow/pipelines/pull/2445) ([jingzhang36](https://github.com/jingzhang36))
+- Fix CustomTable.tsx layout problems [\#2444](https://github.com/kubeflow/pipelines/pull/2444) ([Bobgy](https://github.com/Bobgy))
+- Add --bind\_all option for tensorboard [\#2441](https://github.com/kubeflow/pipelines/pull/2441) ([daikeshi](https://github.com/daikeshi))
+- \[Test\] Fix post-submit test [\#2439](https://github.com/kubeflow/pipelines/pull/2439) ([numerology](https://github.com/numerology))
+- SDK - Client - Makes the create\_run output nicer [\#2438](https://github.com/kubeflow/pipelines/pull/2438) ([Ark-kun](https://github.com/Ark-kun))
+- SDK - Setup - Added cloudpickle to requirements [\#2437](https://github.com/kubeflow/pipelines/pull/2437) ([Ark-kun](https://github.com/Ark-kun))
+- Add owner files for marketplace deployment [\#2436](https://github.com/kubeflow/pipelines/pull/2436) ([IronPan](https://github.com/IronPan))
+- Update backend OWNERS [\#2435](https://github.com/kubeflow/pipelines/pull/2435) ([IronPan](https://github.com/IronPan))
+- Diagnose me dev env [\#2425](https://github.com/kubeflow/pipelines/pull/2425) ([SinaChavoshi](https://github.com/SinaChavoshi))
+- \[Doc\] Update permission requirement in README [\#2422](https://github.com/kubeflow/pipelines/pull/2422) ([numerology](https://github.com/numerology))
+- Adding the core libraries for diagnose\_me tool. [\#2417](https://github.com/kubeflow/pipelines/pull/2417) ([SinaChavoshi](https://github.com/SinaChavoshi))
+- \[Doc\] Minor doc update [\#2394](https://github.com/kubeflow/pipelines/pull/2394) ([numerology](https://github.com/numerology))
+- update link so that user can easily jump to CAIP after deployment [\#2377](https://github.com/kubeflow/pipelines/pull/2377) ([rmgogogo](https://github.com/rmgogogo))
+- \[Frontend\] Make links in pipeline description clickable [\#2376](https://github.com/kubeflow/pipelines/pull/2376) ([Bobgy](https://github.com/Bobgy))
+- \[Doc\] Fix a typo in MKP guide [\#2342](https://github.com/kubeflow/pipelines/pull/2342) ([numerology](https://github.com/numerology))
+- SDK/CLI: Implement kfp pipeline group [\#2340](https://github.com/kubeflow/pipelines/pull/2340) ([elikatsis](https://github.com/elikatsis))
+- SDK - Tests - Fixed most of the test warnings [\#2336](https://github.com/kubeflow/pipelines/pull/2336) ([Ark-kun](https://github.com/Ark-kun))
+- SDK - Refactoring - Split the K8sHelper class [\#2333](https://github.com/kubeflow/pipelines/pull/2333) ([Ark-kun](https://github.com/Ark-kun))
+- tech writer edits [\#2332](https://github.com/kubeflow/pipelines/pull/2332) ([jay-saldanha](https://github.com/jay-saldanha))
+- Tests - Use base image for frontend tests [\#190](https://github.com/kubeflow/pipelines/pull/190) ([Ark-kun](https://github.com/Ark-kun))
+
+## [0.1.32](https://github.com/kubeflow/pipelines/tree/0.1.32) (2019-10-18)
+
+[Full Changelog](https://github.com/kubeflow/pipelines/compare/0.1.31...0.1.32)
+
+**Merged pull requests:**
+
+- Update sdk release version [\#2434](https://github.com/kubeflow/pipelines/pull/2434) ([hongye-sun](https://github.com/hongye-sun))
+- Release e9b96de317989a9673ef88d88fb9dab9dac3005f [\#2433](https://github.com/kubeflow/pipelines/pull/2433) ([hongye-sun](https://github.com/hongye-sun))
+- \[Frontend\] Configure tslint for better DX [\#2431](https://github.com/kubeflow/pipelines/pull/2431) ([Bobgy](https://github.com/Bobgy))
+- \[Frontend\] Upgrade typescript to 3.6 [\#2428](https://github.com/kubeflow/pipelines/pull/2428) ([Bobgy](https://github.com/Bobgy))
+- SDK - DSL - Make is\_exit\_handler unnecessary in ContainerOp [\#2411](https://github.com/kubeflow/pipelines/pull/2411) ([Ark-kun](https://github.com/Ark-kun))
+- \[Frontend\] Prettier config to be consistent with existing code style [\#2409](https://github.com/kubeflow/pipelines/pull/2409) ([Bobgy](https://github.com/Bobgy))
+- tech writer edits [\#2403](https://github.com/kubeflow/pipelines/pull/2403) ([jay-saldanha](https://github.com/jay-saldanha))
+- \[Test/Sample test\] Fix model version in AI platform sample [\#2400](https://github.com/kubeflow/pipelines/pull/2400) ([numerology](https://github.com/numerology))
+- Update Watson ML default framework version [\#2398](https://github.com/kubeflow/pipelines/pull/2398) ([Tomcli](https://github.com/Tomcli))
+- Add Tomcli as kfs component reviewer [\#2396](https://github.com/kubeflow/pipelines/pull/2396) ([Tomcli](https://github.com/Tomcli))
+- License crawler for third party golang libraries [\#2393](https://github.com/kubeflow/pipelines/pull/2393) ([Bobgy](https://github.com/Bobgy))
+- quick custom spec fix [\#2390](https://github.com/kubeflow/pipelines/pull/2390) ([animeshsingh](https://github.com/animeshsingh))
+- add test config comment [\#2389](https://github.com/kubeflow/pipelines/pull/2389) ([gaoning777](https://github.com/gaoning777))
+- Remove jingzhang36 and rmgogogo from frontend reviewer list [\#2388](https://github.com/kubeflow/pipelines/pull/2388) ([Bobgy](https://github.com/Bobgy))
+- enable the check for dataflow [\#2387](https://github.com/kubeflow/pipelines/pull/2387) ([gaoning777](https://github.com/gaoning777))
+- Update samples/core/tfx-oss to tfx==0.14.0 and kfp=0.1.31 [\#2385](https://github.com/kubeflow/pipelines/pull/2385) ([ucdmkt](https://github.com/ucdmkt))
+- \[Sample\] Add back visualization in XGBoost sample [\#2384](https://github.com/kubeflow/pipelines/pull/2384) ([numerology](https://github.com/numerology))
+- move favicon path, root is override by inverse proxy [\#2382](https://github.com/kubeflow/pipelines/pull/2382) ([rmgogogo](https://github.com/rmgogogo))
+- \[Frontend\] Script and documentation to start a frontend dev env that works with all API endpoints [\#2381](https://github.com/kubeflow/pipelines/pull/2381) ([Bobgy](https://github.com/Bobgy))
+- add animesh to the approvers of the kfserving [\#2380](https://github.com/kubeflow/pipelines/pull/2380) ([gaoning777](https://github.com/gaoning777))
+- SDK - Added version [\#2374](https://github.com/kubeflow/pipelines/pull/2374) ([Ark-kun](https://github.com/Ark-kun))
+- tech writer edits [\#2373](https://github.com/kubeflow/pipelines/pull/2373) ([jay-saldanha](https://github.com/jay-saldanha))
+- \[Samples\] Add numerology as samples/OWNERS [\#2371](https://github.com/kubeflow/pipelines/pull/2371) ([numerology](https://github.com/numerology))
+- \[Frontend\] Fix cannot copy logs in LogViewer when scrolling [\#2370](https://github.com/kubeflow/pipelines/pull/2370) ([Bobgy](https://github.com/Bobgy))
+- KFServing move to v1alpha2 [\#2369](https://github.com/kubeflow/pipelines/pull/2369) ([animeshsingh](https://github.com/animeshsingh))
+- Components - Updated the gcp dataproc create\_cluster component image [\#2366](https://github.com/kubeflow/pipelines/pull/2366) ([Ark-kun](https://github.com/Ark-kun))
+- \[Doc\] Fix some description of preload samples [\#2361](https://github.com/kubeflow/pipelines/pull/2361) ([numerology](https://github.com/numerology))
+- \[Sample\] Improve tfx oss sample [\#2360](https://github.com/kubeflow/pipelines/pull/2360) ([numerology](https://github.com/numerology))
+- add cloud-platform scope in the test to reclaim the ai platform sample models [\#2355](https://github.com/kubeflow/pipelines/pull/2355) ([gaoning777](https://github.com/gaoning777))
+- Fix potential issue of the ai platform sample when running it in the ai platform notebook [\#2349](https://github.com/kubeflow/pipelines/pull/2349) ([gaoning777](https://github.com/gaoning777))
+- Typo in Data passing in python components.ipynb [\#2347](https://github.com/kubeflow/pipelines/pull/2347) ([pingsutw](https://github.com/pingsutw))
+- \[Test\] Add unittest against TFX [\#2346](https://github.com/kubeflow/pipelines/pull/2346) ([numerology](https://github.com/numerology))
+- SDK - Python components - Fixed handling multiline decorators [\#2345](https://github.com/kubeflow/pipelines/pull/2345) ([Ark-kun](https://github.com/Ark-kun))
+- \[License\] Fix third-party license [\#2344](https://github.com/kubeflow/pipelines/pull/2344) ([numerology](https://github.com/numerology))
+- Fix pipeline cannot run bug when using marketplace managed storage [\#2341](https://github.com/kubeflow/pipelines/pull/2341) ([Bobgy](https://github.com/Bobgy))
+- enlarge MKP cluster constraint [\#2339](https://github.com/kubeflow/pipelines/pull/2339) ([rmgogogo](https://github.com/rmgogogo))
+- Add pipeline version api methods [\#2338](https://github.com/kubeflow/pipelines/pull/2338) ([jingzhang36](https://github.com/jingzhang36))
+- tech writer edits [\#2331](https://github.com/kubeflow/pipelines/pull/2331) ([jay-saldanha](https://github.com/jay-saldanha))
+- Add sample test for multiple output [\#2328](https://github.com/kubeflow/pipelines/pull/2328) ([gaoning777](https://github.com/gaoning777))
+- add ai\_platform test [\#2327](https://github.com/kubeflow/pipelines/pull/2327) ([gaoning777](https://github.com/gaoning777))
+- Tests - When testing SDK install it using pip [\#2325](https://github.com/kubeflow/pipelines/pull/2325) ([Ark-kun](https://github.com/Ark-kun))
+- tech writer edits [\#2324](https://github.com/kubeflow/pipelines/pull/2324) ([jay-saldanha](https://github.com/jay-saldanha))
+- SDK - Compiler - Added the component spec annotations to the compiled workflow [\#2323](https://github.com/kubeflow/pipelines/pull/2323) ([Ark-kun](https://github.com/Ark-kun))
+- \[SDK/Compiler\] Add \_create\_and\_write\_workflow method [\#2321](https://github.com/kubeflow/pipelines/pull/2321) ([numerology](https://github.com/numerology))
+- \[Sample\] Add new TFX::OSS sample [\#2319](https://github.com/kubeflow/pipelines/pull/2319) ([numerology](https://github.com/numerology))
+- SDK - Containers - Made python package installation more robust [\#2316](https://github.com/kubeflow/pipelines/pull/2316) ([Ark-kun](https://github.com/Ark-kun))
+- Extend KFServing component with autoscaling and server mode [\#2315](https://github.com/kubeflow/pipelines/pull/2315) ([Tomcli](https://github.com/Tomcli))
+- SDK - Tests - Test creating component from the real AutoML pipeline [\#2314](https://github.com/kubeflow/pipelines/pull/2314) ([Ark-kun](https://github.com/Ark-kun))
+- tech writer edits [\#2313](https://github.com/kubeflow/pipelines/pull/2313) ([jay-saldanha](https://github.com/jay-saldanha))
+- \[MKP/doc\] Update doc for changes of service account credential. [\#2309](https://github.com/kubeflow/pipelines/pull/2309) ([numerology](https://github.com/numerology))
+- \[MKP\] Remove service account credential from deployment page. [\#2308](https://github.com/kubeflow/pipelines/pull/2308) ([numerology](https://github.com/numerology))
+- SDK/DSL: ContainerOp.add\_pvolume - Fix volume passed in add\_volume [\#2306](https://github.com/kubeflow/pipelines/pull/2306) ([elikatsis](https://github.com/elikatsis))
+- \[Frontend\] Node detail view now can show workflow input/output artifacts [\#2305](https://github.com/kubeflow/pipelines/pull/2305) ([eterna2](https://github.com/eterna2))
+- SDK - Compiler - Fixed deprecation warning when calling compile [\#2303](https://github.com/kubeflow/pipelines/pull/2303) ([Ark-kun](https://github.com/Ark-kun))
+- tech writer edits [\#2301](https://github.com/kubeflow/pipelines/pull/2301) ([jay-saldanha](https://github.com/jay-saldanha))
+- \[Component\] Add VPC Interface Endpoint Support for SageMaker [\#2299](https://github.com/kubeflow/pipelines/pull/2299) ([RedbackThomson](https://github.com/RedbackThomson))
+- SDK - Compiler - Fix bugs in the data passing rewriter [\#2297](https://github.com/kubeflow/pipelines/pull/2297) ([deepio-oc](https://github.com/deepio-oc))
+- Add CMLE deploy comment missing parameters [\#2296](https://github.com/kubeflow/pipelines/pull/2296) ([hongye-sun](https://github.com/hongye-sun))
+- Samples - Simplified pipeline submission code in samples [\#2293](https://github.com/kubeflow/pipelines/pull/2293) ([Ark-kun](https://github.com/Ark-kun))
+- SDK - Client - Added a way to set experiment name using environment variables [\#2292](https://github.com/kubeflow/pipelines/pull/2292) ([Ark-kun](https://github.com/Ark-kun))
+- tech writer edits [\#2291](https://github.com/kubeflow/pipelines/pull/2291) ([jay-saldanha](https://github.com/jay-saldanha))
+- \[MKP\] Fix gcr paths in values.yaml [\#2289](https://github.com/kubeflow/pipelines/pull/2289) ([numerology](https://github.com/numerology))
+- fix for MKP [\#2288](https://github.com/kubeflow/pipelines/pull/2288) ([rmgogogo](https://github.com/rmgogogo))
+- tech writer edits [\#2285](https://github.com/kubeflow/pipelines/pull/2285) ([jay-saldanha](https://github.com/jay-saldanha))
+- Disable cloudsql and update to v0.1.31 [\#2284](https://github.com/kubeflow/pipelines/pull/2284) ([rmgogogo](https://github.com/rmgogogo))
+- tech writer edits [\#2282](https://github.com/kubeflow/pipelines/pull/2282) ([jay-saldanha](https://github.com/jay-saldanha))
+- Remove usage of deprecated ContainerOp methods in use\_gcp\_secret [\#2280](https://github.com/kubeflow/pipelines/pull/2280) ([andrewsmartin](https://github.com/andrewsmartin))
+- Samples - Switched the build\_component sample to the new container API [\#2279](https://github.com/kubeflow/pipelines/pull/2279) ([Ark-kun](https://github.com/Ark-kun))
+- Components - Added the "AutoML Export data to GCS" component [\#2278](https://github.com/kubeflow/pipelines/pull/2278) ([Ark-kun](https://github.com/Ark-kun))
+- SDK - Components - Deprecated build\_python\_component [\#2277](https://github.com/kubeflow/pipelines/pull/2277) ([Ark-kun](https://github.com/Ark-kun))
+- SDK - Containers - Deprecated build\_docker\_image [\#2276](https://github.com/kubeflow/pipelines/pull/2276) ([Ark-kun](https://github.com/Ark-kun))
+- Refactor resource op sample for sample test coverage [\#2274](https://github.com/kubeflow/pipelines/pull/2274) ([numerology](https://github.com/numerology))
+- SDK - Components - Creating graph components from python pipeline function [\#2273](https://github.com/kubeflow/pipelines/pull/2273) ([Ark-kun](https://github.com/Ark-kun))
+- SDK - Components - Verify the object type when serializing primitive arguments [\#2272](https://github.com/kubeflow/pipelines/pull/2272) ([Ark-kun](https://github.com/Ark-kun))
+- SDK - Compiler - Make it possible to create more portable pipelines [\#2271](https://github.com/kubeflow/pipelines/pull/2271) ([Ark-kun](https://github.com/Ark-kun))
+- SDK - Components - Reorganized TaskSpec execution options [\#2270](https://github.com/kubeflow/pipelines/pull/2270) ([Ark-kun](https://github.com/Ark-kun))
+- SDK - Components - Fixed small bugs in graph component resolving [\#2269](https://github.com/kubeflow/pipelines/pull/2269) ([Ark-kun](https://github.com/Ark-kun))
+- SDK - Notebooks - Deprecated the docker magic [\#2266](https://github.com/kubeflow/pipelines/pull/2266) ([Ark-kun](https://github.com/Ark-kun))
+- SDK - Components - component\_ref.name should only be set when component was loaded by name [\#2265](https://github.com/kubeflow/pipelines/pull/2265) ([Ark-kun](https://github.com/Ark-kun))
+- SDK - Components - Added the ComponentSpec.save method [\#2264](https://github.com/kubeflow/pipelines/pull/2264) ([Ark-kun](https://github.com/Ark-kun))
+- Components - Removed trailing whitespace from AutoML components code [\#2263](https://github.com/kubeflow/pipelines/pull/2263) ([Ark-kun](https://github.com/Ark-kun))
+- SDK - Components - Restored attribute order when generating component.yaml files [\#2262](https://github.com/kubeflow/pipelines/pull/2262) ([Ark-kun](https://github.com/Ark-kun))
+- SDK - Lightweight - Convert the names of file inputs and outputs [\#2260](https://github.com/kubeflow/pipelines/pull/2260) ([Ark-kun](https://github.com/Ark-kun))
+- SDK - Compiler - Fixed small bug in data passing rewriter [\#2259](https://github.com/kubeflow/pipelines/pull/2259) ([Ark-kun](https://github.com/Ark-kun))
+- Samples - Added the data passing tutorial [\#2258](https://github.com/kubeflow/pipelines/pull/2258) ([Ark-kun](https://github.com/Ark-kun))
+- SDK - Components - Deprecate the get and set methods for default image in favor of plain variable [\#2257](https://github.com/kubeflow/pipelines/pull/2257) ([Ark-kun](https://github.com/Ark-kun))
+- SDK - Containers - Getting namespace lazily [\#2256](https://github.com/kubeflow/pipelines/pull/2256) ([Ark-kun](https://github.com/Ark-kun))
+- remove default namespace [\#2250](https://github.com/kubeflow/pipelines/pull/2250) ([SinaChavoshi](https://github.com/SinaChavoshi))
+- SDK - Lightweight - Added package installation support to func\_to\_container\_op [\#2245](https://github.com/kubeflow/pipelines/pull/2245) ([Ark-kun](https://github.com/Ark-kun))
+- SDK: fix label check for ContainerOP entities [\#2243](https://github.com/kubeflow/pipelines/pull/2243) ([solovyevt](https://github.com/solovyevt))
+- Update doc for MKP release [\#2242](https://github.com/kubeflow/pipelines/pull/2242) ([rmgogogo](https://github.com/rmgogogo))
+- Update changelog for release 0.1.31. \(And also for 0.1.30 and 0.1.29, which were not added before\) [\#2232](https://github.com/kubeflow/pipelines/pull/2232) ([jingzhang36](https://github.com/jingzhang36))
+- SDK - Compiler - Move Argo volume specifications to templates [\#2229](https://github.com/kubeflow/pipelines/pull/2229) ([Ark-kun](https://github.com/Ark-kun))
+- Updated README Swagger CodeGen version [\#2228](https://github.com/kubeflow/pipelines/pull/2228) ([RedbackThomson](https://github.com/RedbackThomson))
+- SDK - Components - Fix - Stop serializing string values [\#2227](https://github.com/kubeflow/pipelines/pull/2227) ([Ark-kun](https://github.com/Ark-kun))
+- third\_party/metadata\_envoy: Modify license file [\#2224](https://github.com/kubeflow/pipelines/pull/2224) ([dushyanthsc](https://github.com/dushyanthsc))
+- \[SDK/Client\] Improve the url format check for kfp.Client [\#2222](https://github.com/kubeflow/pipelines/pull/2222) ([numerology](https://github.com/numerology))
+- \[Sample\] update XGBoost sample [\#2220](https://github.com/kubeflow/pipelines/pull/2220) ([numerology](https://github.com/numerology))
+- \[Component\] Add Managed Spot Training Support for SageMaker [\#2219](https://github.com/kubeflow/pipelines/pull/2219) ([RedbackThomson](https://github.com/RedbackThomson))
+- SDK - Containers - Added support for container image cache [\#2216](https://github.com/kubeflow/pipelines/pull/2216) ([Ark-kun](https://github.com/Ark-kun))
+- Add third party license + source code to argo and minio images to comply with their license [\#2201](https://github.com/kubeflow/pipelines/pull/2201) ([Bobgy](https://github.com/Bobgy))
+- SDK - Moved the \_container\_builder from kfp.compiler to kfp.containers [\#2192](https://github.com/kubeflow/pipelines/pull/2192) ([Ark-kun](https://github.com/Ark-kun))
+- Added the backend Go module cache to .gitignore [\#2190](https://github.com/kubeflow/pipelines/pull/2190) ([Ark-kun](https://github.com/Ark-kun))
+- Docs - Added the direct kfp module members to documentation [\#2183](https://github.com/kubeflow/pipelines/pull/2183) ([Ark-kun](https://github.com/Ark-kun))
+- Components - Added AutoML Tables components and tests [\#2174](https://github.com/kubeflow/pipelines/pull/2174) ([Ark-kun](https://github.com/Ark-kun))
+- GUI: should pop-up a correct info when delete more than one pipeline [\#2156](https://github.com/kubeflow/pipelines/pull/2156) ([QxiaoQ](https://github.com/QxiaoQ))
+- \[SDK-compiler\] Refactor Compiler to expose an API to write out yaml spec of pipeline. [\#2146](https://github.com/kubeflow/pipelines/pull/2146) ([numerology](https://github.com/numerology))
+- Add necessary data types to api and database to support pipeline version. [\#1873](https://github.com/kubeflow/pipelines/pull/1873) ([jingzhang36](https://github.com/jingzhang36))
+- SDK - Hiding Argo's workflow.uid placeholder behind DSL [\#1683](https://github.com/kubeflow/pipelines/pull/1683) ([Ark-kun](https://github.com/Ark-kun))
+- Allows uploading a pipeline in new run form [\#1643](https://github.com/kubeflow/pipelines/pull/1643) ([rileyjbauer](https://github.com/rileyjbauer))
+- SDK - Improve errors when ContainerOp.output is unavailable [\#1578](https://github.com/kubeflow/pipelines/pull/1578) ([Ark-kun](https://github.com/Ark-kun))
+- Use Remote Build Execution for Bazel builds. [\#1031](https://github.com/kubeflow/pipelines/pull/1031) ([neuromage](https://github.com/neuromage))
 
 ## [0.1.31](https://github.com/kubeflow/pipelines/tree/0.1.31) (2019-09-25)
 
 [Full Changelog](https://github.com/kubeflow/pipelines/compare/0.1.30...0.1.31)
 
@@ -1763,6 +2533,4 @@
 - CRD for scheduling Argo workflows \(Implementation\) [\#3](https://github.com/kubeflow/pipelines/pull/3) ([vicaire](https://github.com/vicaire))
 - CRD for scheduling Argo workflows \(API spec\) [\#2](https://github.com/kubeflow/pipelines/pull/2) ([vicaire](https://github.com/vicaire))
 
-
-
-\* *This Change Log was automatically generated by [github_changelog_generator](https://github.com/skywinder/Github-Changelog-Generator)*
\ No newline at end of file
+\* *This Change Log was automatically generated by [github_changelog_generator](https://github.com/skywinder/Github-Changelog-Generator)*
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index ebbb59e5310..01de7730903 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -15,6 +15,12 @@
 You generally only need to submit a CLA once, so if you've already submitted one
 (even if it was for a different project), you probably don't need to do it
 again.
 
+## Coding style
+
+The Python part of the project follows the [Google Python style guide](http://google.github.io/styleguide/pyguide.html). We provide a [yapf](https://github.com/google/yapf) configuration file to help contributors auto-format their code to the Google Python style. Contributors are also encouraged to lint Python docstrings with [docformatter](https://github.com/myint/docformatter).
+
+The frontend part of the project uses [prettier](https://prettier.io/) for formatting; see [frontend/README.md#code-style](frontend/README.md#code-style) for details.
+
 ## Code reviews
 
 All submissions, including submissions by project members, require review. We
@@ -25,4 +31,4 @@
 information on using pull requests.
 
 ## Community Guidelines
 
 This project follows
-[Google's Open Source Community Guidelines](https://opensource.google.com/conduct/).
\ No newline at end of file
+[Google's Open Source Community Guidelines](https://opensource.google.com/conduct/).
diff --git a/README.md b/README.md
index c7897fb18bb..860de972350 100644
--- a/README.md
+++ b/README.md
@@ -24,6 +24,16 @@ See the Kubeflow [Pipelines API doc](https://www.kubeflow.org/docs/pipelines/ref
 
 Consult the [Python SDK reference docs](https://kubeflow-pipelines.readthedocs.io/en/latest/) when writing pipelines using the Python SDK.
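[Editorial aside on the CONTRIBUTING.md coding-style section above: the diff names yapf and docformatter but shows no invocation. A minimal sketch of how a contributor might run both locally — the target file is a placeholder, and installing from PyPI is an assumption, not something this diff prescribes:]

```bash
# Install the formatters named in CONTRIBUTING.md.
pip install yapf docformatter
# yapf picks up the repository's style configuration when run inside the
# repo; --in-place rewrites the file.
yapf --in-place sdk/python/kfp/my_module.py
# docformatter normalizes docstrings toward PEP 257.
docformatter --in-place sdk/python/kfp/my_module.py
```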
+## Kubeflow Pipelines Community Meeting
+
+The community meeting happens every other Wednesday, 10-11 AM (PST).
+[Calendar Invite](https://calendar.google.com/event?action=TEMPLATE&tmeid=NTdoNG5uMDBtcnJlYmdlOWt1c2lkY25jdmlfMjAxOTExMTNUMTgwMDAwWiBqZXNzaWV6aHVAZ29vZ2xlLmNvbQ&tmsrc=jessiezhu%40google.com&scp=ALL) or [Join Meeting Directly](https://meet.google.com/phd-ixfj-kcr/)
+
+[Meeting notes](http://bit.ly/kfp-meeting-notes)
+
+## Kubeflow Pipelines Slack Channel
+[#kubeflow-pipelines](https://kubeflow.slack.com)
+
 ## Blog posts
 
 * [Getting started with Kubeflow Pipelines](https://cloud.google.com/blog/products/ai-machine-learning/getting-started-kubeflow-pipelines) (By Amy Unruh)
diff --git a/VERSION b/VERSION
new file mode 100644
index 00000000000..4b9fcbec101
--- /dev/null
+++ b/VERSION
@@ -0,0 +1 @@
+0.5.1
diff --git a/WORKSPACE b/WORKSPACE
index 8d01b4204b1..89a0ae53c28 100644
--- a/WORKSPACE
+++ b/WORKSPACE
@@ -36,15 +36,6 @@ load("@bazel_gazelle//:deps.bzl", "gazelle_dependencies", "go_repository")
 
 gazelle_dependencies()
 
-http_archive(
-    name = "org_tensorflow",
-    sha256 = "24570d860d87dcfb936f53fb8dd30302452d0aa6b8b8537e4555c1bf839121a6",
-    strip_prefix = "tensorflow-1.13.0-rc0",
-    urls = [
-        "https://github.com/tensorflow/tensorflow/archive/v1.13.0-rc0.tar.gz",
-    ],
-)
-
 http_archive(
     name = "io_bazel_rules_closure",
     sha256 = "43c9b882fa921923bcba764453f4058d102bece35a37c9f6383c713004aacff1",
@@ -55,10 +46,6 @@
     ],
 )
 
-load("@org_tensorflow//tensorflow:workspace.bzl", "tf_workspace")
-
-tf_workspace()
-
 load("@bazel_tools//tools/build_defs/repo:git.bzl", "new_git_repository")
 
 go_repository(
@@ -151,8 +138,8 @@ http_archive(
 
 go_repository(
     name = "co_honnef_go_tools",
+    commit = "ea95bdfd59fc",
     importpath = "honnef.co/go/tools",
-    tag = "v0.0.1-2019.2.2",
 )
 
 go_repository(
@@ -467,12 +454,6 @@ go_repository(
     tag = "v4.2.1",
 )
 
-go_repository(
-    name = "com_github_kataras_iris",
-    importpath = "github.com/kataras/iris",
-    tag = "v10.6.7",
-)
-
 go_repository(
     name = "com_github_kisielk_gotool",
     importpath = "github.com/kisielk/gotool",
@@ -524,7 +505,7 @@ go_repository(
 go_repository(
     name = "com_github_minio_minio_go",
     importpath = "github.com/minio/minio-go",
-    tag = "v6.0.5",
+    tag = "v6.0.14",
 )
 
 go_repository(
@@ -668,7 +649,7 @@ go_repository(
 go_repository(
     name = "com_github_stretchr_testify",
     importpath = "github.com/stretchr/testify",
-    tag = "v1.2.2",
+    tag = "v1.3.0",
 )
 
 go_repository(
@@ -782,7 +763,7 @@
 
 go_repository(
     name = "org_golang_x_crypto",
-    commit = "4def268fd1a4",
+    commit = "9756ffdc2472",
     importpath = "golang.org/x/crypto",
 )
 
@@ -795,7 +776,7 @@
 go_repository(
     name = "org_golang_x_net",
     build_file_proto_mode = "disable_global",
-    commit = "74dc4d7220e7",
+    commit = "ba9fcec4b297",
     importpath = "golang.org/x/net",
 )
 
@@ -813,7 +794,7 @@
 
 go_repository(
     name = "org_golang_x_sys",
-    commit = "fde4db37ae7a",
+    commit = "c7b8b68b1456",
    importpath = "golang.org/x/sys",
 )
 
@@ -831,7 +812,7 @@
 
 go_repository(
     name = "org_golang_x_tools",
-    commit = "922a4ee32d1a",
+    commit = "6cdbf07be9d0",
     importpath = "golang.org/x/tools",
 )
 
@@ -915,7 +896,7 @@
 
 go_repository(
     name = "com_github_google_pprof",
-    commit = "34ac40c74b70",
+    commit = "54271f7e092f",
     importpath = "github.com/google/pprof",
 )
 
@@ -1006,7 +987,7 @@
 go_repository(
     name = "com_github_kr_pty",
     importpath = "github.com/kr/pty",
-    tag = "v1.1.8",
+    tag = "v1.1.1",
 )
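[Editorial aside: the WORKSPACE hunks around this point bump pinned go_repository commits and tags. These pins are regenerated rather than edited by hand; backend/README.md, later in this diff, gives the command, repeated here as a sketch to run from the repository root:]

```bash
# Re-derive the WORKSPACE go_repository pins from go.mod so Bazel
# resolves the same dependency versions as the Go toolchain.
bazel run //:gazelle -- update-repos --from_file=go.mod
```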
@@ -1048,24 +1029,24 @@
 go_repository(
     name = "org_golang_google_api",
     importpath = "google.golang.org/api",
-    tag = "v0.8.0",
+    tag = "v0.9.0",
 )
 
 go_repository(
     name = "org_golang_x_exp",
-    commit = "ec7cb31e5a56",
+    commit = "efd6b22b2522",
     importpath = "golang.org/x/exp",
 )
 
 go_repository(
     name = "org_golang_x_image",
-    commit = "cff245a6509b",
+    commit = "0694c2d4d067",
     importpath = "golang.org/x/image",
 )
 
 go_repository(
     name = "org_golang_x_mobile",
-    commit = "e8b3e6111d02",
+    commit = "d3739f865fa6",
     importpath = "golang.org/x/mobile",
 )
 
@@ -1080,3 +1061,21 @@
     commit = "a985d3407aa7",
     importpath = "golang.org/x/xerrors",
 )
+
+go_repository(
+    name = "com_github_stretchr_objx",
+    importpath = "github.com/stretchr/objx",
+    tag = "v0.1.0",
+)
+
+go_repository(
+    name = "io_k8s_sigs_testing_frameworks",
+    importpath = "sigs.k8s.io/testing_frameworks",
+    tag = "v0.1.1",
+)
+
+go_repository(
+    name = "com_github_peterhellberg_duration",
+    commit = "ec6baeebcd10",
+    importpath = "github.com/peterhellberg/duration",
+)
diff --git a/backend/Dockerfile b/backend/Dockerfile
index 22fc8eff523..747e3698658 100644
--- a/backend/Dockerfile
+++ b/backend/Dockerfile
@@ -1,4 +1,5 @@
-FROM l.gcr.io/google/bazel:0.24.0 as builder
+ARG BAZEL_IMAGE=l.gcr.io/google/bazel:0.24.0
+FROM $BAZEL_IMAGE as builder
 
 RUN apt-get update && \
     apt-get install -y cmake clang musl-dev openssl
@@ -24,6 +25,16 @@ RUN if [ "$use_remote_build" = "true" ]; then \
 
 # Compile
 FROM python:3.5 as compiler
+RUN apt-get update -y && \
+    apt-get install --no-install-recommends -y -q default-jdk python3-setuptools python3-dev
+RUN wget https://bootstrap.pypa.io/get-pip.py && python3 get-pip.py
+COPY backend/requirements.txt .
+RUN python3 -m pip install -r requirements.txt
+
+# Download the Argo CLI so that the samples can be validated
+#ADD https://github.com/argoproj/argo/releases/download/v2.7.5/argo-linux-amd64 /usr/local/bin/argo
+ADD https://github.com/argoproj/argo/releases/download/v2.4.3/argo-linux-amd64 /usr/local/bin/argo
+RUN chmod +x /usr/local/bin/argo
 
 WORKDIR /go/src/github.com/kubeflow/pipelines
 COPY sdk sdk
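[Editorial aside: the ARG BAZEL_IMAGE introduced at the top of backend/Dockerfile above makes the builder image overridable at build time, for example with an image produced by the Dockerfile.bazel added later in this diff. A hypothetical invocation from the repo root — registry, image names, and tags are illustrative only:]

```bash
# Build the API server image, swapping in a custom Bazel builder image.
docker build -t api-server:dev \
  --build-arg BAZEL_IMAGE=my-registry/bazel:0.24.0 \
  --build-arg COMMIT_SHA="$(git rev-parse HEAD)" \
  -f backend/Dockerfile .
```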
@@ -33,30 +44,47 @@ RUN python3 setup.py install
 
 WORKDIR /samples
 COPY ./samples .
 
-#We need to check that all samples have been compiled without error.
-#For find program, the -exec argument is a filter predicate just like -name. It only affects whether the file is "found", not the find's exit code.
-#One way to solve this problem is to check whether we have any python pipelines that cannot compile. Here the exit code is the number of such files:
-#RUN bash -e -c 'exit $(find . -maxdepth 2 -name "*.py" ! -exec dsl-compile --py {} --output {}.tar.gz \; -print | wc -l)'
-#I think it's better to just use a shell loop though.
-#RUN for pipeline in $(find . -maxdepth 2 -name '*.py' -type f); do dsl-compile --py "$pipeline" --output "$pipeline.tar.gz"; done
-#The "for" loop breaks on all whitespace, so we either need to override IFS or use the "read" command instead.
-RUN set -e; find core -maxdepth 2 -name '*.py' -type f | while read pipeline; do dsl-compile --py "$pipeline" --output "$pipeline.tar.gz"; done
+# We need to check that all samples have been compiled without error.
+# For the find program, the -exec argument is a filter predicate just like
+# -name. It only affects whether the file is "found", not find's exit code.
+# One way to solve this problem is to check whether we have any python pipelines
+# that cannot compile. Here the exit code is the number of such files:
+# RUN bash -e -c 'exit $(find . -maxdepth 2 -name "*.py" ! -exec dsl-compile --py {} --output {}.tar.gz \; -print | wc -l)'
+# I think it's better to just use a shell loop though.
+# RUN for pipeline in $(find . -maxdepth 2 -name '*.py' -type f); do dsl-compile --py "$pipeline" --output "$pipeline.tar.gz"; done
+# The "for" loop breaks on all whitespace, so we either need to override IFS or
+# use the "read" command instead.
+RUN line="import kfp;kfp.components.default_base_image_or_builder='gcr.io/google-appengine/python:2020-03-31-141326'"; \
+    set -e; find core tutorials -maxdepth 2 -name '*.py' -type f | while read pipeline; do \
+    awk -v text="$line" '!/^#/ && !p {print text; p=1} 1' "$pipeline" > "$pipeline.tmp" && \
+    mv "$pipeline.tmp" "$pipeline" && \
+    python3 "$pipeline"; \
+    done
 
 FROM debian:stretch
 
 ARG COMMIT_SHA=unknown
 ENV COMMIT_SHA=${COMMIT_SHA}
+ARG TAG_NAME=unknown
+ENV TAG_NAME=${TAG_NAME}
 
 WORKDIR /bin
 
 COPY third_party/license.txt /bin/license.txt
-COPY --from=builder /go/src/github.com/kubeflow/pipelines/bazel-bin/backend/src/apiserver/linux_amd64_stripped/apiserver /bin/apiserver
+COPY --from=builder /go/src/github.com/kubeflow/pipelines/bazel-bin/backend/src/apiserver/ /usr/local/apiserver
+RUN cp /usr/local/apiserver/linux_$(dpkg --print-architecture)_stripped/apiserver /bin/apiserver && \
+    rm -rf /usr/local/apiserver
 COPY backend/src/apiserver/config/ /config
 COPY --from=compiler /samples/ /samples/
 
 # Adding CA certificate so API server can download pipeline through URL
-RUN apt-get update && apt-get install -y ca-certificates
+RUN apt-get update && apt-get install -y ca-certificates \
+    # wget is used for liveness/readiness probe command
+    wget
+
+# Pin sample doc links to the commit that built the backend image
+RUN sed -E "s#/(blob|tree)/master/#/\1/${COMMIT_SHA}/#g" -i /config/sample_config.json && \
+    sed -E "s/%252Fmaster/%252F${COMMIT_SHA}/g" -i /config/sample_config.json
 
 # Expose apiserver port
 EXPOSE 8888
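[Editorial aside: the sample-compilation RUN step in backend/Dockerfile above uses awk to inject a default-image override ahead of the first non-comment line of each sample — under the assumption, made explicit in the fixed loop, that awk's output is written back via a temp file. A toy illustration of that awk program; file and injected text are placeholders:]

```bash
# A sample file with a leading comment.
printf '# Copyright\nimport sys\n' > sample.py
# For the first line not starting with '#', print the injected text once
# (p guards against repeats); the trailing '1' echoes every input line.
awk -v text="import kfp" '!/^#/ && !p {print text; p=1} 1' sample.py
# Prints:
#   # Copyright
#   import kfp
#   import sys
```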
diff --git a/backend/Dockerfile.bazel b/backend/Dockerfile.bazel
new file mode 100644
index 00000000000..b285e3582e0
--- /dev/null
+++ b/backend/Dockerfile.bazel
@@ -0,0 +1,30 @@
+FROM ubuntu:18.04 as builder
+
+RUN apt-get update \
+  && apt-get install -y --no-install-recommends software-properties-common \
+  && apt-get clean \
+  && rm -rf /var/lib/apt/lists/*
+
+RUN apt-get update \
+  && apt-get install -y curl \
+    openjdk-11-jdk \
+    openjdk-11-jre-headless \
+    pkg-config \
+    zip \
+    g++ \
+    git \
+    zlib1g-dev \
+    unzip \
+    python \
+  && apt-get clean \
+  && rm -rf /var/lib/apt/lists/*
+
+ENV BAZEL_VERSION=0.24.0
+
+RUN mkdir -p /bazel \
+  && cd /bazel \
+  && curl -fSsL -O https://github.com/bazelbuild/bazel/releases/download/$BAZEL_VERSION/bazel-$BAZEL_VERSION-dist.zip \
+  && unzip bazel-$BAZEL_VERSION-dist.zip \
+  && ./compile.sh \
+  && cp output/bazel /usr/local/bin \
+  && rm -rf /bazel
diff --git a/backend/Dockerfile.cacheserver b/backend/Dockerfile.cacheserver
new file mode 100644
index 00000000000..934ccfc7f1e
--- /dev/null
+++ b/backend/Dockerfile.cacheserver
@@ -0,0 +1,20 @@
+# Dockerfile for building the source code of cache_server
+FROM golang:1.11-alpine3.7 as builder
+
+RUN apk update && apk upgrade && \
+    apk add --no-cache bash git openssh gcc musl-dev
+
+WORKDIR /go/src/github.com/kubeflow/pipelines
+COPY . .
+
+RUN GO111MODULE=on go build -o /bin/cache_server backend/src/cache/*.go
+RUN git clone https://github.com/hashicorp/golang-lru.git /kfp/cache/golang-lru/
+
+FROM alpine:3.8
+WORKDIR /bin
+
+COPY --from=builder /bin/cache_server /bin/cache_server
+COPY --from=builder /go/src/github.com/kubeflow/pipelines/third_party/license.txt /bin/license.txt
+COPY --from=builder /kfp/cache/golang-lru/* /bin/golang-lru/
+
+ENTRYPOINT [ "/bin/cache_server" ]
\ No newline at end of file
diff --git a/backend/Dockerfile.visualization b/backend/Dockerfile.visualization
index 69069dbc26a..5992ca666f7 100644
--- a/backend/Dockerfile.visualization
+++ b/backend/Dockerfile.visualization
@@ -3,7 +3,7 @@
 # visualization. More details about this process can be found in the server.py
 # and exporter.py files in the directory specified above.
 
-# Copyright 2019 Google LLC
+# Copyright 2019-2020 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -17,10 +17,11 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-FROM tensorflow/tensorflow:1.13.1-py3
+FROM tensorflow/tensorflow:2.1.0-py3
 
 RUN apt-get update \
-  && apt-get install -y wget curl tar
+  && apt-get install -y wget curl tar \
+  pkg-config libcairo2-dev libgirepository1.0-dev # For the pygobject and pycairo package setup due to licensing
 
 RUN curl https://dl.google.com/dl/cloudsdk/release/google-cloud-sdk.tar.gz > /tmp/google-cloud-sdk.tar.gz
 RUN mkdir -p /usr/local/gcloud
@@ -36,9 +37,10 @@
 RUN pip3 install -r requirements.txt
 
 COPY backend/src/apiserver/visualization/license.sh /src
 COPY backend/src/apiserver/visualization/third_party_licenses.csv /src
+COPY backend/src/apiserver/visualization/third_party_licenses /usr/licenses
 RUN ./license.sh third_party_licenses.csv /usr/licenses
 
 COPY backend/src/apiserver/visualization /src
 
-ENTRYPOINT [ "python3", "server.py" ]
\ No newline at end of file
+ENTRYPOINT [ "python3", "server.py" ]
diff --git a/backend/OWNERS b/backend/OWNERS
index dc4d9f39b85..f0ef80a590f 100644
--- a/backend/OWNERS
+++ b/backend/OWNERS
@@ -1,6 +1,10 @@
 approvers:
   - IronPan
+  - jingzhang36
   - neuromage
+  - rmgogogo
 reviewers:
   - IronPan
+  - jingzhang36
   - neuromage
+  - rmgogogo
diff --git a/backend/README.md b/backend/README.md
index fbeec5520da..7a7c83d421e 100644
--- a/backend/README.md
+++ b/backend/README.md
@@ -43,3 +43,11 @@
 If a new external Go dependency is added, or an existing one has its version
 bumped in the `go.mod` file, ensure the BUILD files pick this up by updating
 the WORKSPACE go_repository rules using the following command:
 `bazel run //:gazelle -- update-repos --from_file=go.mod`
+
+## Updating python dependencies
+
+[pip-tools](https://github.com/jazzband/pip-tools) is used to manage python
+dependencies. To update dependencies, edit [requirements.in](requirements.in)
+and run `./update_requirements.sh requirements.txt` to update and pin the
+transitive dependencies.
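[Editorial aside: a sketch of the pip-tools flow that the backend/README.md addition above describes. update_requirements.sh presumably wraps something close to this direct pip-compile call — an assumption, since the script itself is not shown in this diff:]

```bash
pip install pip-tools
# Re-resolve requirements.in and pin every transitive dependency.
pip-compile requirements.in --output-file requirements.txt
```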
+ diff --git a/backend/api/BUILD.bazel b/backend/api/BUILD.bazel index 053903f449e..73af36c2245 100644 --- a/backend/api/BUILD.bazel +++ b/backend/api/BUILD.bazel @@ -5,6 +5,7 @@ load("@com_github_grpc_ecosystem_grpc_gateway//protoc-gen-swagger:defs.bzl", "pr proto_library( name = "go_client_proto", srcs = [ + "auth.proto", "error.proto", "experiment.proto", "filter.proto", diff --git a/backend/api/auth.proto b/backend/api/auth.proto new file mode 100644 index 00000000000..28a27b3aa43 --- /dev/null +++ b/backend/api/auth.proto @@ -0,0 +1,86 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +option go_package = "github.com/kubeflow/pipelines/backend/api/go_client"; +package api; + +import "google/api/annotations.proto"; +import "google/protobuf/empty.proto"; +import "backend/api/error.proto"; +import "protoc-gen-swagger/options/annotations.proto"; + +option (grpc.gateway.protoc_gen_swagger.options.openapiv2_swagger) = { + responses: { + key: "default"; + value: { + schema: { + json_schema: { + ref: ".api.Status"; + } + } + } + } + // Use bearer token for authorizing access to job service. + // Kubernetes client library(https://kubernetes.io/docs/reference/using-api/client-libraries/) + // uses bearer token as default for authorization. The section below + // ensures security definition object is generated in the swagger definition. + // For more details see https://github.com/OAI/OpenAPI-Specification/blob/3.0.0/versions/2.0.md#securityDefinitionsObject + security_definitions: { + security: { + key: "Bearer"; + value: { + type: TYPE_API_KEY; + in: IN_HEADER; + name: "authorization"; + } + } + } + security: { + security_requirement: { + key: "Bearer"; + value: {}; + } + } +}; + +service AuthService { + rpc Authorize(AuthorizeRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + get: "/apis/v1beta1/auth" + }; + } +} + +// Ask for authorization of an access by providing resource's namespace, type +// and verb. User identity is not part of the message, because it is expected +// to be parsed from request headers. Caller should proxy user request's headers. +message AuthorizeRequest { + // Type of resources in pipelines system. + enum Resources { + UNASSIGNED_RESOURCES = 0; + VIEWERS = 1; + } + // Type of verbs that act on the resources. + enum Verb { + UNASSIGNED_VERB = 0; + CREATE = 1; + GET = 2; + DELETE = 3; + } + string namespace = 1; // Namespace the resource belongs to. + Resources resources = 2; // Resource type asking for authorization. + Verb verb = 3; // Verb on the resource asking for authorization. 
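+
+  // Example (hypothetical values): to check whether the calling user may GET
+  // viewers in namespace "ns1", a client would send
+  //   GET /apis/v1beta1/auth?namespace=ns1&resources=VIEWERS&verb=GET
+  // and proxy the user's "authorization: Bearer <token>" header along with it.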
+}
diff --git a/backend/api/build_kfp_server_api_python_package.sh b/backend/api/build_kfp_server_api_python_package.sh
index 5703b55fbb7..44f73c704f1 100755
--- a/backend/api/build_kfp_server_api_python_package.sh
+++ b/backend/api/build_kfp_server_api_python_package.sh
@@ -35,7 +35,8 @@ if [ -z "$VERSION" ]; then
 fi
 
 codegen_file=/tmp/swagger-codegen-cli.jar
-codegen_uri=http://central.maven.org/maven2/io/swagger/swagger-codegen-cli/2.4.7/swagger-codegen-cli-2.4.7.jar
+# Browse all versions in: https://repo1.maven.org/maven2/io/swagger/swagger-codegen-cli/
+codegen_uri=https://repo1.maven.org/maven2/io/swagger/swagger-codegen-cli/2.4.7/swagger-codegen-cli-2.4.7.jar
 if ! [ -f "$codegen_file" ]; then
   wget --no-verbose "$codegen_uri" -O "$codegen_file"
 fi
diff --git a/backend/api/experiment.proto b/backend/api/experiment.proto
index ac925792fb3..0ca43bbbc89 100644
--- a/backend/api/experiment.proto
+++ b/backend/api/experiment.proto
@@ -17,11 +17,12 @@ syntax = "proto3";
 option go_package = "github.com/kubeflow/pipelines/backend/api/go_client";
 package api;
 
+import "backend/api/error.proto";
+import "backend/api/resource_reference.proto";
 import "google/api/annotations.proto";
 import "google/protobuf/empty.proto";
 import "google/protobuf/timestamp.proto";
 import "protoc-gen-swagger/options/annotations.proto";
-import "backend/api/error.proto";
 
 option (grpc.gateway.protoc_gen_swagger.options.openapiv2_swagger) = {
   responses: {
@@ -58,6 +59,7 @@ option (grpc.gateway.protoc_gen_swagger.options.openapiv2_swagger) = {
 };
 
 service ExperimentService {
+  //Create a new experiment.
   rpc CreateExperiment(CreateExperimentRequest) returns (Experiment) {
     option (google.api.http) = {
       post: "/apis/v1beta1/experiments"
@@ -65,32 +67,49 @@ service ExperimentService {
     };
   }
 
+  //Find a specific experiment by ID.
   rpc GetExperiment(GetExperimentRequest) returns (Experiment) {
     option (google.api.http) = {
       get: "/apis/v1beta1/experiments/{id}"
     };
   }
 
+  //Find all experiments.
   rpc ListExperiment(ListExperimentsRequest) returns (ListExperimentsResponse) {
     option (google.api.http) = {
       get: "/apis/v1beta1/experiments"
     };
   }
 
+  //Delete an experiment.
   rpc DeleteExperiment(DeleteExperimentRequest) returns (google.protobuf.Empty) {
     option (google.api.http) = {
       delete: "/apis/v1beta1/experiments/{id}"
     };
   }
+
+  //Archive an experiment.
+  rpc ArchiveExperiment(ArchiveExperimentRequest) returns (google.protobuf.Empty) {
+    option (google.api.http) = {
+      post: "/apis/v1beta1/experiments/{id}:archive"
+    };
+  }
+
+  //Restore an archived experiment.
+  rpc UnarchiveExperiment(UnarchiveExperimentRequest) returns (google.protobuf.Empty) {
+    option (google.api.http) = {
+      post: "/apis/v1beta1/experiments/{id}:unarchive"
+    };
+  }
 }
 
 message CreateExperimentRequest {
-  // The experiment to be created
+  // The experiment to be created.
   Experiment experiment = 1;
 }
 
 message GetExperimentRequest {
-  // The ID of the experiment to be retrieved
+  // The ID of the experiment to be retrieved.
   string id = 1;
 }
 
@@ -102,9 +121,15 @@ message ListExperimentsRequest {
   // Ascending by default.
   string sort_by = 3;
 
-  // A base-64 encoded, JSON-serialized Filter protocol buffer (see
-  // filter.proto).
+  // A url-encoded, JSON-serialized Filter protocol buffer (see
+  // [filter.proto](https://github.com/kubeflow/pipelines/
+  // blob/master/backend/api/filter.proto)).
   string filter = 4;
+
+  // What resource reference to filter on.
+  // For Experiment, the only valid resource type is Namespace. A sample query string could be
+  // resource_reference_key.type=NAMESPACE&resource_reference_key.id=ns1
+  ResourceKey resource_reference_key = 5;
 }
 
 message ListExperimentsResponse {
@@ -118,7 +143,7 @@ message ListExperimentsResponse {
   string next_page_token = 2;
 }
 
-message DeleteExperimentRequest{
+message DeleteExperimentRequest {
   // The ID of the experiment to be deleted.
   string id = 1;
 }
@@ -131,8 +156,28 @@ message Experiment {
   string name = 2;
 
   // Optional input field. Describing the purpose of the experiment
-  string description =3;
+  string description = 3;
 
   // Output. The time that the experiment created.
   google.protobuf.Timestamp created_at = 4;
+
+  // Optional input field. Specify which resource this experiment belongs to.
+  // For Experiment, the only valid resource reference is a single Namespace.
+  repeated ResourceReference resource_references = 5;
+
+  enum StorageState {
+    STORAGESTATE_UNSPECIFIED = 0;
+    STORAGESTATE_AVAILABLE = 1;
+    STORAGESTATE_ARCHIVED = 2;
+  }
+
+  StorageState storage_state = 6;
+}
+
+message ArchiveExperimentRequest {
+  string id = 1;
+}
+
+message UnarchiveExperimentRequest {
+  string id = 1;
+}
diff --git a/backend/api/generate_api.sh b/backend/api/generate_api.sh
index 6b57e843757..8937e235aa5 100755
--- a/backend/api/generate_api.sh
+++ b/backend/api/generate_api.sh
@@ -30,8 +30,8 @@ GENERATED_GO_PROTO_FILES="${BAZEL_BINDIR}/backend/api/api_generated_go_sources/s
 # TODO this script should be able to be run from anywhere, not just within .../backend/api/
 
 # Delete currently generated code.
-rm -r -f go_http_client/*
-rm -r -f go_client/*
+rm -r -f ${DIR}/go_http_client/*
+rm -r -f ${DIR}/go_client/*
 
 # Build required tools.
 bazel build @com_github_mbrukman_autogen//:autogen_tool
@@ -51,13 +51,13 @@ done
 
 # Generate and copy back into source tree .swagger.json files.
 bazel build //backend/api:api_swagger
-cp ${BAZEL_BINDIR}/backend/api/*.swagger.json swagger
+cp ${BAZEL_BINDIR}/backend/api/*.swagger.json ${DIR}/swagger
 
 jq -s '
     reduce .[] as $item ({}; . * $item) |
     .info.title = "Kubeflow Pipelines API" |
     .info.description = "This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition." |
-    .info.version = "0.1.20"
+    .info.version = "0.1.38"
 ' ${DIR}/swagger/{run,job,pipeline,experiment,pipeline.upload}.swagger.json > "${DIR}/swagger/kfp_api_single_file.swagger.json"
 
 # Generate Go HTTP client from the swagger files.
@@ -67,7 +67,7 @@ ${SWAGGER_CMD} generate client \ --principal models.Principal \ -c job_client \ -m job_model \ - -t go_http_client + -t ${DIR}/go_http_client ${SWAGGER_CMD} generate client \ -f ${DIR}/swagger/run.swagger.json \ @@ -75,7 +75,7 @@ ${SWAGGER_CMD} generate client \ --principal models.Principal \ -c run_client \ -m run_model \ - -t go_http_client + -t ${DIR}/go_http_client ${SWAGGER_CMD} generate client \ -f ${DIR}/swagger/experiment.swagger.json \ @@ -83,7 +83,7 @@ ${SWAGGER_CMD} generate client \ --principal models.Principal \ -c experiment_client \ -m experiment_model \ - -t go_http_client + -t ${DIR}/go_http_client ${SWAGGER_CMD} generate client \ -f ${DIR}/swagger/pipeline.upload.swagger.json \ @@ -91,7 +91,7 @@ ${SWAGGER_CMD} generate client \ --principal models.Principal \ -c pipeline_upload_client \ -m pipeline_upload_model \ - -t go_http_client + -t ${DIR}/go_http_client ${SWAGGER_CMD} generate client \ -f ${DIR}/swagger/pipeline.swagger.json \ @@ -99,7 +99,7 @@ ${SWAGGER_CMD} generate client \ --principal models.Principal \ -c pipeline_client \ -m pipeline_model \ - -t go_http_client + -t ${DIR}/go_http_client ${SWAGGER_CMD} generate client \ -f ${DIR}/swagger/visualization.swagger.json \ @@ -107,7 +107,7 @@ ${SWAGGER_CMD} generate client \ --principal models.Principal \ -c visualization_client \ -m visualization_model \ - -t go_http_client + -t ${DIR}/go_http_client # Hack to fix an issue with go-swagger # See https://github.com/go-swagger/go-swagger/issues/1381 for details. diff --git a/backend/api/go_client/BUILD.bazel b/backend/api/go_client/BUILD.bazel index 07c7677beb2..58da7ef4764 100644 --- a/backend/api/go_client/BUILD.bazel +++ b/backend/api/go_client/BUILD.bazel @@ -3,6 +3,8 @@ load("@io_bazel_rules_go//go:def.bzl", "go_library") go_library( name = "go_default_library", srcs = [ + "auth.pb.go", + "auth.pb.gw.go", "error.pb.go", "experiment.pb.go", "experiment.pb.gw.go", diff --git a/backend/api/go_client/auth.pb.go b/backend/api/go_client/auth.pb.go new file mode 100755 index 00000000000..c434d1bf924 --- /dev/null +++ b/backend/api/go_client/auth.pb.go @@ -0,0 +1,260 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: backend/api/auth.proto + +package go_client // import "github.com/kubeflow/pipelines/backend/api/go_client" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import _ "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/options" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type AuthorizeRequest_Resources int32 + +const ( + AuthorizeRequest_UNASSIGNED_RESOURCES AuthorizeRequest_Resources = 0 + AuthorizeRequest_VIEWERS AuthorizeRequest_Resources = 1 +) + +var AuthorizeRequest_Resources_name = map[int32]string{ + 0: "UNASSIGNED_RESOURCES", + 1: "VIEWERS", +} +var AuthorizeRequest_Resources_value = map[string]int32{ + "UNASSIGNED_RESOURCES": 0, + "VIEWERS": 1, +} + +func (x AuthorizeRequest_Resources) String() string { + return proto.EnumName(AuthorizeRequest_Resources_name, int32(x)) +} +func (AuthorizeRequest_Resources) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_auth_b463ef3269931e86, []int{0, 0} +} + +type AuthorizeRequest_Verb int32 + +const ( + AuthorizeRequest_UNASSIGNED_VERB AuthorizeRequest_Verb = 0 + AuthorizeRequest_CREATE AuthorizeRequest_Verb = 1 + AuthorizeRequest_GET AuthorizeRequest_Verb = 2 + AuthorizeRequest_DELETE AuthorizeRequest_Verb = 3 +) + +var AuthorizeRequest_Verb_name = map[int32]string{ + 0: "UNASSIGNED_VERB", + 1: "CREATE", + 2: "GET", + 3: "DELETE", +} +var AuthorizeRequest_Verb_value = map[string]int32{ + "UNASSIGNED_VERB": 0, + "CREATE": 1, + "GET": 2, + "DELETE": 3, +} + +func (x AuthorizeRequest_Verb) String() string { + return proto.EnumName(AuthorizeRequest_Verb_name, int32(x)) +} +func (AuthorizeRequest_Verb) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_auth_b463ef3269931e86, []int{0, 1} +} + +type AuthorizeRequest struct { + Namespace string `protobuf:"bytes,1,opt,name=namespace,proto3" json:"namespace,omitempty"` + Resources AuthorizeRequest_Resources `protobuf:"varint,2,opt,name=resources,proto3,enum=api.AuthorizeRequest_Resources" json:"resources,omitempty"` + Verb AuthorizeRequest_Verb `protobuf:"varint,3,opt,name=verb,proto3,enum=api.AuthorizeRequest_Verb" json:"verb,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AuthorizeRequest) Reset() { *m = AuthorizeRequest{} } +func (m *AuthorizeRequest) String() string { return proto.CompactTextString(m) } +func (*AuthorizeRequest) ProtoMessage() {} +func (*AuthorizeRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_auth_b463ef3269931e86, []int{0} +} +func (m *AuthorizeRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AuthorizeRequest.Unmarshal(m, b) +} +func (m *AuthorizeRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AuthorizeRequest.Marshal(b, m, deterministic) +} +func (dst *AuthorizeRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_AuthorizeRequest.Merge(dst, src) +} +func (m *AuthorizeRequest) XXX_Size() int { + return xxx_messageInfo_AuthorizeRequest.Size(m) +} +func (m *AuthorizeRequest) XXX_DiscardUnknown() { + xxx_messageInfo_AuthorizeRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_AuthorizeRequest proto.InternalMessageInfo + +func (m *AuthorizeRequest) GetNamespace() string { + if m != nil { + return m.Namespace + } + return "" +} + +func (m *AuthorizeRequest) GetResources() AuthorizeRequest_Resources { + if m != nil { + return m.Resources + } + return 
AuthorizeRequest_UNASSIGNED_RESOURCES +} + +func (m *AuthorizeRequest) GetVerb() AuthorizeRequest_Verb { + if m != nil { + return m.Verb + } + return AuthorizeRequest_UNASSIGNED_VERB +} + +func init() { + proto.RegisterType((*AuthorizeRequest)(nil), "api.AuthorizeRequest") + proto.RegisterEnum("api.AuthorizeRequest_Resources", AuthorizeRequest_Resources_name, AuthorizeRequest_Resources_value) + proto.RegisterEnum("api.AuthorizeRequest_Verb", AuthorizeRequest_Verb_name, AuthorizeRequest_Verb_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// AuthServiceClient is the client API for AuthService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type AuthServiceClient interface { + Authorize(ctx context.Context, in *AuthorizeRequest, opts ...grpc.CallOption) (*empty.Empty, error) +} + +type authServiceClient struct { + cc *grpc.ClientConn +} + +func NewAuthServiceClient(cc *grpc.ClientConn) AuthServiceClient { + return &authServiceClient{cc} +} + +func (c *authServiceClient) Authorize(ctx context.Context, in *AuthorizeRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/api.AuthService/Authorize", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// AuthServiceServer is the server API for AuthService service. +type AuthServiceServer interface { + Authorize(context.Context, *AuthorizeRequest) (*empty.Empty, error) +} + +func RegisterAuthServiceServer(s *grpc.Server, srv AuthServiceServer) { + s.RegisterService(&_AuthService_serviceDesc, srv) +} + +func _AuthService_Authorize_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(AuthorizeRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AuthServiceServer).Authorize(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/api.AuthService/Authorize", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AuthServiceServer).Authorize(ctx, req.(*AuthorizeRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _AuthService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "api.AuthService", + HandlerType: (*AuthServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "Authorize", + Handler: _AuthService_Authorize_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "backend/api/auth.proto", +} + +func init() { proto.RegisterFile("backend/api/auth.proto", fileDescriptor_auth_b463ef3269931e86) } + +var fileDescriptor_auth_b463ef3269931e86 = []byte{ + // 460 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x6c, 0x52, 0xc1, 0x6e, 0xd3, 0x40, + 0x14, 0x8c, 0x9d, 0x2a, 0xc1, 0x2f, 0x94, 0x9a, 0x6d, 0x29, 0x91, 0x09, 0x6a, 0x94, 0x53, 0x0f, + 0xd4, 0x56, 0xd3, 0x2b, 0x1c, 0x92, 0x76, 0x55, 0x55, 0x82, 0x22, 0xad, 0xd3, 0x20, 0xf5, 0x52, + 0xad, 0xdd, 0x17, 0x67, 0x55, 0xc7, 0x6b, 0xd6, 0xeb, 0x54, 0x70, 0x44, 0xe2, 0x03, 0x80, 0x4f, + 0xe3, 0x17, 0xf8, 0x10, 0xe4, 0x4d, 0x9a, 0x46, 0x90, 0xd3, 0x6a, 
0xdf, 0xcc, 0x9b, 0x19, 0x69, + 0x1e, 0xec, 0x47, 0x3c, 0xbe, 0xc3, 0xec, 0x36, 0xe0, 0xb9, 0x08, 0x78, 0xa9, 0xa7, 0x7e, 0xae, + 0xa4, 0x96, 0xa4, 0xce, 0x73, 0xe1, 0x75, 0x12, 0x29, 0x93, 0x14, 0x17, 0x58, 0x96, 0x49, 0xcd, + 0xb5, 0x90, 0x59, 0xb1, 0xa0, 0x78, 0xaf, 0x96, 0xa8, 0xf9, 0x45, 0xe5, 0x24, 0xc0, 0x59, 0xae, + 0xbf, 0x2c, 0xc1, 0x97, 0xeb, 0xba, 0xa8, 0x94, 0x54, 0x4b, 0xe0, 0x8d, 0x79, 0xe2, 0xa3, 0x04, + 0xb3, 0xa3, 0xe2, 0x9e, 0x27, 0x09, 0xaa, 0x40, 0xe6, 0x46, 0xf7, 0x7f, 0x8f, 0xde, 0x0f, 0x1b, + 0xdc, 0x41, 0xa9, 0xa7, 0x52, 0x89, 0xaf, 0xc8, 0xf0, 0x73, 0x89, 0x85, 0x26, 0x1d, 0x70, 0x32, + 0x3e, 0xc3, 0x22, 0xe7, 0x31, 0xb6, 0xad, 0xae, 0x75, 0xe8, 0xb0, 0xc7, 0x01, 0x79, 0x07, 0x8e, + 0xc2, 0x42, 0x96, 0x2a, 0xc6, 0xa2, 0x6d, 0x77, 0xad, 0xc3, 0x67, 0xfd, 0x03, 0x9f, 0xe7, 0xc2, + 0xff, 0x57, 0xc7, 0x67, 0x0f, 0x34, 0xf6, 0xb8, 0x41, 0x7c, 0xd8, 0x9a, 0xa3, 0x8a, 0xda, 0x75, + 0xb3, 0xe9, 0x6d, 0xde, 0x1c, 0xa3, 0x8a, 0x98, 0xe1, 0xf5, 0xfa, 0xe0, 0xac, 0x74, 0x48, 0x1b, + 0xf6, 0xae, 0x2e, 0x07, 0x61, 0x78, 0x71, 0x7e, 0x49, 0xcf, 0x6e, 0x18, 0x0d, 0x3f, 0x5e, 0xb1, + 0x53, 0x1a, 0xba, 0x35, 0xd2, 0x82, 0xe6, 0xf8, 0x82, 0x7e, 0xa2, 0x2c, 0x74, 0xad, 0xde, 0x5b, + 0xd8, 0xaa, 0x14, 0xc8, 0x2e, 0xec, 0xac, 0xd1, 0xc7, 0x94, 0x0d, 0xdd, 0x1a, 0x01, 0x68, 0x9c, + 0x32, 0x3a, 0x18, 0x51, 0xd7, 0x22, 0x4d, 0xa8, 0x9f, 0xd3, 0x91, 0x6b, 0x57, 0xc3, 0x33, 0xfa, + 0x9e, 0x8e, 0xa8, 0x5b, 0xef, 0x23, 0xb4, 0xaa, 0x40, 0x21, 0xaa, 0xb9, 0x88, 0x91, 0x8c, 0xc1, + 0x59, 0xe5, 0x23, 0x2f, 0x36, 0xe6, 0xf5, 0xf6, 0xfd, 0x45, 0x57, 0xfe, 0x43, 0x57, 0x3e, 0xad, + 0xba, 0xea, 0x79, 0xdf, 0x7e, 0xff, 0xf9, 0x65, 0xef, 0x11, 0x52, 0xd5, 0x54, 0x04, 0xf3, 0xe3, + 0x08, 0x35, 0x3f, 0x36, 0x77, 0x30, 0xfc, 0x6e, 0xfd, 0x1c, 0x7c, 0x60, 0x1d, 0x68, 0xde, 0xe2, + 0x84, 0x97, 0xa9, 0x26, 0xcf, 0xc9, 0x0e, 0x6c, 0x7b, 0x2d, 0xe3, 0x10, 0x6a, 0xae, 0xcb, 0xe2, + 0xfa, 0x00, 0x5e, 0x43, 0x63, 0x88, 0x5c, 0xa1, 0x22, 0xbb, 0x4f, 0x6c, 0x6f, 0x9b, 0x2f, 0x9d, + 0x4d, 0x89, 0x5d, 0x3b, 0x7a, 0x0a, 0xb0, 0x22, 0xd4, 0xae, 0x4f, 0x12, 0xa1, 0xa7, 0x65, 0xe4, + 0xc7, 0x72, 0x16, 0xdc, 0x95, 0x11, 0x4e, 0x52, 0x79, 0x1f, 0xe4, 0x22, 0xc7, 0x54, 0x64, 0x58, + 0x04, 0xeb, 0x27, 0x93, 0xc8, 0x9b, 0x38, 0x15, 0x98, 0xe9, 0xa8, 0x61, 0x32, 0x9f, 0xfc, 0x0d, + 0x00, 0x00, 0xff, 0xff, 0x8e, 0x47, 0x2d, 0x41, 0xaa, 0x02, 0x00, 0x00, +} diff --git a/backend/api/go_client/auth.pb.gw.go b/backend/api/go_client/auth.pb.gw.go new file mode 100755 index 00000000000..41119c97f80 --- /dev/null +++ b/backend/api/go_client/auth.pb.gw.go @@ -0,0 +1,129 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by protoc-gen-grpc-gateway. DO NOT EDIT. +// source: backend/api/auth.proto + +/* +Package go_client is a reverse proxy. + +It translates gRPC into RESTful JSON APIs. 
+*/ +package go_client + +import ( + "context" + "io" + "net/http" + + "github.com/golang/protobuf/proto" + "github.com/grpc-ecosystem/grpc-gateway/runtime" + "github.com/grpc-ecosystem/grpc-gateway/utilities" + "google.golang.org/grpc" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/grpclog" + "google.golang.org/grpc/status" +) + +var _ codes.Code +var _ io.Reader +var _ status.Status +var _ = runtime.String +var _ = utilities.NewDoubleArray + +var ( + filter_AuthService_Authorize_0 = &utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)} +) + +func request_AuthService_Authorize_0(ctx context.Context, marshaler runtime.Marshaler, client AuthServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq AuthorizeRequest + var metadata runtime.ServerMetadata + + if err := runtime.PopulateQueryParameters(&protoReq, req.URL.Query(), filter_AuthService_Authorize_0); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.Authorize(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +// RegisterAuthServiceHandlerFromEndpoint is same as RegisterAuthServiceHandler but +// automatically dials to "endpoint" and closes the connection when "ctx" gets done. +func RegisterAuthServiceHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) { + conn, err := grpc.Dial(endpoint, opts...) + if err != nil { + return err + } + defer func() { + if err != nil { + if cerr := conn.Close(); cerr != nil { + grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) + } + return + } + go func() { + <-ctx.Done() + if cerr := conn.Close(); cerr != nil { + grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) + } + }() + }() + + return RegisterAuthServiceHandler(ctx, mux, conn) +} + +// RegisterAuthServiceHandler registers the http handlers for service AuthService to "mux". +// The handlers forward requests to the grpc endpoint over "conn". +func RegisterAuthServiceHandler(ctx context.Context, mux *runtime.ServeMux, conn *grpc.ClientConn) error { + return RegisterAuthServiceHandlerClient(ctx, mux, NewAuthServiceClient(conn)) +} + +// RegisterAuthServiceHandlerClient registers the http handlers for service AuthService +// to "mux". The handlers forward requests to the grpc endpoint over the given implementation of "AuthServiceClient". +// Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "AuthServiceClient" +// doesn't go through the normal gRPC flow (creating a gRPC client etc.) then it will be up to the passed in +// "AuthServiceClient" to call the correct interceptors. 
+func RegisterAuthServiceHandlerClient(ctx context.Context, mux *runtime.ServeMux, client AuthServiceClient) error { + + mux.Handle("GET", pattern_AuthService_Authorize_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_AuthService_Authorize_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_AuthService_Authorize_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + + }) + + return nil +} + +var ( + pattern_AuthService_Authorize_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v1beta1", "auth"}, "")) +) + +var ( + forward_AuthService_Authorize_0 = runtime.ForwardResponseMessage +) diff --git a/backend/api/go_client/error.pb.go b/backend/api/go_client/error.pb.go index 1c3f9eca01e..43b9a754200 100755 --- a/backend/api/go_client/error.pb.go +++ b/backend/api/go_client/error.pb.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_client/error.pb.gw.go b/backend/api/go_client/error.pb.gw.go index 538896f6820..ee8aa8d0959 100755 --- a/backend/api/go_client/error.pb.gw.go +++ b/backend/api/go_client/error.pb.gw.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_client/experiment.pb.go b/backend/api/go_client/experiment.pb.go index fecf1747240..a352408cdfa 100755 --- a/backend/api/go_client/experiment.pb.go +++ b/backend/api/go_client/experiment.pb.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -41,6 +41,32 @@ var _ = math.Inf // proto package needs to be updated. 
const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package +type Experiment_StorageState int32 + +const ( + Experiment_STORAGESTATE_UNSPECIFIED Experiment_StorageState = 0 + Experiment_STORAGESTATE_AVAILABLE Experiment_StorageState = 1 + Experiment_STORAGESTATE_ARCHIVED Experiment_StorageState = 2 +) + +var Experiment_StorageState_name = map[int32]string{ + 0: "STORAGESTATE_UNSPECIFIED", + 1: "STORAGESTATE_AVAILABLE", + 2: "STORAGESTATE_ARCHIVED", +} +var Experiment_StorageState_value = map[string]int32{ + "STORAGESTATE_UNSPECIFIED": 0, + "STORAGESTATE_AVAILABLE": 1, + "STORAGESTATE_ARCHIVED": 2, +} + +func (x Experiment_StorageState) String() string { + return proto.EnumName(Experiment_StorageState_name, int32(x)) +} +func (Experiment_StorageState) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_experiment_b177a3d23629b3a5, []int{5, 0} +} + type CreateExperimentRequest struct { Experiment *Experiment `protobuf:"bytes,1,opt,name=experiment,proto3" json:"experiment,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` @@ -52,7 +78,7 @@ func (m *CreateExperimentRequest) Reset() { *m = CreateExperimentRequest func (m *CreateExperimentRequest) String() string { return proto.CompactTextString(m) } func (*CreateExperimentRequest) ProtoMessage() {} func (*CreateExperimentRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_experiment_da829ac35991ed0d, []int{0} + return fileDescriptor_experiment_b177a3d23629b3a5, []int{0} } func (m *CreateExperimentRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_CreateExperimentRequest.Unmarshal(m, b) @@ -90,7 +116,7 @@ func (m *GetExperimentRequest) Reset() { *m = GetExperimentRequest{} } func (m *GetExperimentRequest) String() string { return proto.CompactTextString(m) } func (*GetExperimentRequest) ProtoMessage() {} func (*GetExperimentRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_experiment_da829ac35991ed0d, []int{1} + return fileDescriptor_experiment_b177a3d23629b3a5, []int{1} } func (m *GetExperimentRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_GetExperimentRequest.Unmarshal(m, b) @@ -118,20 +144,21 @@ func (m *GetExperimentRequest) GetId() string { } type ListExperimentsRequest struct { - PageToken string `protobuf:"bytes,1,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` - PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` - SortBy string `protobuf:"bytes,3,opt,name=sort_by,json=sortBy,proto3" json:"sort_by,omitempty"` - Filter string `protobuf:"bytes,4,opt,name=filter,proto3" json:"filter,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + PageToken string `protobuf:"bytes,1,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + SortBy string `protobuf:"bytes,3,opt,name=sort_by,json=sortBy,proto3" json:"sort_by,omitempty"` + Filter string `protobuf:"bytes,4,opt,name=filter,proto3" json:"filter,omitempty"` + ResourceReferenceKey *ResourceKey `protobuf:"bytes,5,opt,name=resource_reference_key,json=resourceReferenceKey,proto3" json:"resource_reference_key,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *ListExperimentsRequest) Reset() { *m = ListExperimentsRequest{} } func (m *ListExperimentsRequest) String() 
string { return proto.CompactTextString(m) } func (*ListExperimentsRequest) ProtoMessage() {} func (*ListExperimentsRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_experiment_da829ac35991ed0d, []int{2} + return fileDescriptor_experiment_b177a3d23629b3a5, []int{2} } func (m *ListExperimentsRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_ListExperimentsRequest.Unmarshal(m, b) @@ -179,6 +206,13 @@ func (m *ListExperimentsRequest) GetFilter() string { return "" } +func (m *ListExperimentsRequest) GetResourceReferenceKey() *ResourceKey { + if m != nil { + return m.ResourceReferenceKey + } + return nil +} + type ListExperimentsResponse struct { Experiments []*Experiment `protobuf:"bytes,1,rep,name=experiments,proto3" json:"experiments,omitempty"` TotalSize int32 `protobuf:"varint,3,opt,name=total_size,json=totalSize,proto3" json:"total_size,omitempty"` @@ -192,7 +226,7 @@ func (m *ListExperimentsResponse) Reset() { *m = ListExperimentsResponse func (m *ListExperimentsResponse) String() string { return proto.CompactTextString(m) } func (*ListExperimentsResponse) ProtoMessage() {} func (*ListExperimentsResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_experiment_da829ac35991ed0d, []int{3} + return fileDescriptor_experiment_b177a3d23629b3a5, []int{3} } func (m *ListExperimentsResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_ListExperimentsResponse.Unmarshal(m, b) @@ -244,7 +278,7 @@ func (m *DeleteExperimentRequest) Reset() { *m = DeleteExperimentRequest func (m *DeleteExperimentRequest) String() string { return proto.CompactTextString(m) } func (*DeleteExperimentRequest) ProtoMessage() {} func (*DeleteExperimentRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_experiment_da829ac35991ed0d, []int{4} + return fileDescriptor_experiment_b177a3d23629b3a5, []int{4} } func (m *DeleteExperimentRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_DeleteExperimentRequest.Unmarshal(m, b) @@ -272,20 +306,22 @@ func (m *DeleteExperimentRequest) GetId() string { } type Experiment struct { - Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` - Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` - Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` - CreatedAt *timestamp.Timestamp `protobuf:"bytes,4,opt,name=created_at,json=createdAt,proto3" json:"created_at,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` + Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + CreatedAt *timestamp.Timestamp `protobuf:"bytes,4,opt,name=created_at,json=createdAt,proto3" json:"created_at,omitempty"` + ResourceReferences []*ResourceReference `protobuf:"bytes,5,rep,name=resource_references,json=resourceReferences,proto3" json:"resource_references,omitempty"` + StorageState Experiment_StorageState `protobuf:"varint,6,opt,name=storage_state,json=storageState,proto3,enum=api.Experiment_StorageState" json:"storage_state,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *Experiment) Reset() { *m = Experiment{} } func (m *Experiment) String() string { return proto.CompactTextString(m) } func (*Experiment) 
ProtoMessage() {} func (*Experiment) Descriptor() ([]byte, []int) { - return fileDescriptor_experiment_da829ac35991ed0d, []int{5} + return fileDescriptor_experiment_b177a3d23629b3a5, []int{5} } func (m *Experiment) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Experiment.Unmarshal(m, b) @@ -333,6 +369,96 @@ func (m *Experiment) GetCreatedAt() *timestamp.Timestamp { return nil } +func (m *Experiment) GetResourceReferences() []*ResourceReference { + if m != nil { + return m.ResourceReferences + } + return nil +} + +func (m *Experiment) GetStorageState() Experiment_StorageState { + if m != nil { + return m.StorageState + } + return Experiment_STORAGESTATE_UNSPECIFIED +} + +type ArchiveExperimentRequest struct { + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ArchiveExperimentRequest) Reset() { *m = ArchiveExperimentRequest{} } +func (m *ArchiveExperimentRequest) String() string { return proto.CompactTextString(m) } +func (*ArchiveExperimentRequest) ProtoMessage() {} +func (*ArchiveExperimentRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_experiment_b177a3d23629b3a5, []int{6} +} +func (m *ArchiveExperimentRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ArchiveExperimentRequest.Unmarshal(m, b) +} +func (m *ArchiveExperimentRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ArchiveExperimentRequest.Marshal(b, m, deterministic) +} +func (dst *ArchiveExperimentRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ArchiveExperimentRequest.Merge(dst, src) +} +func (m *ArchiveExperimentRequest) XXX_Size() int { + return xxx_messageInfo_ArchiveExperimentRequest.Size(m) +} +func (m *ArchiveExperimentRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ArchiveExperimentRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ArchiveExperimentRequest proto.InternalMessageInfo + +func (m *ArchiveExperimentRequest) GetId() string { + if m != nil { + return m.Id + } + return "" +} + +type UnarchiveExperimentRequest struct { + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UnarchiveExperimentRequest) Reset() { *m = UnarchiveExperimentRequest{} } +func (m *UnarchiveExperimentRequest) String() string { return proto.CompactTextString(m) } +func (*UnarchiveExperimentRequest) ProtoMessage() {} +func (*UnarchiveExperimentRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_experiment_b177a3d23629b3a5, []int{7} +} +func (m *UnarchiveExperimentRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UnarchiveExperimentRequest.Unmarshal(m, b) +} +func (m *UnarchiveExperimentRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UnarchiveExperimentRequest.Marshal(b, m, deterministic) +} +func (dst *UnarchiveExperimentRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UnarchiveExperimentRequest.Merge(dst, src) +} +func (m *UnarchiveExperimentRequest) XXX_Size() int { + return xxx_messageInfo_UnarchiveExperimentRequest.Size(m) +} +func (m *UnarchiveExperimentRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UnarchiveExperimentRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UnarchiveExperimentRequest proto.InternalMessageInfo + +func (m *UnarchiveExperimentRequest) GetId() 
string { + if m != nil { + return m.Id + } + return "" +} + func init() { proto.RegisterType((*CreateExperimentRequest)(nil), "api.CreateExperimentRequest") proto.RegisterType((*GetExperimentRequest)(nil), "api.GetExperimentRequest") @@ -340,6 +466,9 @@ func init() { proto.RegisterType((*ListExperimentsResponse)(nil), "api.ListExperimentsResponse") proto.RegisterType((*DeleteExperimentRequest)(nil), "api.DeleteExperimentRequest") proto.RegisterType((*Experiment)(nil), "api.Experiment") + proto.RegisterType((*ArchiveExperimentRequest)(nil), "api.ArchiveExperimentRequest") + proto.RegisterType((*UnarchiveExperimentRequest)(nil), "api.UnarchiveExperimentRequest") + proto.RegisterEnum("api.Experiment_StorageState", Experiment_StorageState_name, Experiment_StorageState_value) } // Reference imports to suppress errors if they are not otherwise used. @@ -358,6 +487,8 @@ type ExperimentServiceClient interface { GetExperiment(ctx context.Context, in *GetExperimentRequest, opts ...grpc.CallOption) (*Experiment, error) ListExperiment(ctx context.Context, in *ListExperimentsRequest, opts ...grpc.CallOption) (*ListExperimentsResponse, error) DeleteExperiment(ctx context.Context, in *DeleteExperimentRequest, opts ...grpc.CallOption) (*empty.Empty, error) + ArchiveExperiment(ctx context.Context, in *ArchiveExperimentRequest, opts ...grpc.CallOption) (*empty.Empty, error) + UnarchiveExperiment(ctx context.Context, in *UnarchiveExperimentRequest, opts ...grpc.CallOption) (*empty.Empty, error) } type experimentServiceClient struct { @@ -404,12 +535,32 @@ func (c *experimentServiceClient) DeleteExperiment(ctx context.Context, in *Dele return out, nil } +func (c *experimentServiceClient) ArchiveExperiment(ctx context.Context, in *ArchiveExperimentRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/api.ExperimentService/ArchiveExperiment", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *experimentServiceClient) UnarchiveExperiment(ctx context.Context, in *UnarchiveExperimentRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/api.ExperimentService/UnarchiveExperiment", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + // ExperimentServiceServer is the server API for ExperimentService service. 
type ExperimentServiceServer interface { CreateExperiment(context.Context, *CreateExperimentRequest) (*Experiment, error) GetExperiment(context.Context, *GetExperimentRequest) (*Experiment, error) ListExperiment(context.Context, *ListExperimentsRequest) (*ListExperimentsResponse, error) DeleteExperiment(context.Context, *DeleteExperimentRequest) (*empty.Empty, error) + ArchiveExperiment(context.Context, *ArchiveExperimentRequest) (*empty.Empty, error) + UnarchiveExperiment(context.Context, *UnarchiveExperimentRequest) (*empty.Empty, error) } func RegisterExperimentServiceServer(s *grpc.Server, srv ExperimentServiceServer) { @@ -488,6 +639,42 @@ func _ExperimentService_DeleteExperiment_Handler(srv interface{}, ctx context.Co return interceptor(ctx, in, info, handler) } +func _ExperimentService_ArchiveExperiment_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ArchiveExperimentRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ExperimentServiceServer).ArchiveExperiment(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/api.ExperimentService/ArchiveExperiment", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ExperimentServiceServer).ArchiveExperiment(ctx, req.(*ArchiveExperimentRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ExperimentService_UnarchiveExperiment_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UnarchiveExperimentRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ExperimentServiceServer).UnarchiveExperiment(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/api.ExperimentService/UnarchiveExperiment", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ExperimentServiceServer).UnarchiveExperiment(ctx, req.(*UnarchiveExperimentRequest)) + } + return interceptor(ctx, in, info, handler) +} + var _ExperimentService_serviceDesc = grpc.ServiceDesc{ ServiceName: "api.ExperimentService", HandlerType: (*ExperimentServiceServer)(nil), @@ -508,56 +695,79 @@ var _ExperimentService_serviceDesc = grpc.ServiceDesc{ MethodName: "DeleteExperiment", Handler: _ExperimentService_DeleteExperiment_Handler, }, + { + MethodName: "ArchiveExperiment", + Handler: _ExperimentService_ArchiveExperiment_Handler, + }, + { + MethodName: "UnarchiveExperiment", + Handler: _ExperimentService_UnarchiveExperiment_Handler, + }, }, Streams: []grpc.StreamDesc{}, Metadata: "backend/api/experiment.proto", } func init() { - proto.RegisterFile("backend/api/experiment.proto", fileDescriptor_experiment_da829ac35991ed0d) -} - -var fileDescriptor_experiment_da829ac35991ed0d = []byte{ - // 652 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x54, 0x4d, 0x4f, 0xdb, 0x4c, - 0x10, 0x7e, 0xed, 0xf0, 0x06, 0x32, 0x79, 0x03, 0x2f, 0xdb, 0x8a, 0x04, 0x27, 0x94, 0xd4, 0x07, - 0x4a, 0xab, 0x62, 0x0b, 0x38, 0xb5, 0x37, 0x68, 0x51, 0xa5, 0xaa, 0x95, 0xaa, 0xc0, 0x89, 0x4b, - 0xb4, 0xb6, 0x27, 0x66, 0x85, 0xe3, 0x75, 0x77, 0xd7, 0x7c, 0x55, 0xbd, 0x54, 0xe2, 0xd8, 0x4b, - 0xf9, 0x5b, 0xbd, 0xf5, 0x2f, 0xf4, 0x87, 0x54, 0x5e, 0x3b, 0xc4, 0xe4, 0x43, 0x3d, 0x25, 0x33, - 0xf3, 0xec, 0xcc, 0xf3, 0xcc, 0x3e, 0x5e, 
0xe8, 0x78, 0xd4, 0x3f, 0xc7, 0x38, 0x70, 0x69, 0xc2, - 0x5c, 0xbc, 0x4a, 0x50, 0xb0, 0x21, 0xc6, 0xca, 0x49, 0x04, 0x57, 0x9c, 0x54, 0x68, 0xc2, 0xac, - 0x4e, 0xc8, 0x79, 0x18, 0xa1, 0x46, 0xd0, 0x38, 0xe6, 0x8a, 0x2a, 0xc6, 0x63, 0x99, 0x43, 0xac, - 0x76, 0x51, 0xd5, 0x91, 0x97, 0x0e, 0x5c, 0x1c, 0x26, 0xea, 0xba, 0x28, 0x6e, 0x4e, 0x16, 0x15, - 0x1b, 0xa2, 0x54, 0x74, 0x98, 0x14, 0x80, 0x97, 0xfa, 0xc7, 0xdf, 0x09, 0x31, 0xde, 0x91, 0x97, - 0x34, 0x0c, 0x51, 0xb8, 0x3c, 0xd1, 0xfd, 0x67, 0xcc, 0x6a, 0x3e, 0x20, 0x2b, 0x04, 0x17, 0x79, - 0xc1, 0x7e, 0x0f, 0xcd, 0x37, 0x02, 0xa9, 0xc2, 0xa3, 0x7b, 0x05, 0x3d, 0xfc, 0x9c, 0xa2, 0x54, - 0xc4, 0x05, 0x18, 0xcb, 0x6a, 0x19, 0x5d, 0x63, 0xbb, 0xbe, 0xb7, 0xe2, 0xd0, 0x84, 0x39, 0x25, - 0x6c, 0x09, 0x62, 0x6f, 0xc1, 0xe3, 0x77, 0xa8, 0xa6, 0x1b, 0x2d, 0x83, 0xc9, 0x02, 0xdd, 0xa0, - 0xd6, 0x33, 0x59, 0x60, 0xdf, 0x1a, 0xb0, 0xf6, 0x81, 0xc9, 0x12, 0x52, 0x8e, 0xa0, 0x1b, 0x00, - 0x09, 0x0d, 0xb1, 0xaf, 0xf8, 0x39, 0xc6, 0xc5, 0x91, 0x5a, 0x96, 0x39, 0xc9, 0x12, 0xa4, 0x0d, - 0x3a, 0xe8, 0x4b, 0x76, 0x83, 0x2d, 0xb3, 0x6b, 0x6c, 0xff, 0xdb, 0x5b, 0xca, 0x12, 0xc7, 0xec, - 0x06, 0x49, 0x13, 0x16, 0x25, 0x17, 0xaa, 0xef, 0x5d, 0xb7, 0x2a, 0xfa, 0x60, 0x35, 0x0b, 0x0f, - 0xaf, 0xc9, 0x1a, 0x54, 0x07, 0x2c, 0x52, 0x28, 0x5a, 0x0b, 0x79, 0x3e, 0x8f, 0xec, 0x3b, 0x03, - 0x9a, 0x53, 0x3c, 0x64, 0xc2, 0x63, 0x89, 0x64, 0x17, 0xea, 0x63, 0x65, 0xb2, 0x65, 0x74, 0x2b, - 0xb3, 0xd4, 0x97, 0x31, 0x19, 0x77, 0xc5, 0x15, 0x8d, 0x72, 0x76, 0x15, 0xcd, 0xae, 0xa6, 0x33, - 0x9a, 0xde, 0x16, 0xac, 0xc4, 0x78, 0xa5, 0xfa, 0x25, 0x7d, 0xa6, 0xa6, 0xd3, 0xc8, 0xd2, 0x9f, - 0x46, 0x1a, 0xed, 0xe7, 0xd0, 0x7c, 0x8b, 0x11, 0xce, 0xba, 0x91, 0xc9, 0x45, 0x7e, 0x37, 0x00, - 0xc6, 0xa8, 0xc9, 0x32, 0x21, 0xb0, 0x10, 0xd3, 0x21, 0x16, 0x63, 0xf4, 0x7f, 0xd2, 0x85, 0x7a, - 0x80, 0xd2, 0x17, 0x4c, 0x5b, 0xa5, 0x58, 0x54, 0x39, 0x45, 0x5e, 0x01, 0xf8, 0xda, 0x11, 0x41, - 0x9f, 0x2a, 0xbd, 0xb1, 0xfa, 0x9e, 0xe5, 0xe4, 0x76, 0x74, 0x46, 0x76, 0x74, 0x4e, 0x46, 0x76, - 0xec, 0xd5, 0x0a, 0xf4, 0x81, 0xda, 0xfb, 0x59, 0x81, 0xd5, 0x31, 0x9f, 0x63, 0x14, 0x17, 0xcc, - 0x47, 0x92, 0xc0, 0xff, 0x93, 0x16, 0x23, 0x1d, 0xbd, 0xc9, 0x39, 0xce, 0xb3, 0x26, 0xf7, 0x6c, - 0xef, 0x7c, 0xfb, 0xf5, 0xfb, 0xce, 0x7c, 0x66, 0xaf, 0x67, 0xfe, 0x95, 0xee, 0xc5, 0xae, 0x87, - 0x8a, 0xee, 0x96, 0xbe, 0x3a, 0xf9, 0xba, 0x64, 0x44, 0xe2, 0x43, 0xe3, 0x81, 0x11, 0xc9, 0xba, - 0x6e, 0x38, 0xcb, 0x9c, 0xd3, 0xb3, 0xb6, 0xf4, 0xac, 0x2e, 0x79, 0x32, 0x77, 0x96, 0xfb, 0x85, - 0x05, 0x5f, 0x49, 0x0c, 0xcb, 0x0f, 0xcd, 0x43, 0xda, 0xba, 0xd5, 0x6c, 0x67, 0x5b, 0x9d, 0xd9, - 0xc5, 0xdc, 0x6e, 0xf6, 0x53, 0x3d, 0xb4, 0x4d, 0xe6, 0x0b, 0xcc, 0xd6, 0x38, 0xe9, 0x8b, 0x62, - 0x8d, 0x73, 0xec, 0x62, 0xad, 0x4d, 0xdd, 0xda, 0x51, 0xf6, 0xc2, 0x8c, 0x14, 0xbe, 0xf8, 0x8b, - 0xc2, 0xc3, 0x5b, 0xe3, 0xc7, 0xc1, 0xc7, 0x5e, 0x07, 0x16, 0x03, 0x1c, 0xd0, 0x34, 0x52, 0x64, - 0x95, 0xac, 0x40, 0xc3, 0xaa, 0xeb, 0xa1, 0xc7, 0x8a, 0xaa, 0x54, 0x9e, 0x6e, 0xc2, 0x06, 0x54, - 0x0f, 0x91, 0x0a, 0x14, 0xe4, 0xd1, 0x92, 0x69, 0x35, 0x68, 0xaa, 0xce, 0xb8, 0x60, 0x37, 0xfa, - 0xf9, 0xe9, 0x9a, 0xde, 0x7f, 0x00, 0xf7, 0x80, 0x7f, 0x4e, 0xf7, 0x43, 0xa6, 0xce, 0x52, 0xcf, - 0xf1, 0xf9, 0xd0, 0x3d, 0x4f, 0x3d, 0x1c, 0x44, 0xfc, 0xd2, 0x4d, 0x58, 0x82, 0x11, 0x8b, 0x51, - 0xba, 0xe5, 0x57, 0x2a, 0xe4, 0x7d, 0x3f, 0x62, 0x18, 0x2b, 0xaf, 0xaa, 0xf9, 0xef, 0xff, 0x09, - 0x00, 0x00, 0xff, 0xff, 0x4a, 0x7b, 0xe8, 0x91, 0x72, 0x05, 0x00, 0x00, + proto.RegisterFile("backend/api/experiment.proto", 
fileDescriptor_experiment_b177a3d23629b3a5) +} + +var fileDescriptor_experiment_b177a3d23629b3a5 = []byte{ + // 889 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x55, 0x51, 0x53, 0xdb, 0x46, + 0x10, 0x8e, 0x4c, 0x70, 0xc2, 0x1a, 0x83, 0x39, 0x52, 0x5b, 0x08, 0x53, 0x5c, 0x4d, 0x87, 0xba, + 0x4c, 0xb0, 0x0a, 0x79, 0x6a, 0xde, 0x0c, 0x38, 0x94, 0x86, 0xb6, 0x19, 0xd9, 0xc9, 0x43, 0x5e, + 0x3c, 0x67, 0x79, 0x6d, 0x6e, 0xb0, 0x75, 0xea, 0xdd, 0x89, 0xc4, 0x74, 0x3a, 0xd3, 0xe9, 0x4c, + 0xff, 0x40, 0xf3, 0xb3, 0x3a, 0x7d, 0xea, 0x5f, 0xe8, 0xef, 0xe8, 0x74, 0x74, 0x96, 0x41, 0xb6, + 0xec, 0x84, 0x27, 0xb8, 0xdd, 0xcf, 0xb7, 0xf7, 0x7d, 0xfb, 0xed, 0x0a, 0xca, 0x1d, 0xea, 0x5d, + 0xa1, 0xdf, 0x75, 0x68, 0xc0, 0x1c, 0x7c, 0x1f, 0xa0, 0x60, 0x43, 0xf4, 0x55, 0x2d, 0x10, 0x5c, + 0x71, 0xb2, 0x44, 0x03, 0x66, 0x95, 0xa6, 0x20, 0x42, 0x70, 0x31, 0xce, 0x5a, 0x5f, 0x26, 0x13, + 0x02, 0x25, 0x0f, 0x85, 0x87, 0x6d, 0x81, 0x3d, 0x14, 0xe8, 0x7b, 0x18, 0xa3, 0xca, 0x7d, 0xce, + 0xfb, 0x03, 0xd4, 0x20, 0xea, 0xfb, 0x5c, 0x51, 0xc5, 0xb8, 0x2f, 0xe3, 0xec, 0x76, 0x9c, 0xd5, + 0xa7, 0x4e, 0xd8, 0x73, 0x70, 0x18, 0xa8, 0x51, 0x9c, 0xdc, 0x9d, 0x4d, 0x2a, 0x36, 0x44, 0xa9, + 0xe8, 0x30, 0x88, 0x01, 0x4f, 0xf5, 0x1f, 0xef, 0xa0, 0x8f, 0xfe, 0x81, 0x7c, 0x47, 0xfb, 0x7d, + 0x14, 0x0e, 0x0f, 0xf4, 0xfd, 0xe9, 0x5a, 0xf6, 0xf7, 0x50, 0x3a, 0x11, 0x48, 0x15, 0x36, 0x6e, + 0x79, 0xba, 0xf8, 0x73, 0x88, 0x52, 0x11, 0x07, 0xe0, 0x8e, 0xbc, 0x69, 0x54, 0x8c, 0x6a, 0xee, + 0x68, 0xbd, 0x46, 0x03, 0x56, 0x4b, 0x60, 0x13, 0x10, 0x7b, 0x0f, 0x9e, 0x9c, 0xa1, 0x4a, 0x5f, + 0xb4, 0x06, 0x19, 0xd6, 0xd5, 0x17, 0xac, 0xb8, 0x19, 0xd6, 0xb5, 0xff, 0x36, 0xa0, 0x78, 0xc1, + 0x64, 0x02, 0x29, 0x27, 0xd0, 0x1d, 0x80, 0x80, 0xf6, 0xb1, 0xad, 0xf8, 0x15, 0xfa, 0xf1, 0x4f, + 0x56, 0xa2, 0x48, 0x2b, 0x0a, 0x90, 0x6d, 0xd0, 0x87, 0xb6, 0x64, 0x37, 0x68, 0x66, 0x2a, 0x46, + 0x75, 0xd9, 0x7d, 0x1c, 0x05, 0x9a, 0xec, 0x06, 0x49, 0x09, 0x1e, 0x49, 0x2e, 0x54, 0xbb, 0x33, + 0x32, 0x97, 0xf4, 0x0f, 0xb3, 0xd1, 0xf1, 0x78, 0x44, 0x8a, 0x90, 0xed, 0xb1, 0x81, 0x42, 0x61, + 0x3e, 0x1c, 0xc7, 0xc7, 0x27, 0xf2, 0x02, 0x8a, 0xe9, 0x0e, 0xb5, 0xaf, 0x70, 0x64, 0x2e, 0x6b, + 0xb2, 0x05, 0x4d, 0xd6, 0x8d, 0x21, 0x2f, 0x71, 0xe4, 0x3e, 0x99, 0xe0, 0xdd, 0x09, 0xfc, 0x25, + 0x8e, 0xec, 0x0f, 0x06, 0x94, 0x52, 0x7c, 0x64, 0xc0, 0x7d, 0x89, 0xe4, 0x10, 0x72, 0x77, 0x0a, + 0x49, 0xd3, 0xa8, 0x2c, 0xcd, 0x53, 0x31, 0x89, 0x89, 0x34, 0x50, 0x5c, 0xd1, 0xc1, 0x98, 0xe5, + 0x92, 0x66, 0xb9, 0xa2, 0x23, 0x9a, 0xe6, 0x1e, 0xac, 0xfb, 0xf8, 0x5e, 0xb5, 0x13, 0x3a, 0x65, + 0x34, 0xad, 0x7c, 0x14, 0x7e, 0x35, 0xd1, 0xca, 0xfe, 0x1a, 0x4a, 0xa7, 0x38, 0xc0, 0x79, 0x9d, + 0x9d, 0x6d, 0xc8, 0x7f, 0x19, 0x80, 0x3b, 0xd4, 0x6c, 0x9a, 0x10, 0x78, 0xe8, 0xd3, 0x21, 0xc6, + 0x65, 0xf4, 0xff, 0xa4, 0x02, 0xb9, 0x2e, 0x4a, 0x4f, 0x30, 0xed, 0xac, 0x58, 0xf0, 0x64, 0x88, + 0x7c, 0x0b, 0xe0, 0x69, 0x67, 0x75, 0xdb, 0x54, 0x69, 0xe5, 0x73, 0x47, 0x56, 0x6d, 0xec, 0xde, + 0xda, 0xc4, 0xbd, 0xb5, 0xd6, 0xc4, 0xbd, 0xee, 0x4a, 0x8c, 0xae, 0x2b, 0x72, 0x06, 0x9b, 0xe9, + 0xc6, 0x48, 0x73, 0x59, 0x8b, 0x57, 0x9c, 0xea, 0xca, 0x6d, 0x23, 0x5c, 0x92, 0xea, 0x8d, 0x24, + 0x75, 0xc8, 0x4b, 0xc5, 0x85, 0xb6, 0x8c, 0xa2, 0x0a, 0xcd, 0x6c, 0xc5, 0xa8, 0xae, 0x1d, 0x95, + 0x67, 0xf4, 0xaf, 0x35, 0xc7, 0xa0, 0x66, 0x84, 0x71, 0x57, 0x65, 0xe2, 0x64, 0x7b, 0xb0, 0x9a, + 0xcc, 0x92, 0x32, 0x98, 0xcd, 0xd6, 0x4f, 0x6e, 0xfd, 0xac, 0xd1, 0x6c, 0xd5, 0x5b, 0x8d, 0xf6, + 0xeb, 0x1f, 0x9b, 0xaf, 0x1a, 0x27, 0xe7, 0x2f, 0xce, 0x1b, 0xa7, 
0x85, 0x07, 0xc4, 0x82, 0xe2, + 0x54, 0xb6, 0xfe, 0xa6, 0x7e, 0x7e, 0x51, 0x3f, 0xbe, 0x68, 0x14, 0x0c, 0xb2, 0x05, 0x9f, 0x4d, + 0xe7, 0xdc, 0x93, 0xef, 0xce, 0xdf, 0x34, 0x4e, 0x0b, 0x19, 0x7b, 0x1f, 0xcc, 0xba, 0xf0, 0x2e, + 0xd9, 0xf5, 0x3d, 0x9a, 0xf5, 0x14, 0xac, 0xd7, 0x3e, 0xbd, 0x27, 0xfa, 0xe8, 0xaf, 0x65, 0xd8, + 0xb8, 0x43, 0x35, 0x51, 0x5c, 0x33, 0x0f, 0x49, 0x00, 0x85, 0xd9, 0xa9, 0x27, 0x63, 0x51, 0x16, + 0x2c, 0x03, 0x6b, 0xd6, 0xb2, 0xf6, 0xc1, 0xef, 0xff, 0xfc, 0xfb, 0x21, 0xf3, 0x95, 0xbd, 0x15, + 0x2d, 0x31, 0xe9, 0x5c, 0x1f, 0x76, 0x50, 0xd1, 0xc3, 0xc4, 0xba, 0x94, 0xcf, 0x13, 0xbb, 0x81, + 0x78, 0x90, 0x9f, 0xda, 0x0d, 0x64, 0x4b, 0x5f, 0x38, 0x6f, 0x5f, 0xa4, 0x6b, 0xed, 0xe9, 0x5a, + 0x15, 0xf2, 0xf9, 0xc2, 0x5a, 0xce, 0x2f, 0xac, 0xfb, 0x2b, 0xf1, 0x61, 0x6d, 0x7a, 0x0e, 0xc9, + 0xb6, 0xbe, 0x6a, 0xfe, 0xb2, 0xb1, 0xca, 0xf3, 0x93, 0xe3, 0xc9, 0xb5, 0xbf, 0xd0, 0x45, 0xb7, + 0xc9, 0x62, 0x82, 0x91, 0x8c, 0xb3, 0x23, 0x16, 0xcb, 0xb8, 0x60, 0xf2, 0xac, 0x62, 0x6a, 0x00, + 0x1a, 0xd1, 0x6e, 0x9f, 0x30, 0xdc, 0xff, 0x14, 0xc3, 0x1b, 0xd8, 0x48, 0x19, 0x85, 0xec, 0xe8, + 0x92, 0x8b, 0x0c, 0xb4, 0xb0, 0x66, 0x4d, 0xd7, 0xac, 0xda, 0x7b, 0x1f, 0xaf, 0xf9, 0x3c, 0xf6, + 0x1a, 0xf9, 0xcd, 0x80, 0xcd, 0x39, 0xce, 0x23, 0xbb, 0xba, 0xfc, 0x62, 0x4f, 0x2e, 0x7c, 0xc0, + 0x37, 0xfa, 0x01, 0xfb, 0x76, 0xf5, 0x13, 0x0f, 0x08, 0x27, 0x57, 0x1f, 0xff, 0x61, 0xfc, 0x59, + 0xff, 0xc1, 0x2d, 0xc3, 0xa3, 0x2e, 0xf6, 0x68, 0x38, 0x50, 0x64, 0x83, 0xac, 0x43, 0xde, 0xca, + 0xe9, 0x17, 0x44, 0xf3, 0x19, 0xca, 0xb7, 0xbb, 0xb0, 0x03, 0xd9, 0x63, 0xa4, 0x02, 0x05, 0xd9, + 0x7c, 0x9c, 0xb1, 0xf2, 0x34, 0x54, 0x97, 0x5c, 0xb0, 0x1b, 0xfd, 0xdd, 0xab, 0x64, 0x3a, 0xab, + 0x00, 0xb7, 0x80, 0x07, 0x6f, 0x9f, 0xf5, 0x99, 0xba, 0x0c, 0x3b, 0x35, 0x8f, 0x0f, 0x9d, 0xab, + 0xb0, 0x83, 0xbd, 0x01, 0x7f, 0xe7, 0x04, 0x2c, 0xc0, 0x01, 0xf3, 0x51, 0x3a, 0xc9, 0xcf, 0x79, + 0x9f, 0xb7, 0xbd, 0x01, 0x43, 0x5f, 0x75, 0xb2, 0x9a, 0xc9, 0xb3, 0xff, 0x03, 0x00, 0x00, 0xff, + 0xff, 0x0b, 0x8f, 0xfa, 0xad, 0x2a, 0x08, 0x00, 0x00, } diff --git a/backend/api/go_client/experiment.pb.gw.go b/backend/api/go_client/experiment.pb.gw.go index 41e418679cc..43eb71f5bc5 100755 --- a/backend/api/go_client/experiment.pb.gw.go +++ b/backend/api/go_client/experiment.pb.gw.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -130,6 +130,60 @@ func request_ExperimentService_DeleteExperiment_0(ctx context.Context, marshaler } +func request_ExperimentService_ArchiveExperiment_0(ctx context.Context, marshaler runtime.Marshaler, client ExperimentServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq ArchiveExperimentRequest + var metadata runtime.ServerMetadata + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["id"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id") + } + + protoReq.Id, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id", err) + } + + msg, err := client.ArchiveExperiment(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func request_ExperimentService_UnarchiveExperiment_0(ctx context.Context, marshaler runtime.Marshaler, client ExperimentServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq UnarchiveExperimentRequest + var metadata runtime.ServerMetadata + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["id"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "id") + } + + protoReq.Id, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "id", err) + } + + msg, err := client.UnarchiveExperiment(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + // RegisterExperimentServiceHandlerFromEndpoint is same as RegisterExperimentServiceHandler but // automatically dials to "endpoint" and closes the connection when "ctx" gets done. func RegisterExperimentServiceHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) { @@ -248,6 +302,46 @@ func RegisterExperimentServiceHandlerClient(ctx context.Context, mux *runtime.Se }) + mux.Handle("POST", pattern_ExperimentService_ArchiveExperiment_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_ExperimentService_ArchiveExperiment_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_ExperimentService_ArchiveExperiment_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + + mux.Handle("POST", pattern_ExperimentService_UnarchiveExperiment_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_ExperimentService_UnarchiveExperiment_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_ExperimentService_UnarchiveExperiment_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + + }) + return nil } @@ -259,6 +353,10 @@ var ( pattern_ExperimentService_ListExperiment_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v1beta1", "experiments"}, "")) pattern_ExperimentService_DeleteExperiment_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v1beta1", "experiments", "id"}, "")) + + pattern_ExperimentService_ArchiveExperiment_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v1beta1", "experiments", "id"}, "archive")) + + pattern_ExperimentService_UnarchiveExperiment_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v1beta1", "experiments", "id"}, "unarchive")) ) var ( @@ -269,4 +367,8 @@ var ( forward_ExperimentService_ListExperiment_0 = runtime.ForwardResponseMessage forward_ExperimentService_DeleteExperiment_0 = runtime.ForwardResponseMessage + + forward_ExperimentService_ArchiveExperiment_0 = runtime.ForwardResponseMessage + + forward_ExperimentService_UnarchiveExperiment_0 = runtime.ForwardResponseMessage ) diff --git a/backend/api/go_client/filter.pb.go b/backend/api/go_client/filter.pb.go index aa9ad68ecae..088d1aa0dd6 100755 --- a/backend/api/go_client/filter.pb.go +++ b/backend/api/go_client/filter.pb.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_client/filter.pb.gw.go b/backend/api/go_client/filter.pb.gw.go index 538896f6820..ee8aa8d0959 100755 --- a/backend/api/go_client/filter.pb.gw.go +++ b/backend/api/go_client/filter.pb.gw.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_client/job.pb.go b/backend/api/go_client/job.pb.go index 3d5a8802694..0fa9852fa57 100755 --- a/backend/api/go_client/job.pb.go +++ b/backend/api/go_client/job.pb.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
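Note on the experiment.pb.gw.go hunks above, before the job.pb.go changes continue: the two new runtime patterns end with the custom verbs "archive" and "unarchive", so grpc-gateway serves them as POST /apis/v1beta1/experiments/{id}:archive and POST /apis/v1beta1/experiments/{id}:unarchive, with the "id" path parameter parsed by the new request_ functions. A minimal sketch of exercising the archive route over plain HTTP; the host, port, and experiment ID are hypothetical placeholders, not values from this patch:

package main

import (
	"fmt"
	"log"
	"net/http"
	"strings"
)

func main() {
	// POST with an empty JSON body; the experiment ID travels in the URL
	// path, and the ":archive" suffix selects the custom-verb pattern.
	url := "http://localhost:8888/apis/v1beta1/experiments/abc123:archive"
	resp, err := http.Post(url, "application/json", strings.NewReader("{}"))
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()
	fmt.Println(resp.Status) // e.g. "200 OK" once the experiment is archived
}

The unarchive route is symmetric; only the verb suffix changes.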
@@ -64,7 +64,7 @@ func (x Job_Mode) String() string { return proto.EnumName(Job_Mode_name, int32(x)) } func (Job_Mode) EnumDescriptor() ([]byte, []int) { - return fileDescriptor_job_f8459da7f9a91531, []int{10, 0} + return fileDescriptor_job_a950c472a7b16ea0, []int{10, 0} } type CreateJobRequest struct { @@ -78,7 +78,7 @@ func (m *CreateJobRequest) Reset() { *m = CreateJobRequest{} } func (m *CreateJobRequest) String() string { return proto.CompactTextString(m) } func (*CreateJobRequest) ProtoMessage() {} func (*CreateJobRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_job_f8459da7f9a91531, []int{0} + return fileDescriptor_job_a950c472a7b16ea0, []int{0} } func (m *CreateJobRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_CreateJobRequest.Unmarshal(m, b) @@ -116,7 +116,7 @@ func (m *GetJobRequest) Reset() { *m = GetJobRequest{} } func (m *GetJobRequest) String() string { return proto.CompactTextString(m) } func (*GetJobRequest) ProtoMessage() {} func (*GetJobRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_job_f8459da7f9a91531, []int{1} + return fileDescriptor_job_a950c472a7b16ea0, []int{1} } func (m *GetJobRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_GetJobRequest.Unmarshal(m, b) @@ -158,7 +158,7 @@ func (m *ListJobsRequest) Reset() { *m = ListJobsRequest{} } func (m *ListJobsRequest) String() string { return proto.CompactTextString(m) } func (*ListJobsRequest) ProtoMessage() {} func (*ListJobsRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_job_f8459da7f9a91531, []int{2} + return fileDescriptor_job_a950c472a7b16ea0, []int{2} } func (m *ListJobsRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_ListJobsRequest.Unmarshal(m, b) @@ -226,7 +226,7 @@ func (m *ListJobsResponse) Reset() { *m = ListJobsResponse{} } func (m *ListJobsResponse) String() string { return proto.CompactTextString(m) } func (*ListJobsResponse) ProtoMessage() {} func (*ListJobsResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_job_f8459da7f9a91531, []int{3} + return fileDescriptor_job_a950c472a7b16ea0, []int{3} } func (m *ListJobsResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_ListJobsResponse.Unmarshal(m, b) @@ -278,7 +278,7 @@ func (m *DeleteJobRequest) Reset() { *m = DeleteJobRequest{} } func (m *DeleteJobRequest) String() string { return proto.CompactTextString(m) } func (*DeleteJobRequest) ProtoMessage() {} func (*DeleteJobRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_job_f8459da7f9a91531, []int{4} + return fileDescriptor_job_a950c472a7b16ea0, []int{4} } func (m *DeleteJobRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_DeleteJobRequest.Unmarshal(m, b) @@ -316,7 +316,7 @@ func (m *EnableJobRequest) Reset() { *m = EnableJobRequest{} } func (m *EnableJobRequest) String() string { return proto.CompactTextString(m) } func (*EnableJobRequest) ProtoMessage() {} func (*EnableJobRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_job_f8459da7f9a91531, []int{5} + return fileDescriptor_job_a950c472a7b16ea0, []int{5} } func (m *EnableJobRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_EnableJobRequest.Unmarshal(m, b) @@ -354,7 +354,7 @@ func (m *DisableJobRequest) Reset() { *m = DisableJobRequest{} } func (m *DisableJobRequest) String() string { return proto.CompactTextString(m) } func (*DisableJobRequest) ProtoMessage() {} func (*DisableJobRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_job_f8459da7f9a91531, []int{6} + return 
fileDescriptor_job_a950c472a7b16ea0, []int{6} } func (m *DisableJobRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_DisableJobRequest.Unmarshal(m, b) @@ -394,7 +394,7 @@ func (m *CronSchedule) Reset() { *m = CronSchedule{} } func (m *CronSchedule) String() string { return proto.CompactTextString(m) } func (*CronSchedule) ProtoMessage() {} func (*CronSchedule) Descriptor() ([]byte, []int) { - return fileDescriptor_job_f8459da7f9a91531, []int{7} + return fileDescriptor_job_a950c472a7b16ea0, []int{7} } func (m *CronSchedule) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_CronSchedule.Unmarshal(m, b) @@ -448,7 +448,7 @@ func (m *PeriodicSchedule) Reset() { *m = PeriodicSchedule{} } func (m *PeriodicSchedule) String() string { return proto.CompactTextString(m) } func (*PeriodicSchedule) ProtoMessage() {} func (*PeriodicSchedule) Descriptor() ([]byte, []int) { - return fileDescriptor_job_f8459da7f9a91531, []int{8} + return fileDescriptor_job_a950c472a7b16ea0, []int{8} } func (m *PeriodicSchedule) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_PeriodicSchedule.Unmarshal(m, b) @@ -503,7 +503,7 @@ func (m *Trigger) Reset() { *m = Trigger{} } func (m *Trigger) String() string { return proto.CompactTextString(m) } func (*Trigger) ProtoMessage() {} func (*Trigger) Descriptor() ([]byte, []int) { - return fileDescriptor_job_f8459da7f9a91531, []int{9} + return fileDescriptor_job_a950c472a7b16ea0, []int{9} } func (m *Trigger) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Trigger.Unmarshal(m, b) @@ -640,6 +640,7 @@ type Job struct { Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` PipelineSpec *PipelineSpec `protobuf:"bytes,4,opt,name=pipeline_spec,json=pipelineSpec,proto3" json:"pipeline_spec,omitempty"` ResourceReferences []*ResourceReference `protobuf:"bytes,5,rep,name=resource_references,json=resourceReferences,proto3" json:"resource_references,omitempty"` + ServiceAccount string `protobuf:"bytes,18,opt,name=service_account,json=serviceAccount,proto3" json:"service_account,omitempty"` MaxConcurrency int64 `protobuf:"varint,6,opt,name=max_concurrency,json=maxConcurrency,proto3" json:"max_concurrency,omitempty"` Trigger *Trigger `protobuf:"bytes,7,opt,name=trigger,proto3" json:"trigger,omitempty"` Mode Job_Mode `protobuf:"varint,8,opt,name=mode,proto3,enum=api.Job_Mode" json:"mode,omitempty"` @@ -648,6 +649,7 @@ type Job struct { Status string `protobuf:"bytes,11,opt,name=status,proto3" json:"status,omitempty"` Error string `protobuf:"bytes,12,opt,name=error,proto3" json:"error,omitempty"` Enabled bool `protobuf:"varint,16,opt,name=enabled,proto3" json:"enabled,omitempty"` + NoCatchup bool `protobuf:"varint,17,opt,name=no_catchup,json=noCatchup,proto3" json:"no_catchup,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -657,7 +659,7 @@ func (m *Job) Reset() { *m = Job{} } func (m *Job) String() string { return proto.CompactTextString(m) } func (*Job) ProtoMessage() {} func (*Job) Descriptor() ([]byte, []int) { - return fileDescriptor_job_f8459da7f9a91531, []int{10} + return fileDescriptor_job_a950c472a7b16ea0, []int{10} } func (m *Job) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Job.Unmarshal(m, b) @@ -712,6 +714,13 @@ func (m *Job) GetResourceReferences() []*ResourceReference { return nil } +func (m *Job) GetServiceAccount() string { + if m != nil { + return m.ServiceAccount + } + return "" +} + func (m *Job) GetMaxConcurrency() int64 { 
if m != nil { return m.MaxConcurrency @@ -768,6 +777,13 @@ func (m *Job) GetEnabled() bool { return false } +func (m *Job) GetNoCatchup() bool { + if m != nil { + return m.NoCatchup + } + return false +} + func init() { proto.RegisterType((*CreateJobRequest)(nil), "api.CreateJobRequest") proto.RegisterType((*GetJobRequest)(nil), "api.GetJobRequest") @@ -1020,79 +1036,81 @@ var _JobService_serviceDesc = grpc.ServiceDesc{ Metadata: "backend/api/job.proto", } -func init() { proto.RegisterFile("backend/api/job.proto", fileDescriptor_job_f8459da7f9a91531) } - -var fileDescriptor_job_f8459da7f9a91531 = []byte{ - // 1126 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xc4, 0x56, 0xdb, 0x72, 0x1b, 0x45, - 0x13, 0xb6, 0x0e, 0xd6, 0xa1, 0x2d, 0xc5, 0xf2, 0xc4, 0x76, 0xf6, 0x57, 0x92, 0xdf, 0xca, 0x42, - 0x25, 0x2e, 0x8a, 0x48, 0x95, 0xa4, 0xa0, 0x80, 0x3b, 0x3b, 0x32, 0x09, 0x49, 0xec, 0xa4, 0x56, - 0xa1, 0xa8, 0x0a, 0x17, 0x5b, 0xb3, 0xbb, 0x6d, 0x65, 0x62, 0x69, 0x67, 0x99, 0x99, 0x4d, 0x2c, - 0x53, 0xdc, 0x50, 0xc5, 0x0b, 0x00, 0x2f, 0xc0, 0x03, 0x70, 0xc5, 0x3b, 0xf0, 0x02, 0xbc, 0x02, - 0x0f, 0x42, 0xcd, 0xec, 0xac, 0xac, 0x43, 0x8c, 0x2f, 0xb9, 0x92, 0xfa, 0x9b, 0xaf, 0xa7, 0x4f, - 0xd3, 0xdd, 0x0b, 0x5b, 0x01, 0x0d, 0x4f, 0x30, 0x8e, 0x7a, 0x34, 0x61, 0xbd, 0x37, 0x3c, 0xe8, - 0x26, 0x82, 0x2b, 0x4e, 0x4a, 0x34, 0x61, 0xed, 0x1b, 0x43, 0xce, 0x87, 0x23, 0x34, 0x47, 0x34, - 0x8e, 0xb9, 0xa2, 0x8a, 0xf1, 0x58, 0x66, 0x94, 0xf6, 0x8e, 0x3d, 0x35, 0x52, 0x90, 0x1e, 0xf7, - 0x14, 0x1b, 0xa3, 0x54, 0x74, 0x9c, 0x58, 0xc2, 0xf5, 0x45, 0x02, 0x8e, 0x13, 0x35, 0xc9, 0x0f, - 0x67, 0xed, 0x26, 0x54, 0xd0, 0x31, 0x2a, 0x14, 0xf9, 0xd5, 0x73, 0x87, 0x2c, 0xc1, 0x11, 0x8b, - 0xd1, 0x97, 0x09, 0x86, 0x96, 0xf0, 0xe1, 0x2c, 0x41, 0xa0, 0xe4, 0xa9, 0x08, 0xd1, 0x17, 0x78, - 0x8c, 0x02, 0xe3, 0x10, 0x2d, 0x6b, 0x2e, 0x36, 0x91, 0xc6, 0x16, 0xfe, 0xd8, 0xfc, 0x84, 0x77, - 0x87, 0x18, 0xdf, 0x95, 0xef, 0xe8, 0x70, 0x88, 0xa2, 0xc7, 0x13, 0x13, 0xda, 0x7b, 0xc2, 0xbc, - 0x36, 0x7b, 0x09, 0x0a, 0xc1, 0xad, 0x93, 0x6e, 0x17, 0x5a, 0x0f, 0x05, 0x52, 0x85, 0x4f, 0x78, - 0xe0, 0xe1, 0x77, 0x29, 0x4a, 0x45, 0xda, 0x50, 0x7a, 0xc3, 0x03, 0xa7, 0xd0, 0x29, 0xec, 0xae, - 0xdd, 0xaf, 0x75, 0x69, 0xc2, 0xba, 0xfa, 0x54, 0x83, 0xee, 0x0e, 0x34, 0x1f, 0xa1, 0x9a, 0x21, - 0x5f, 0x81, 0x22, 0x8b, 0x0c, 0xb7, 0xee, 0x15, 0x59, 0xe4, 0xfe, 0x59, 0x80, 0xf5, 0x67, 0x4c, - 0x6a, 0x8a, 0xcc, 0x39, 0x37, 0x01, 0x12, 0x3a, 0x44, 0x5f, 0xf1, 0x13, 0x8c, 0x2d, 0xb7, 0xae, - 0x91, 0x97, 0x1a, 0x20, 0xd7, 0xc1, 0x08, 0xbe, 0x64, 0x67, 0xe8, 0x14, 0x3b, 0x85, 0xdd, 0x55, - 0xaf, 0xa6, 0x81, 0x01, 0x3b, 0x43, 0x72, 0x0d, 0xaa, 0x92, 0x0b, 0xe5, 0x07, 0x13, 0xa7, 0x64, - 0x14, 0x2b, 0x5a, 0xdc, 0x9f, 0x90, 0x2f, 0x61, 0x7b, 0x39, 0x67, 0xfe, 0x09, 0x4e, 0x9c, 0xb2, - 0x71, 0xbc, 0x65, 0x1c, 0xf7, 0x2c, 0xe5, 0x29, 0x4e, 0xbc, 0xcd, 0x9c, 0xef, 0xe5, 0xf4, 0xa7, - 0x38, 0x21, 0xdb, 0x50, 0x39, 0x66, 0x23, 0x85, 0xc2, 0x59, 0xcd, 0xee, 0xcf, 0x24, 0xf7, 0x1d, - 0xb4, 0xce, 0xe3, 0x90, 0x09, 0x8f, 0x25, 0x92, 0x1b, 0x50, 0x7e, 0xc3, 0x03, 0xe9, 0x14, 0x3a, - 0xa5, 0xb9, 0xd4, 0x18, 0x54, 0x87, 0xa9, 0xb8, 0xa2, 0xa3, 0x2c, 0x90, 0x92, 0x09, 0xa4, 0x6e, - 0x10, 0x13, 0xc9, 0x6d, 0x58, 0x8f, 0xf1, 0x54, 0xf9, 0x33, 0xa9, 0x28, 0x1a, 0x8b, 0x4d, 0x0d, - 0xbf, 0xc8, 0xd3, 0xe1, 0xba, 0xd0, 0xea, 0xe3, 0x08, 0xe7, 0x4a, 0xb2, 0x98, 0x65, 0x17, 0x5a, - 0x07, 0x31, 0x0d, 0x46, 0xff, 0xc6, 0xf9, 0x00, 0x36, 0xfa, 0x4c, 0x5e, 0x42, 0xfa, 0xb5, 0x00, - 0x8d, 0x87, 0x82, 0xc7, 0x83, 0xf0, 0x35, 0x46, 
0xe9, 0x08, 0xc9, 0xe7, 0x00, 0x52, 0x51, 0xa1, - 0x7c, 0xdd, 0x08, 0xf6, 0x0d, 0xb4, 0xbb, 0x59, 0x13, 0x74, 0xf3, 0x26, 0xe8, 0xbe, 0xcc, 0xbb, - 0xc4, 0xab, 0x1b, 0xb6, 0x96, 0xc9, 0x27, 0x50, 0xc3, 0x38, 0xca, 0x14, 0x8b, 0x97, 0x2a, 0x56, - 0x31, 0x8e, 0x8c, 0x1a, 0x81, 0x72, 0x28, 0x78, 0x6c, 0xcb, 0x6b, 0xfe, 0xbb, 0xbf, 0x17, 0xa0, - 0xf5, 0x02, 0x05, 0xe3, 0x11, 0x0b, 0xff, 0x43, 0xd7, 0xee, 0xc0, 0x3a, 0x8b, 0x15, 0x8a, 0xb7, - 0xba, 0xa8, 0x18, 0xf2, 0x38, 0x32, 0x5e, 0x96, 0xbc, 0x2b, 0x39, 0x3c, 0x30, 0xa8, 0x4e, 0x63, - 0xf5, 0xa5, 0x60, 0xba, 0x0b, 0xc9, 0x67, 0xd0, 0xd4, 0x31, 0xf8, 0xd2, 0xfa, 0x6d, 0x3d, 0xdd, - 0x30, 0xaf, 0x65, 0x36, 0xd7, 0x8f, 0x57, 0xbc, 0x46, 0x38, 0x9b, 0xfb, 0x3e, 0x6c, 0x24, 0x36, - 0xe8, 0x73, 0xed, 0xcc, 0xdd, 0x2d, 0xa3, 0xbd, 0x98, 0x92, 0xc7, 0x2b, 0x5e, 0x2b, 0x59, 0xc0, - 0xf6, 0xeb, 0x50, 0x55, 0x99, 0x2b, 0xee, 0x1f, 0x65, 0x28, 0x3d, 0xe1, 0xc1, 0x62, 0xd5, 0x75, - 0xca, 0x63, 0x6a, 0x53, 0x51, 0xf7, 0xcc, 0x7f, 0xd2, 0x81, 0xb5, 0x08, 0x65, 0x28, 0x98, 0x19, - 0x22, 0xb6, 0x1a, 0xb3, 0x10, 0xf9, 0x14, 0x9a, 0x73, 0x63, 0xcc, 0x36, 0x5a, 0x16, 0xd8, 0x0b, - 0x7b, 0x32, 0x48, 0x30, 0xf4, 0x1a, 0xc9, 0x8c, 0x44, 0x1e, 0xc1, 0xd5, 0xe5, 0x4e, 0x95, 0xce, - 0xaa, 0x69, 0xa2, 0xed, 0xb9, 0x36, 0x9d, 0x76, 0xa6, 0x47, 0x96, 0x9a, 0x55, 0xea, 0x72, 0x8c, - 0xe9, 0xa9, 0x1f, 0xf2, 0x38, 0x4c, 0x85, 0xc6, 0x26, 0x4e, 0x25, 0x2b, 0xc7, 0x98, 0x9e, 0x3e, - 0x3c, 0x47, 0xc9, 0xed, 0x69, 0x0a, 0x9c, 0xaa, 0xf1, 0xb1, 0x61, 0xac, 0xd8, 0x0a, 0x79, 0xf9, - 0x21, 0xb9, 0x05, 0xe5, 0x31, 0x8f, 0xd0, 0xa9, 0x75, 0x0a, 0xbb, 0x57, 0xee, 0x37, 0xf3, 0x7e, - 0xee, 0x1e, 0xf2, 0x08, 0x3d, 0x73, 0xa4, 0x1f, 0x5d, 0x68, 0x06, 0x64, 0xe4, 0x53, 0xe5, 0xd4, - 0x2f, 0x7f, 0x74, 0x96, 0xbd, 0xa7, 0xb4, 0x6a, 0x9a, 0x44, 0xb9, 0x2a, 0x5c, 0xae, 0x6a, 0xd9, - 0x7b, 0x4a, 0x0f, 0x25, 0xa9, 0xa8, 0x4a, 0xa5, 0xb3, 0x66, 0x87, 0x9e, 0x91, 0xc8, 0x26, 0xac, - 0x9a, 0xe9, 0xed, 0x34, 0x0c, 0x9c, 0x09, 0xc4, 0x81, 0x2a, 0x9a, 0x69, 0x10, 0x39, 0xad, 0x4e, - 0x61, 0xb7, 0xe6, 0xe5, 0xa2, 0xfb, 0x00, 0xca, 0x3a, 0x16, 0xd2, 0x82, 0xc6, 0xd7, 0x47, 0x4f, - 0x8f, 0x9e, 0x7f, 0x73, 0xe4, 0x1f, 0x3e, 0xef, 0x1f, 0xb4, 0x56, 0xc8, 0x1a, 0x54, 0x0f, 0x8e, - 0xf6, 0xf6, 0x9f, 0x1d, 0xf4, 0x5b, 0x05, 0xd2, 0x80, 0x5a, 0xff, 0xab, 0x41, 0x26, 0x15, 0xef, - 0xff, 0x56, 0x06, 0x78, 0xc2, 0x83, 0x01, 0x8a, 0xb7, 0x2c, 0x44, 0x72, 0x08, 0xf5, 0xe9, 0x8a, - 0x20, 0x5b, 0xf6, 0x15, 0xcf, 0xaf, 0x8c, 0xf6, 0x74, 0x14, 0xba, 0x3b, 0x3f, 0xfe, 0xf5, 0xf7, - 0x2f, 0xc5, 0xff, 0xb9, 0x44, 0xaf, 0x1a, 0xd9, 0x7b, 0x7b, 0x2f, 0x40, 0x45, 0xef, 0xe9, 0xa5, - 0x2c, 0xbf, 0xd0, 0x1b, 0x84, 0x3c, 0x82, 0x4a, 0xb6, 0x41, 0x08, 0x31, 0x4a, 0x73, 0xeb, 0x64, - 0xf9, 0x22, 0x72, 0x6d, 0xf9, 0xa2, 0xde, 0xf7, 0x2c, 0xfa, 0x81, 0x0c, 0xa0, 0x96, 0x0f, 0x68, - 0xb2, 0x69, 0xd4, 0x16, 0xf6, 0x4e, 0x7b, 0x6b, 0x01, 0xcd, 0xa6, 0xb8, 0xdb, 0x36, 0x37, 0x6f, - 0x92, 0xf7, 0xb8, 0x48, 0x02, 0xa8, 0x4f, 0x07, 0xab, 0x0d, 0x76, 0x71, 0xd0, 0xb6, 0xb7, 0x97, - 0x6a, 0x78, 0xa0, 0xbf, 0x09, 0xdc, 0xdb, 0xe6, 0xde, 0x8e, 0xfb, 0xff, 0x0b, 0x3c, 0xee, 0x65, - 0x55, 0x21, 0x08, 0x70, 0x3e, 0x98, 0x49, 0xd6, 0x00, 0x4b, 0x93, 0xfa, 0x42, 0x2b, 0x77, 0x8c, - 0x95, 0x5b, 0xee, 0xce, 0x45, 0x56, 0xa2, 0xec, 0x2a, 0xf2, 0x2d, 0xd4, 0xa7, 0x7b, 0xc4, 0x86, - 0xb2, 0xb8, 0x57, 0x2e, 0x34, 0x62, 0x93, 0xff, 0xd1, 0x45, 0xc9, 0xdf, 0xff, 0xa9, 0xf0, 0xf3, - 0xde, 0xa1, 0x77, 0x03, 0xaa, 0x11, 0x1e, 0xd3, 0x74, 0xa4, 0xc8, 0x06, 0x59, 0x87, 0x66, 0x7b, - 0xcd, 0x98, 0x19, 0x98, 0xc7, 0xfa, 0x6a, 0x07, 0x6e, 0x42, 0x65, 0x1f, 
0xa9, 0x40, 0x41, 0xae, - 0xd6, 0x8a, 0xed, 0x26, 0x4d, 0xd5, 0x6b, 0x2e, 0xd8, 0x99, 0xf9, 0x34, 0xe9, 0x14, 0x83, 0x06, - 0xc0, 0x94, 0xb0, 0xf2, 0xea, 0xc1, 0x90, 0xa9, 0xd7, 0x69, 0xd0, 0x0d, 0xf9, 0xb8, 0x77, 0x92, - 0x06, 0x78, 0x3c, 0xe2, 0xef, 0xa6, 0xdf, 0x4d, 0xb2, 0x37, 0xfb, 0x05, 0x33, 0xe4, 0x7e, 0x38, - 0x62, 0x18, 0xab, 0xa0, 0x62, 0x1c, 0x7f, 0xf0, 0x4f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x80, 0x64, - 0x45, 0xce, 0x02, 0x0a, 0x00, 0x00, +func init() { proto.RegisterFile("backend/api/job.proto", fileDescriptor_job_a950c472a7b16ea0) } + +var fileDescriptor_job_a950c472a7b16ea0 = []byte{ + // 1162 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xc4, 0x56, 0x5d, 0x72, 0x1b, 0xc5, + 0x13, 0xb7, 0x3e, 0x6c, 0x49, 0x6d, 0xc9, 0x96, 0x27, 0xb6, 0xb3, 0x7f, 0x25, 0xf9, 0x5b, 0x59, + 0xa8, 0xc4, 0x45, 0x11, 0xa9, 0x92, 0x14, 0x14, 0xf0, 0x66, 0x5b, 0x26, 0x21, 0x89, 0x9d, 0xd4, + 0x2a, 0x14, 0x55, 0xe1, 0x61, 0x6b, 0x76, 0xb7, 0x2d, 0x6f, 0x2c, 0xcd, 0x2c, 0x33, 0xb3, 0x49, + 0x64, 0x8a, 0x17, 0xaa, 0xb8, 0x00, 0x70, 0x01, 0x0e, 0xc0, 0x35, 0xb8, 0x00, 0x57, 0xe0, 0x16, + 0xbc, 0x50, 0x33, 0x3b, 0x2b, 0xeb, 0x23, 0xc6, 0x8f, 0x3c, 0x49, 0xfd, 0xeb, 0x5f, 0xcf, 0xf4, + 0xc7, 0x74, 0xf7, 0xc2, 0x56, 0x40, 0xc3, 0x33, 0x64, 0x51, 0x97, 0x26, 0x71, 0xf7, 0x35, 0x0f, + 0x3a, 0x89, 0xe0, 0x8a, 0x93, 0x12, 0x4d, 0xe2, 0xd6, 0xcd, 0x01, 0xe7, 0x83, 0x21, 0x1a, 0x15, + 0x65, 0x8c, 0x2b, 0xaa, 0x62, 0xce, 0x64, 0x46, 0x69, 0xed, 0x58, 0xad, 0x91, 0x82, 0xf4, 0xa4, + 0xab, 0xe2, 0x11, 0x4a, 0x45, 0x47, 0x89, 0x25, 0xdc, 0x98, 0x27, 0xe0, 0x28, 0x51, 0xe3, 0x5c, + 0x39, 0x7d, 0x6f, 0x42, 0x05, 0x1d, 0xa1, 0x42, 0x91, 0x1f, 0x3d, 0xa3, 0x8c, 0x13, 0x1c, 0xc6, + 0x0c, 0x7d, 0x99, 0x60, 0x68, 0x09, 0x1f, 0x4e, 0x13, 0x04, 0x4a, 0x9e, 0x8a, 0x10, 0x7d, 0x81, + 0x27, 0x28, 0x90, 0x85, 0x68, 0x59, 0x33, 0xb1, 0x89, 0x94, 0x59, 0xf8, 0x63, 0xf3, 0x13, 0xde, + 0x1b, 0x20, 0xbb, 0x27, 0xdf, 0xd2, 0xc1, 0x00, 0x45, 0x97, 0x27, 0x26, 0xb4, 0xf7, 0x84, 0x79, + 0x7d, 0xfa, 0x10, 0x14, 0x82, 0x5b, 0x27, 0xdd, 0x0e, 0x34, 0x0f, 0x04, 0x52, 0x85, 0x4f, 0x78, + 0xe0, 0xe1, 0x77, 0x29, 0x4a, 0x45, 0x5a, 0x50, 0x7a, 0xcd, 0x03, 0xa7, 0xd0, 0x2e, 0xec, 0xae, + 0x3e, 0xa8, 0x76, 0x68, 0x12, 0x77, 0xb4, 0x56, 0x83, 0xee, 0x0e, 0x34, 0x1e, 0xa1, 0x9a, 0x22, + 0xaf, 0x41, 0x31, 0x8e, 0x0c, 0xb7, 0xe6, 0x15, 0xe3, 0xc8, 0xfd, 0xa3, 0x00, 0xeb, 0xcf, 0x62, + 0xa9, 0x29, 0x32, 0xe7, 0xdc, 0x02, 0x48, 0xe8, 0x00, 0x7d, 0xc5, 0xcf, 0x90, 0x59, 0x6e, 0x4d, + 0x23, 0x2f, 0x35, 0x40, 0x6e, 0x80, 0x11, 0x7c, 0x19, 0x9f, 0xa3, 0x53, 0x6c, 0x17, 0x76, 0x97, + 0xbd, 0xaa, 0x06, 0xfa, 0xf1, 0x39, 0x92, 0xeb, 0x50, 0x91, 0x5c, 0x28, 0x3f, 0x18, 0x3b, 0x25, + 0x63, 0xb8, 0xa2, 0xc5, 0xfd, 0x31, 0xf9, 0x12, 0xb6, 0x17, 0x73, 0xe6, 0x9f, 0xe1, 0xd8, 0x29, + 0x1b, 0xc7, 0x9b, 0xc6, 0x71, 0xcf, 0x52, 0x9e, 0xe2, 0xd8, 0xdb, 0xcc, 0xf9, 0x5e, 0x4e, 0x7f, + 0x8a, 0x63, 0xb2, 0x0d, 0x2b, 0x27, 0xf1, 0x50, 0xa1, 0x70, 0x96, 0xb3, 0xf3, 0x33, 0xc9, 0x7d, + 0x0b, 0xcd, 0x8b, 0x38, 0x64, 0xc2, 0x99, 0x44, 0x72, 0x13, 0xca, 0xaf, 0x79, 0x20, 0x9d, 0x42, + 0xbb, 0x34, 0x93, 0x1a, 0x83, 0xea, 0x30, 0x15, 0x57, 0x74, 0x98, 0x05, 0x52, 0x32, 0x81, 0xd4, + 0x0c, 0x62, 0x22, 0xb9, 0x03, 0xeb, 0x0c, 0xdf, 0x29, 0x7f, 0x2a, 0x15, 0x45, 0x73, 0x63, 0x43, + 0xc3, 0x2f, 0xf2, 0x74, 0xb8, 0x2e, 0x34, 0x7b, 0x38, 0xc4, 0x99, 0x92, 0xcc, 0x67, 0xd9, 0x85, + 0xe6, 0x21, 0xa3, 0xc1, 0xf0, 0xdf, 0x38, 0x1f, 0xc0, 0x46, 0x2f, 0x96, 0x57, 0x90, 0x7e, 0x2d, + 0x40, 0xfd, 0x40, 0x70, 0xd6, 0x0f, 0x4f, 0x31, 0x4a, 
0x87, 0x48, 0x3e, 0x07, 0x90, 0x8a, 0x0a, + 0xe5, 0xeb, 0x46, 0xb0, 0x6f, 0xa0, 0xd5, 0xc9, 0x9a, 0xa0, 0x93, 0x37, 0x41, 0xe7, 0x65, 0xde, + 0x25, 0x5e, 0xcd, 0xb0, 0xb5, 0x4c, 0x3e, 0x81, 0x2a, 0xb2, 0x28, 0x33, 0x2c, 0x5e, 0x69, 0x58, + 0x41, 0x16, 0x19, 0x33, 0x02, 0xe5, 0x50, 0x70, 0x66, 0xcb, 0x6b, 0xfe, 0xbb, 0xbf, 0x17, 0xa0, + 0xf9, 0x02, 0x45, 0xcc, 0xa3, 0x38, 0xfc, 0x0f, 0x5d, 0xbb, 0x0b, 0xeb, 0x31, 0x53, 0x28, 0xde, + 0xe8, 0xa2, 0x62, 0xc8, 0x59, 0x64, 0xbc, 0x2c, 0x79, 0x6b, 0x39, 0xdc, 0x37, 0xa8, 0x4e, 0x63, + 0xe5, 0xa5, 0x88, 0x75, 0x17, 0x92, 0xcf, 0xa0, 0xa1, 0x63, 0xf0, 0xa5, 0xf5, 0xdb, 0x7a, 0xba, + 0x61, 0x5e, 0xcb, 0x74, 0xae, 0x1f, 0x2f, 0x79, 0xf5, 0x70, 0x3a, 0xf7, 0x3d, 0xd8, 0x48, 0x6c, + 0xd0, 0x17, 0xd6, 0x99, 0xbb, 0x5b, 0xc6, 0x7a, 0x3e, 0x25, 0x8f, 0x97, 0xbc, 0x66, 0x32, 0x87, + 0xed, 0xd7, 0xa0, 0xa2, 0x32, 0x57, 0xdc, 0xbf, 0xcb, 0x50, 0x7a, 0xc2, 0x83, 0xf9, 0xaa, 0xeb, + 0x94, 0x33, 0x6a, 0x53, 0x51, 0xf3, 0xcc, 0x7f, 0xd2, 0x86, 0xd5, 0x08, 0x65, 0x28, 0x62, 0x33, + 0x44, 0x6c, 0x35, 0xa6, 0x21, 0xf2, 0x29, 0x34, 0x66, 0xc6, 0x98, 0x6d, 0xb4, 0x2c, 0xb0, 0x17, + 0x56, 0xd3, 0x4f, 0x30, 0xf4, 0xea, 0xc9, 0x94, 0x44, 0x1e, 0xc1, 0xb5, 0xc5, 0x4e, 0x95, 0xce, + 0xb2, 0x69, 0xa2, 0xed, 0x99, 0x36, 0x9d, 0x74, 0xa6, 0x47, 0x16, 0x9a, 0x55, 0xea, 0x72, 0x48, + 0x14, 0x6f, 0xe2, 0x10, 0x7d, 0x1a, 0x86, 0x3c, 0x65, 0xca, 0x21, 0xc6, 0xcd, 0x35, 0x0b, 0xef, + 0x65, 0xa8, 0x26, 0x8e, 0xe8, 0x3b, 0x3f, 0xe4, 0x2c, 0x4c, 0x85, 0x36, 0x1e, 0x3b, 0x2b, 0x59, + 0xdd, 0x46, 0xf4, 0xdd, 0xc1, 0x05, 0x4a, 0xee, 0x4c, 0x72, 0xe5, 0x54, 0x4c, 0x30, 0x75, 0xe3, + 0x8e, 0x2d, 0xa5, 0x97, 0x2b, 0xc9, 0x6d, 0x28, 0x8f, 0x78, 0x84, 0x4e, 0xb5, 0x5d, 0xd8, 0x5d, + 0x7b, 0xd0, 0xc8, 0x1b, 0xbf, 0x73, 0xc4, 0x23, 0xf4, 0x8c, 0x4a, 0xbf, 0xce, 0xd0, 0x4c, 0xd2, + 0xc8, 0xa7, 0xca, 0xa9, 0x5d, 0xfd, 0x3a, 0x2d, 0x7b, 0x4f, 0x69, 0xd3, 0x34, 0x89, 0x72, 0x53, + 0xb8, 0xda, 0xd4, 0xb2, 0xf7, 0x94, 0x9e, 0x5e, 0x52, 0x51, 0x95, 0x4a, 0x67, 0xd5, 0x4e, 0x47, + 0x23, 0x91, 0x4d, 0x58, 0x36, 0x63, 0xde, 0xa9, 0x1b, 0x38, 0x13, 0x88, 0x03, 0x15, 0x34, 0x63, + 0x23, 0x72, 0x9a, 0xed, 0xc2, 0x6e, 0xd5, 0xcb, 0x45, 0x3d, 0xbb, 0x18, 0xf7, 0x43, 0xaa, 0xc2, + 0xd3, 0x34, 0x71, 0x36, 0x8c, 0xb2, 0xc6, 0xf8, 0x41, 0x06, 0xb8, 0x0f, 0xa1, 0xac, 0x43, 0x25, + 0x4d, 0xa8, 0x7f, 0x7d, 0xfc, 0xf4, 0xf8, 0xf9, 0x37, 0xc7, 0xfe, 0xd1, 0xf3, 0xde, 0x61, 0x73, + 0x89, 0xac, 0x42, 0xe5, 0xf0, 0x78, 0x6f, 0xff, 0xd9, 0x61, 0xaf, 0x59, 0x20, 0x75, 0xa8, 0xf6, + 0xbe, 0xea, 0x67, 0x52, 0xf1, 0xc1, 0x6f, 0x65, 0x80, 0x27, 0x3c, 0xe8, 0x67, 0xb5, 0x21, 0x47, + 0x50, 0x9b, 0xac, 0x1a, 0xb2, 0x65, 0xbb, 0x61, 0x76, 0xf5, 0xb4, 0x26, 0x23, 0xd5, 0xdd, 0xf9, + 0xf1, 0xcf, 0xbf, 0x7e, 0x29, 0xfe, 0xcf, 0x25, 0x7a, 0x65, 0xc9, 0xee, 0x9b, 0xfb, 0x01, 0x2a, + 0x7a, 0x5f, 0x2f, 0x77, 0xf9, 0x85, 0xde, 0x44, 0xe4, 0x11, 0xac, 0x64, 0x9b, 0x88, 0x10, 0x63, + 0x34, 0xb3, 0x96, 0x16, 0x0f, 0x22, 0xd7, 0x17, 0x0f, 0xea, 0x7e, 0x1f, 0x47, 0x3f, 0x90, 0x3e, + 0x54, 0xf3, 0x41, 0x4f, 0x36, 0x8d, 0xd9, 0xdc, 0xfe, 0x6a, 0x6d, 0xcd, 0xa1, 0xd9, 0x36, 0x70, + 0x5b, 0xe6, 0xe4, 0x4d, 0xf2, 0x1e, 0x17, 0x49, 0x00, 0xb5, 0xc9, 0x80, 0xb6, 0xc1, 0xce, 0x0f, + 0xec, 0xd6, 0xf6, 0x42, 0x89, 0x0f, 0xf5, 0xb7, 0x85, 0x7b, 0xc7, 0x9c, 0xdb, 0x76, 0xff, 0x7f, + 0x89, 0xc7, 0xdd, 0xac, 0x68, 0x04, 0x01, 0x2e, 0x06, 0x3c, 0xc9, 0x1a, 0x69, 0x61, 0xe2, 0x5f, + 0x7a, 0xcb, 0x5d, 0x73, 0xcb, 0x6d, 0x77, 0xe7, 0xb2, 0x5b, 0xa2, 0xec, 0x28, 0xf2, 0x2d, 0xd4, + 0x26, 0xfb, 0xc8, 0x86, 0x32, 0xbf, 0x9f, 0x2e, 0xbd, 0xc4, 0x26, 0xff, 0xa3, 
0xcb, 0x92, 0xbf, + 0xff, 0x53, 0xe1, 0xe7, 0xbd, 0x23, 0xef, 0x26, 0x54, 0x22, 0x3c, 0xa1, 0xe9, 0x50, 0x91, 0x0d, + 0xb2, 0x0e, 0x8d, 0xd6, 0xaa, 0xb9, 0xa6, 0x6f, 0xde, 0xf2, 0xab, 0x1d, 0xb8, 0x05, 0x2b, 0xfb, + 0x48, 0x05, 0x0a, 0x72, 0xad, 0x5a, 0x6c, 0x35, 0x68, 0xaa, 0x4e, 0xb9, 0x88, 0xcf, 0xcd, 0x27, + 0x4e, 0xbb, 0x18, 0xd4, 0x01, 0x26, 0x84, 0xa5, 0x57, 0x0f, 0x07, 0xb1, 0x3a, 0x4d, 0x83, 0x4e, + 0xc8, 0x47, 0xdd, 0xb3, 0x34, 0xc0, 0x93, 0x21, 0x7f, 0x3b, 0xf9, 0xfe, 0x92, 0xdd, 0xe9, 0x2f, + 0xa1, 0x01, 0xf7, 0xc3, 0x61, 0x8c, 0x4c, 0x05, 0x2b, 0xc6, 0xf1, 0x87, 0xff, 0x04, 0x00, 0x00, + 0xff, 0xff, 0xfb, 0x9f, 0x07, 0x6b, 0x4a, 0x0a, 0x00, 0x00, } diff --git a/backend/api/go_client/job.pb.gw.go b/backend/api/go_client/job.pb.gw.go index d31d285bb50..8d13bec2a8d 100755 --- a/backend/api/go_client/job.pb.gw.go +++ b/backend/api/go_client/job.pb.gw.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_client/parameter.pb.go b/backend/api/go_client/parameter.pb.go index 0b1ffd245f1..d0ca22fcf21 100755 --- a/backend/api/go_client/parameter.pb.go +++ b/backend/api/go_client/parameter.pb.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_client/parameter.pb.gw.go b/backend/api/go_client/parameter.pb.gw.go index 538896f6820..ee8aa8d0959 100755 --- a/backend/api/go_client/parameter.pb.gw.go +++ b/backend/api/go_client/parameter.pb.gw.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_client/pipeline.pb.go b/backend/api/go_client/pipeline.pb.go index 6962caad954..2308a03cdfa 100755 --- a/backend/api/go_client/pipeline.pb.go +++ b/backend/api/go_client/pipeline.pb.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
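Before the pipeline.pb.go hunks, note what the job.pb.go changes above add: two new Job fields, service_account (field 18) and no_catchup (field 17), with their getters and a regenerated file descriptor. A minimal sketch of setting both through the generated JobService client, assuming the usual CreateJobRequest{Job: ...} shape and a reachable gRPC endpoint; the address and names are hypothetical:

package main

import (
	"context"
	"log"

	api "github.com/kubeflow/pipelines/backend/api/go_client"
	"google.golang.org/grpc"
)

func main() {
	conn, err := grpc.Dial("localhost:8887", grpc.WithInsecure()) // hypothetical address
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	client := api.NewJobServiceClient(conn)
	job := &api.Job{
		Name:           "nightly-train",   // hypothetical job name
		ServiceAccount: "pipeline-runner", // new field 18: run workflows as this account
		NoCatchup:      true,              // new field 17: skip backfilling missed runs
		MaxConcurrency: 1,
	}
	created, err := client.CreateJob(context.Background(), &api.CreateJobRequest{Job: job})
	if err != nil {
		log.Fatal(err)
	}
	log.Printf("created job %s", created.Id)
}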
@@ -52,7 +52,7 @@ func (m *Url) Reset() { *m = Url{} } func (m *Url) String() string { return proto.CompactTextString(m) } func (*Url) ProtoMessage() {} func (*Url) Descriptor() ([]byte, []int) { - return fileDescriptor_pipeline_a461ac7e0daee5b3, []int{0} + return fileDescriptor_pipeline_1d626cf7548f7a80, []int{0} } func (m *Url) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Url.Unmarshal(m, b) @@ -90,7 +90,7 @@ func (m *CreatePipelineRequest) Reset() { *m = CreatePipelineRequest{} } func (m *CreatePipelineRequest) String() string { return proto.CompactTextString(m) } func (*CreatePipelineRequest) ProtoMessage() {} func (*CreatePipelineRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_pipeline_a461ac7e0daee5b3, []int{1} + return fileDescriptor_pipeline_1d626cf7548f7a80, []int{1} } func (m *CreatePipelineRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_CreatePipelineRequest.Unmarshal(m, b) @@ -128,7 +128,7 @@ func (m *GetPipelineRequest) Reset() { *m = GetPipelineRequest{} } func (m *GetPipelineRequest) String() string { return proto.CompactTextString(m) } func (*GetPipelineRequest) ProtoMessage() {} func (*GetPipelineRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_pipeline_a461ac7e0daee5b3, []int{2} + return fileDescriptor_pipeline_1d626cf7548f7a80, []int{2} } func (m *GetPipelineRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_GetPipelineRequest.Unmarshal(m, b) @@ -169,7 +169,7 @@ func (m *ListPipelinesRequest) Reset() { *m = ListPipelinesRequest{} } func (m *ListPipelinesRequest) String() string { return proto.CompactTextString(m) } func (*ListPipelinesRequest) ProtoMessage() {} func (*ListPipelinesRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_pipeline_a461ac7e0daee5b3, []int{3} + return fileDescriptor_pipeline_1d626cf7548f7a80, []int{3} } func (m *ListPipelinesRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_ListPipelinesRequest.Unmarshal(m, b) @@ -230,7 +230,7 @@ func (m *ListPipelinesResponse) Reset() { *m = ListPipelinesResponse{} } func (m *ListPipelinesResponse) String() string { return proto.CompactTextString(m) } func (*ListPipelinesResponse) ProtoMessage() {} func (*ListPipelinesResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_pipeline_a461ac7e0daee5b3, []int{4} + return fileDescriptor_pipeline_1d626cf7548f7a80, []int{4} } func (m *ListPipelinesResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_ListPipelinesResponse.Unmarshal(m, b) @@ -282,7 +282,7 @@ func (m *DeletePipelineRequest) Reset() { *m = DeletePipelineRequest{} } func (m *DeletePipelineRequest) String() string { return proto.CompactTextString(m) } func (*DeletePipelineRequest) ProtoMessage() {} func (*DeletePipelineRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_pipeline_a461ac7e0daee5b3, []int{5} + return fileDescriptor_pipeline_1d626cf7548f7a80, []int{5} } func (m *DeletePipelineRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_DeletePipelineRequest.Unmarshal(m, b) @@ -320,7 +320,7 @@ func (m *GetTemplateRequest) Reset() { *m = GetTemplateRequest{} } func (m *GetTemplateRequest) String() string { return proto.CompactTextString(m) } func (*GetTemplateRequest) ProtoMessage() {} func (*GetTemplateRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_pipeline_a461ac7e0daee5b3, []int{6} + return fileDescriptor_pipeline_1d626cf7548f7a80, []int{6} } func (m *GetTemplateRequest) XXX_Unmarshal(b []byte) error { return 
xxx_messageInfo_GetTemplateRequest.Unmarshal(m, b) @@ -358,7 +358,7 @@ func (m *GetTemplateResponse) Reset() { *m = GetTemplateResponse{} } func (m *GetTemplateResponse) String() string { return proto.CompactTextString(m) } func (*GetTemplateResponse) ProtoMessage() {} func (*GetTemplateResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_pipeline_a461ac7e0daee5b3, []int{7} + return fileDescriptor_pipeline_1d626cf7548f7a80, []int{7} } func (m *GetTemplateResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_GetTemplateResponse.Unmarshal(m, b) @@ -385,6 +385,282 @@ func (m *GetTemplateResponse) GetTemplate() string { return "" } +type GetPipelineVersionTemplateRequest struct { + VersionId string `protobuf:"bytes,1,opt,name=version_id,json=versionId,proto3" json:"version_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetPipelineVersionTemplateRequest) Reset() { *m = GetPipelineVersionTemplateRequest{} } +func (m *GetPipelineVersionTemplateRequest) String() string { return proto.CompactTextString(m) } +func (*GetPipelineVersionTemplateRequest) ProtoMessage() {} +func (*GetPipelineVersionTemplateRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_pipeline_1d626cf7548f7a80, []int{8} +} +func (m *GetPipelineVersionTemplateRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetPipelineVersionTemplateRequest.Unmarshal(m, b) +} +func (m *GetPipelineVersionTemplateRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetPipelineVersionTemplateRequest.Marshal(b, m, deterministic) +} +func (dst *GetPipelineVersionTemplateRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetPipelineVersionTemplateRequest.Merge(dst, src) +} +func (m *GetPipelineVersionTemplateRequest) XXX_Size() int { + return xxx_messageInfo_GetPipelineVersionTemplateRequest.Size(m) +} +func (m *GetPipelineVersionTemplateRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetPipelineVersionTemplateRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetPipelineVersionTemplateRequest proto.InternalMessageInfo + +func (m *GetPipelineVersionTemplateRequest) GetVersionId() string { + if m != nil { + return m.VersionId + } + return "" +} + +type CreatePipelineVersionRequest struct { + Version *PipelineVersion `protobuf:"bytes,1,opt,name=version,proto3" json:"version,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreatePipelineVersionRequest) Reset() { *m = CreatePipelineVersionRequest{} } +func (m *CreatePipelineVersionRequest) String() string { return proto.CompactTextString(m) } +func (*CreatePipelineVersionRequest) ProtoMessage() {} +func (*CreatePipelineVersionRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_pipeline_1d626cf7548f7a80, []int{9} +} +func (m *CreatePipelineVersionRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreatePipelineVersionRequest.Unmarshal(m, b) +} +func (m *CreatePipelineVersionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreatePipelineVersionRequest.Marshal(b, m, deterministic) +} +func (dst *CreatePipelineVersionRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreatePipelineVersionRequest.Merge(dst, src) +} +func (m *CreatePipelineVersionRequest) XXX_Size() int { + return xxx_messageInfo_CreatePipelineVersionRequest.Size(m) +} +func (m 
*CreatePipelineVersionRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreatePipelineVersionRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreatePipelineVersionRequest proto.InternalMessageInfo + +func (m *CreatePipelineVersionRequest) GetVersion() *PipelineVersion { + if m != nil { + return m.Version + } + return nil +} + +type GetPipelineVersionRequest struct { + VersionId string `protobuf:"bytes,1,opt,name=version_id,json=versionId,proto3" json:"version_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetPipelineVersionRequest) Reset() { *m = GetPipelineVersionRequest{} } +func (m *GetPipelineVersionRequest) String() string { return proto.CompactTextString(m) } +func (*GetPipelineVersionRequest) ProtoMessage() {} +func (*GetPipelineVersionRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_pipeline_1d626cf7548f7a80, []int{10} +} +func (m *GetPipelineVersionRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetPipelineVersionRequest.Unmarshal(m, b) +} +func (m *GetPipelineVersionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetPipelineVersionRequest.Marshal(b, m, deterministic) +} +func (dst *GetPipelineVersionRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetPipelineVersionRequest.Merge(dst, src) +} +func (m *GetPipelineVersionRequest) XXX_Size() int { + return xxx_messageInfo_GetPipelineVersionRequest.Size(m) +} +func (m *GetPipelineVersionRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetPipelineVersionRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetPipelineVersionRequest proto.InternalMessageInfo + +func (m *GetPipelineVersionRequest) GetVersionId() string { + if m != nil { + return m.VersionId + } + return "" +} + +type ListPipelineVersionsRequest struct { + ResourceKey *ResourceKey `protobuf:"bytes,1,opt,name=resource_key,json=resourceKey,proto3" json:"resource_key,omitempty"` + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + SortBy string `protobuf:"bytes,4,opt,name=sort_by,json=sortBy,proto3" json:"sort_by,omitempty"` + Filter string `protobuf:"bytes,5,opt,name=filter,proto3" json:"filter,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListPipelineVersionsRequest) Reset() { *m = ListPipelineVersionsRequest{} } +func (m *ListPipelineVersionsRequest) String() string { return proto.CompactTextString(m) } +func (*ListPipelineVersionsRequest) ProtoMessage() {} +func (*ListPipelineVersionsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_pipeline_1d626cf7548f7a80, []int{11} +} +func (m *ListPipelineVersionsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListPipelineVersionsRequest.Unmarshal(m, b) +} +func (m *ListPipelineVersionsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListPipelineVersionsRequest.Marshal(b, m, deterministic) +} +func (dst *ListPipelineVersionsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListPipelineVersionsRequest.Merge(dst, src) +} +func (m *ListPipelineVersionsRequest) XXX_Size() int { + return xxx_messageInfo_ListPipelineVersionsRequest.Size(m) +} +func (m *ListPipelineVersionsRequest) XXX_DiscardUnknown() { + 
xxx_messageInfo_ListPipelineVersionsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListPipelineVersionsRequest proto.InternalMessageInfo + +func (m *ListPipelineVersionsRequest) GetResourceKey() *ResourceKey { + if m != nil { + return m.ResourceKey + } + return nil +} + +func (m *ListPipelineVersionsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListPipelineVersionsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *ListPipelineVersionsRequest) GetSortBy() string { + if m != nil { + return m.SortBy + } + return "" +} + +func (m *ListPipelineVersionsRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +type ListPipelineVersionsResponse struct { + Versions []*PipelineVersion `protobuf:"bytes,1,rep,name=versions,proto3" json:"versions,omitempty"` + NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + TotalSize int32 `protobuf:"varint,3,opt,name=total_size,json=totalSize,proto3" json:"total_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListPipelineVersionsResponse) Reset() { *m = ListPipelineVersionsResponse{} } +func (m *ListPipelineVersionsResponse) String() string { return proto.CompactTextString(m) } +func (*ListPipelineVersionsResponse) ProtoMessage() {} +func (*ListPipelineVersionsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_pipeline_1d626cf7548f7a80, []int{12} +} +func (m *ListPipelineVersionsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListPipelineVersionsResponse.Unmarshal(m, b) +} +func (m *ListPipelineVersionsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListPipelineVersionsResponse.Marshal(b, m, deterministic) +} +func (dst *ListPipelineVersionsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListPipelineVersionsResponse.Merge(dst, src) +} +func (m *ListPipelineVersionsResponse) XXX_Size() int { + return xxx_messageInfo_ListPipelineVersionsResponse.Size(m) +} +func (m *ListPipelineVersionsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListPipelineVersionsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListPipelineVersionsResponse proto.InternalMessageInfo + +func (m *ListPipelineVersionsResponse) GetVersions() []*PipelineVersion { + if m != nil { + return m.Versions + } + return nil +} + +func (m *ListPipelineVersionsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +func (m *ListPipelineVersionsResponse) GetTotalSize() int32 { + if m != nil { + return m.TotalSize + } + return 0 +} + +type DeletePipelineVersionRequest struct { + VersionId string `protobuf:"bytes,1,opt,name=version_id,json=versionId,proto3" json:"version_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeletePipelineVersionRequest) Reset() { *m = DeletePipelineVersionRequest{} } +func (m *DeletePipelineVersionRequest) String() string { return proto.CompactTextString(m) } +func (*DeletePipelineVersionRequest) ProtoMessage() {} +func (*DeletePipelineVersionRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_pipeline_1d626cf7548f7a80, []int{13} +} +func (m *DeletePipelineVersionRequest) XXX_Unmarshal(b []byte) error { + return 
xxx_messageInfo_DeletePipelineVersionRequest.Unmarshal(m, b) +} +func (m *DeletePipelineVersionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeletePipelineVersionRequest.Marshal(b, m, deterministic) +} +func (dst *DeletePipelineVersionRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeletePipelineVersionRequest.Merge(dst, src) +} +func (m *DeletePipelineVersionRequest) XXX_Size() int { + return xxx_messageInfo_DeletePipelineVersionRequest.Size(m) +} +func (m *DeletePipelineVersionRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeletePipelineVersionRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeletePipelineVersionRequest proto.InternalMessageInfo + +func (m *DeletePipelineVersionRequest) GetVersionId() string { + if m != nil { + return m.VersionId + } + return "" +} + type Pipeline struct { Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` CreatedAt *timestamp.Timestamp `protobuf:"bytes,2,opt,name=created_at,json=createdAt,proto3" json:"created_at,omitempty"` @@ -393,6 +669,7 @@ type Pipeline struct { Parameters []*Parameter `protobuf:"bytes,5,rep,name=parameters,proto3" json:"parameters,omitempty"` Url *Url `protobuf:"bytes,7,opt,name=url,proto3" json:"url,omitempty"` Error string `protobuf:"bytes,6,opt,name=error,proto3" json:"error,omitempty"` + DefaultVersion *PipelineVersion `protobuf:"bytes,8,opt,name=default_version,json=defaultVersion,proto3" json:"default_version,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -402,7 +679,7 @@ func (m *Pipeline) Reset() { *m = Pipeline{} } func (m *Pipeline) String() string { return proto.CompactTextString(m) } func (*Pipeline) ProtoMessage() {} func (*Pipeline) Descriptor() ([]byte, []int) { - return fileDescriptor_pipeline_a461ac7e0daee5b3, []int{8} + return fileDescriptor_pipeline_1d626cf7548f7a80, []int{14} } func (m *Pipeline) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Pipeline.Unmarshal(m, b) @@ -471,6 +748,99 @@ func (m *Pipeline) GetError() string { return "" } +func (m *Pipeline) GetDefaultVersion() *PipelineVersion { + if m != nil { + return m.DefaultVersion + } + return nil +} + +type PipelineVersion struct { + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` + CreatedAt *timestamp.Timestamp `protobuf:"bytes,3,opt,name=created_at,json=createdAt,proto3" json:"created_at,omitempty"` + Parameters []*Parameter `protobuf:"bytes,4,rep,name=parameters,proto3" json:"parameters,omitempty"` + CodeSourceUrl string `protobuf:"bytes,5,opt,name=code_source_url,json=codeSourceUrl,proto3" json:"code_source_url,omitempty"` + PackageUrl *Url `protobuf:"bytes,6,opt,name=package_url,json=packageUrl,proto3" json:"package_url,omitempty"` + ResourceReferences []*ResourceReference `protobuf:"bytes,7,rep,name=resource_references,json=resourceReferences,proto3" json:"resource_references,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PipelineVersion) Reset() { *m = PipelineVersion{} } +func (m *PipelineVersion) String() string { return proto.CompactTextString(m) } +func (*PipelineVersion) ProtoMessage() {} +func (*PipelineVersion) Descriptor() ([]byte, []int) { + return fileDescriptor_pipeline_1d626cf7548f7a80, []int{15} +} +func (m *PipelineVersion) XXX_Unmarshal(b []byte) error { + return 
xxx_messageInfo_PipelineVersion.Unmarshal(m, b) +} +func (m *PipelineVersion) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PipelineVersion.Marshal(b, m, deterministic) +} +func (dst *PipelineVersion) XXX_Merge(src proto.Message) { + xxx_messageInfo_PipelineVersion.Merge(dst, src) +} +func (m *PipelineVersion) XXX_Size() int { + return xxx_messageInfo_PipelineVersion.Size(m) +} +func (m *PipelineVersion) XXX_DiscardUnknown() { + xxx_messageInfo_PipelineVersion.DiscardUnknown(m) +} + +var xxx_messageInfo_PipelineVersion proto.InternalMessageInfo + +func (m *PipelineVersion) GetId() string { + if m != nil { + return m.Id + } + return "" +} + +func (m *PipelineVersion) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *PipelineVersion) GetCreatedAt() *timestamp.Timestamp { + if m != nil { + return m.CreatedAt + } + return nil +} + +func (m *PipelineVersion) GetParameters() []*Parameter { + if m != nil { + return m.Parameters + } + return nil +} + +func (m *PipelineVersion) GetCodeSourceUrl() string { + if m != nil { + return m.CodeSourceUrl + } + return "" +} + +func (m *PipelineVersion) GetPackageUrl() *Url { + if m != nil { + return m.PackageUrl + } + return nil +} + +func (m *PipelineVersion) GetResourceReferences() []*ResourceReference { + if m != nil { + return m.ResourceReferences + } + return nil +} + func init() { proto.RegisterType((*Url)(nil), "api.Url") proto.RegisterType((*CreatePipelineRequest)(nil), "api.CreatePipelineRequest") @@ -480,7 +850,14 @@ func init() { proto.RegisterType((*DeletePipelineRequest)(nil), "api.DeletePipelineRequest") proto.RegisterType((*GetTemplateRequest)(nil), "api.GetTemplateRequest") proto.RegisterType((*GetTemplateResponse)(nil), "api.GetTemplateResponse") + proto.RegisterType((*GetPipelineVersionTemplateRequest)(nil), "api.GetPipelineVersionTemplateRequest") + proto.RegisterType((*CreatePipelineVersionRequest)(nil), "api.CreatePipelineVersionRequest") + proto.RegisterType((*GetPipelineVersionRequest)(nil), "api.GetPipelineVersionRequest") + proto.RegisterType((*ListPipelineVersionsRequest)(nil), "api.ListPipelineVersionsRequest") + proto.RegisterType((*ListPipelineVersionsResponse)(nil), "api.ListPipelineVersionsResponse") + proto.RegisterType((*DeletePipelineVersionRequest)(nil), "api.DeletePipelineVersionRequest") proto.RegisterType((*Pipeline)(nil), "api.Pipeline") + proto.RegisterType((*PipelineVersion)(nil), "api.PipelineVersion") } // Reference imports to suppress errors if they are not otherwise used. 
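With the message types above registered, pipeline versions are created against an owning pipeline and listed per pipeline through the client methods added in the hunks just below. A minimal sketch of both calls, assuming the generated NewPipelineServiceClient constructor, the ResourceKey/ResourceReference types and their PIPELINE/OWNER enum values from resource_reference.pb.go, and a reachable endpoint; the address, URL, and IDs are hypothetical placeholders:

package main

import (
	"context"
	"log"

	api "github.com/kubeflow/pipelines/backend/api/go_client"
	"google.golang.org/grpc"
)

func main() {
	conn, err := grpc.Dial("localhost:8887", grpc.WithInsecure()) // hypothetical address
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()
	client := api.NewPipelineServiceClient(conn)
	ctx := context.Background()

	// Create a version owned by an existing pipeline; the OWNER reference
	// ties the version to its parent pipeline.
	v, err := client.CreatePipelineVersion(ctx, &api.CreatePipelineVersionRequest{
		Version: &api.PipelineVersion{
			Name:       "v2",
			PackageUrl: &api.Url{PipelineUrl: "https://example.com/pipeline.tar.gz"},
			ResourceReferences: []*api.ResourceReference{{
				Key:          &api.ResourceKey{Type: api.ResourceType_PIPELINE, Id: "pipeline-id-123"},
				Relationship: api.Relationship_OWNER,
			}},
		},
	})
	if err != nil {
		log.Fatal(err)
	}
	log.Printf("created version %s", v.Id)

	// List versions of the same pipeline; ResourceKey scopes the query.
	resp, err := client.ListPipelineVersions(ctx, &api.ListPipelineVersionsRequest{
		ResourceKey: &api.ResourceKey{Type: api.ResourceType_PIPELINE, Id: "pipeline-id-123"},
		PageSize:    10,
		SortBy:      "created_at desc",
	})
	if err != nil {
		log.Fatal(err)
	}
	for _, ver := range resp.Versions {
		log.Printf("version %q id=%s", ver.Name, ver.Id)
	}
}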
@@ -500,6 +877,11 @@ type PipelineServiceClient interface { ListPipelines(ctx context.Context, in *ListPipelinesRequest, opts ...grpc.CallOption) (*ListPipelinesResponse, error) DeletePipeline(ctx context.Context, in *DeletePipelineRequest, opts ...grpc.CallOption) (*empty.Empty, error) GetTemplate(ctx context.Context, in *GetTemplateRequest, opts ...grpc.CallOption) (*GetTemplateResponse, error) + CreatePipelineVersion(ctx context.Context, in *CreatePipelineVersionRequest, opts ...grpc.CallOption) (*PipelineVersion, error) + GetPipelineVersion(ctx context.Context, in *GetPipelineVersionRequest, opts ...grpc.CallOption) (*PipelineVersion, error) + ListPipelineVersions(ctx context.Context, in *ListPipelineVersionsRequest, opts ...grpc.CallOption) (*ListPipelineVersionsResponse, error) + DeletePipelineVersion(ctx context.Context, in *DeletePipelineVersionRequest, opts ...grpc.CallOption) (*empty.Empty, error) + GetPipelineVersionTemplate(ctx context.Context, in *GetPipelineVersionTemplateRequest, opts ...grpc.CallOption) (*GetTemplateResponse, error) } type pipelineServiceClient struct { @@ -555,6 +937,51 @@ func (c *pipelineServiceClient) GetTemplate(ctx context.Context, in *GetTemplate return out, nil } +func (c *pipelineServiceClient) CreatePipelineVersion(ctx context.Context, in *CreatePipelineVersionRequest, opts ...grpc.CallOption) (*PipelineVersion, error) { + out := new(PipelineVersion) + err := c.cc.Invoke(ctx, "/api.PipelineService/CreatePipelineVersion", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *pipelineServiceClient) GetPipelineVersion(ctx context.Context, in *GetPipelineVersionRequest, opts ...grpc.CallOption) (*PipelineVersion, error) { + out := new(PipelineVersion) + err := c.cc.Invoke(ctx, "/api.PipelineService/GetPipelineVersion", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *pipelineServiceClient) ListPipelineVersions(ctx context.Context, in *ListPipelineVersionsRequest, opts ...grpc.CallOption) (*ListPipelineVersionsResponse, error) { + out := new(ListPipelineVersionsResponse) + err := c.cc.Invoke(ctx, "/api.PipelineService/ListPipelineVersions", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *pipelineServiceClient) DeletePipelineVersion(ctx context.Context, in *DeletePipelineVersionRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/api.PipelineService/DeletePipelineVersion", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *pipelineServiceClient) GetPipelineVersionTemplate(ctx context.Context, in *GetPipelineVersionTemplateRequest, opts ...grpc.CallOption) (*GetTemplateResponse, error) { + out := new(GetTemplateResponse) + err := c.cc.Invoke(ctx, "/api.PipelineService/GetPipelineVersionTemplate", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + // PipelineServiceServer is the server API for PipelineService service. 
type PipelineServiceServer interface { CreatePipeline(context.Context, *CreatePipelineRequest) (*Pipeline, error) @@ -562,6 +989,11 @@ type PipelineServiceServer interface { ListPipelines(context.Context, *ListPipelinesRequest) (*ListPipelinesResponse, error) DeletePipeline(context.Context, *DeletePipelineRequest) (*empty.Empty, error) GetTemplate(context.Context, *GetTemplateRequest) (*GetTemplateResponse, error) + CreatePipelineVersion(context.Context, *CreatePipelineVersionRequest) (*PipelineVersion, error) + GetPipelineVersion(context.Context, *GetPipelineVersionRequest) (*PipelineVersion, error) + ListPipelineVersions(context.Context, *ListPipelineVersionsRequest) (*ListPipelineVersionsResponse, error) + DeletePipelineVersion(context.Context, *DeletePipelineVersionRequest) (*empty.Empty, error) + GetPipelineVersionTemplate(context.Context, *GetPipelineVersionTemplateRequest) (*GetTemplateResponse, error) } func RegisterPipelineServiceServer(s *grpc.Server, srv PipelineServiceServer) { @@ -658,6 +1090,96 @@ func _PipelineService_GetTemplate_Handler(srv interface{}, ctx context.Context, return interceptor(ctx, in, info, handler) } +func _PipelineService_CreatePipelineVersion_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreatePipelineVersionRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PipelineServiceServer).CreatePipelineVersion(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/api.PipelineService/CreatePipelineVersion", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PipelineServiceServer).CreatePipelineVersion(ctx, req.(*CreatePipelineVersionRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _PipelineService_GetPipelineVersion_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetPipelineVersionRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PipelineServiceServer).GetPipelineVersion(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/api.PipelineService/GetPipelineVersion", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PipelineServiceServer).GetPipelineVersion(ctx, req.(*GetPipelineVersionRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _PipelineService_ListPipelineVersions_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListPipelineVersionsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PipelineServiceServer).ListPipelineVersions(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/api.PipelineService/ListPipelineVersions", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PipelineServiceServer).ListPipelineVersions(ctx, req.(*ListPipelineVersionsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _PipelineService_DeletePipelineVersion_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeletePipelineVersionRequest) + if err := dec(in); 
err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PipelineServiceServer).DeletePipelineVersion(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/api.PipelineService/DeletePipelineVersion", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PipelineServiceServer).DeletePipelineVersion(ctx, req.(*DeletePipelineVersionRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _PipelineService_GetPipelineVersionTemplate_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetPipelineVersionTemplateRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PipelineServiceServer).GetPipelineVersionTemplate(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/api.PipelineService/GetPipelineVersionTemplate", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PipelineServiceServer).GetPipelineVersionTemplate(ctx, req.(*GetPipelineVersionTemplateRequest)) + } + return interceptor(ctx, in, info, handler) +} + var _PipelineService_serviceDesc = grpc.ServiceDesc{ ServiceName: "api.PipelineService", HandlerType: (*PipelineServiceServer)(nil), @@ -682,64 +1204,109 @@ var _PipelineService_serviceDesc = grpc.ServiceDesc{ MethodName: "GetTemplate", Handler: _PipelineService_GetTemplate_Handler, }, + { + MethodName: "CreatePipelineVersion", + Handler: _PipelineService_CreatePipelineVersion_Handler, + }, + { + MethodName: "GetPipelineVersion", + Handler: _PipelineService_GetPipelineVersion_Handler, + }, + { + MethodName: "ListPipelineVersions", + Handler: _PipelineService_ListPipelineVersions_Handler, + }, + { + MethodName: "DeletePipelineVersion", + Handler: _PipelineService_DeletePipelineVersion_Handler, + }, + { + MethodName: "GetPipelineVersionTemplate", + Handler: _PipelineService_GetPipelineVersionTemplate_Handler, + }, }, Streams: []grpc.StreamDesc{}, Metadata: "backend/api/pipeline.proto", } func init() { - proto.RegisterFile("backend/api/pipeline.proto", fileDescriptor_pipeline_a461ac7e0daee5b3) -} - -var fileDescriptor_pipeline_a461ac7e0daee5b3 = []byte{ - // 771 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x54, 0x4f, 0x53, 0x3b, 0x45, - 0x10, 0x75, 0x13, 0x08, 0x49, 0xe7, 0x97, 0x50, 0x0e, 0x7f, 0xb2, 0x2c, 0x20, 0x71, 0xa5, 0x30, - 0xa8, 0xec, 0x16, 0x70, 0xd2, 0x1b, 0x51, 0xcb, 0x8b, 0x56, 0x51, 0x01, 0x2e, 0x78, 0x48, 0xcd, - 0x26, 0x9d, 0x65, 0x64, 0xb3, 0xb3, 0xce, 0xcc, 0x82, 0x60, 0x79, 0xd1, 0xf2, 0xe4, 0x4d, 0xcf, - 0x7e, 0x2a, 0xbf, 0x82, 0x57, 0xbf, 0x83, 0xb5, 0xb3, 0x3b, 0x21, 0xff, 0xe0, 0x94, 0xf4, 0xeb, - 0xb7, 0xd3, 0xfd, 0x7a, 0x5e, 0x0f, 0x38, 0x01, 0x1d, 0xdc, 0x63, 0x3c, 0xf4, 0x69, 0xc2, 0xfc, - 0x84, 0x25, 0x18, 0xb1, 0x18, 0xbd, 0x44, 0x70, 0xc5, 0x49, 0x99, 0x26, 0xcc, 0xd9, 0x0b, 0x39, - 0x0f, 0x23, 0xd4, 0x79, 0x1a, 0xc7, 0x5c, 0x51, 0xc5, 0x78, 0x2c, 0x73, 0x8a, 0x73, 0x50, 0x64, - 0x75, 0x14, 0xa4, 0x23, 0x5f, 0xb1, 0x31, 0x4a, 0x45, 0xc7, 0x49, 0x41, 0xd8, 0x9d, 0x27, 0xe0, - 0x38, 0x51, 0x4f, 0x26, 0x39, 0x53, 0x9c, 0x0a, 0x3a, 0x46, 0x85, 0xa2, 0x48, 0xb6, 0xa6, 0x93, - 0x28, 0x04, 0x37, 0x89, 0xcf, 0xf4, 0xcf, 0xe0, 0x24, 0xc4, 0xf8, 0x44, 0x3e, 0xd2, 0x30, 0x44, - 0xe1, 0xf3, 0x44, 0x77, 0xb5, 0xd8, 0xa1, 0xdb, 0x81, 0xf2, 0x8d, 0x88, 0xc8, 0x87, 0xf0, 0xce, - 0xa8, 0xeb, 0xa7, 0x22, 
0xb2, 0xad, 0xb6, 0xd5, 0xa9, 0xf5, 0xea, 0x06, 0xbb, 0x11, 0x91, 0xdb, - 0x85, 0xad, 0x2f, 0x05, 0x52, 0x85, 0x97, 0x05, 0xd8, 0xc3, 0x1f, 0x53, 0x94, 0x8a, 0x1c, 0x43, - 0xd5, 0xf0, 0xf4, 0x77, 0xf5, 0xb3, 0x86, 0x47, 0x13, 0xe6, 0x4d, 0x78, 0x93, 0xb4, 0x7b, 0x08, - 0xe4, 0x1b, 0x54, 0xf3, 0x07, 0x34, 0xa1, 0xc4, 0x86, 0x45, 0xc9, 0x12, 0x1b, 0xba, 0xbf, 0x59, - 0xb0, 0xf9, 0x2d, 0x93, 0x13, 0x9e, 0x34, 0xc4, 0x7d, 0x80, 0x84, 0x86, 0xd8, 0x57, 0xfc, 0x1e, - 0xe3, 0xe2, 0x83, 0x5a, 0x86, 0x5c, 0x67, 0x00, 0xd9, 0x05, 0x1d, 0xf4, 0x25, 0x7b, 0x46, 0xbb, - 0xd4, 0xb6, 0x3a, 0xab, 0xbd, 0x6a, 0x06, 0x5c, 0xb1, 0x67, 0x24, 0x2d, 0x58, 0x93, 0x5c, 0xa8, - 0x7e, 0xf0, 0x64, 0x97, 0xf5, 0x87, 0x95, 0x2c, 0xec, 0x3e, 0x91, 0x6d, 0xa8, 0x8c, 0x58, 0xa4, - 0x50, 0xd8, 0x2b, 0x39, 0x9e, 0x47, 0xee, 0x1f, 0x16, 0x6c, 0xcd, 0x75, 0x21, 0x13, 0x1e, 0x4b, - 0x24, 0x9f, 0x42, 0xcd, 0x28, 0x92, 0xb6, 0xd5, 0x2e, 0x2f, 0x2a, 0x7e, 0xc9, 0x67, 0x3d, 0x2b, - 0xae, 0x68, 0x94, 0x77, 0x55, 0xd6, 0x5d, 0xd5, 0x34, 0xa2, 0xdb, 0x3a, 0x82, 0xf5, 0x18, 0x7f, - 0x52, 0xfd, 0x29, 0x5d, 0x25, 0xdd, 0x46, 0x23, 0x83, 0x2f, 0x8d, 0x36, 0xf7, 0x63, 0xd8, 0xfa, - 0x0a, 0x23, 0x5c, 0x9c, 0xfe, 0xfc, 0xf0, 0xf2, 0x11, 0x5f, 0xe3, 0x38, 0x89, 0xa8, 0x7a, 0x95, - 0x75, 0x0a, 0x1b, 0x33, 0xac, 0x42, 0x99, 0x03, 0x55, 0x55, 0x60, 0x05, 0x79, 0x12, 0xbb, 0xff, - 0x59, 0x50, 0x35, 0xc5, 0xe7, 0xcf, 0x23, 0x9f, 0x03, 0x0c, 0xb4, 0x39, 0x86, 0x7d, 0xaa, 0xb4, - 0x82, 0xfa, 0x99, 0xe3, 0xe5, 0xe6, 0xf6, 0x8c, 0xb9, 0xbd, 0x6b, 0xe3, 0xfe, 0x5e, 0xad, 0x60, - 0x5f, 0x28, 0x42, 0x60, 0x25, 0xa6, 0x63, 0x2c, 0x6e, 0x45, 0xff, 0x27, 0x6d, 0xa8, 0x0f, 0x51, - 0x0e, 0x04, 0xd3, 0xbe, 0x2d, 0x2e, 0x66, 0x1a, 0x22, 0x5e, 0x66, 0x85, 0x62, 0x23, 0xa4, 0xbd, - 0xaa, 0x2f, 0xa1, 0x99, 0x5f, 0x82, 0x81, 0x7b, 0x53, 0x0c, 0xe2, 0x40, 0x39, 0xf3, 0xf5, 0x9a, - 0xee, 0xac, 0xaa, 0x89, 0x37, 0x22, 0xea, 0x65, 0x20, 0xd9, 0x84, 0x55, 0xbd, 0x40, 0x76, 0x45, - 0xd7, 0xc9, 0x83, 0xb3, 0xbf, 0x57, 0x60, 0xdd, 0xe8, 0xbd, 0x42, 0xf1, 0xc0, 0x06, 0x48, 0x46, - 0xd0, 0x9c, 0xdd, 0x01, 0xe2, 0xe8, 0xa3, 0x96, 0x2e, 0x86, 0x33, 0x6b, 0x0a, 0xf7, 0xf8, 0xd7, - 0x7f, 0xfe, 0xfd, 0xab, 0xf4, 0x91, 0xdb, 0xca, 0x56, 0x56, 0xfa, 0x0f, 0xa7, 0x01, 0x2a, 0x7a, - 0x3a, 0x79, 0x55, 0xe4, 0x17, 0x93, 0x3d, 0x21, 0xdf, 0x43, 0x7d, 0x6a, 0x4f, 0x48, 0x4b, 0x1f, - 0xb4, 0xb8, 0x39, 0xf3, 0x15, 0x0e, 0x75, 0x85, 0x0f, 0xc8, 0xde, 0x2b, 0x15, 0xfc, 0x9f, 0xd9, - 0xf0, 0x17, 0x12, 0x42, 0x63, 0xc6, 0xd7, 0x64, 0x47, 0x9f, 0xb2, 0x6c, 0xe3, 0x1c, 0x67, 0x59, - 0x2a, 0x37, 0x8b, 0x7b, 0xa0, 0xab, 0xed, 0x90, 0xd7, 0xf4, 0x90, 0x1f, 0xa0, 0x39, 0xeb, 0xd9, - 0x62, 0x5a, 0x4b, 0x8d, 0xec, 0x6c, 0x2f, 0xd8, 0xe5, 0xeb, 0xec, 0x2d, 0x34, 0xa2, 0x3e, 0x79, - 0x5b, 0x54, 0xa2, 0x27, 0x66, 0x0c, 0xfd, 0x32, 0xb1, 0xb9, 0x45, 0x70, 0xec, 0xc5, 0x44, 0x21, - 0xc7, 0xd3, 0x75, 0x3a, 0xe4, 0xe8, 0xad, 0x3a, 0xbe, 0x59, 0x07, 0xd9, 0xfd, 0xdd, 0xfa, 0xf3, - 0xe2, 0xbb, 0xde, 0x1e, 0xac, 0x0d, 0x71, 0x44, 0xd3, 0x48, 0x91, 0xf7, 0xc9, 0x3a, 0x34, 0x9c, - 0xba, 0x2e, 0x70, 0xa5, 0xa8, 0x4a, 0xe5, 0xed, 0x01, 0xec, 0x43, 0xa5, 0x8b, 0x54, 0xa0, 0x20, - 0x1b, 0xd5, 0x92, 0xd3, 0xa0, 0xa9, 0xba, 0xe3, 0x82, 0x3d, 0xeb, 0x37, 0xb8, 0x5d, 0x0a, 0xde, - 0x01, 0x4c, 0x08, 0xef, 0xdd, 0x9e, 0x87, 0x4c, 0xdd, 0xa5, 0x81, 0x37, 0xe0, 0x63, 0xff, 0x3e, - 0x0d, 0x70, 0x14, 0xf1, 0xc7, 0xa9, 0x06, 0xa6, 0x5f, 0xfc, 0x90, 0xf7, 0x07, 0x11, 0xc3, 0x58, - 0x05, 0x15, 0x3d, 0xaf, 0xf3, 0xff, 0x03, 0x00, 0x00, 0xff, 0xff, 0x8f, 0x62, 0x5e, 0x5b, 0xab, - 0x06, 0x00, 0x00, + 
proto.RegisterFile("backend/api/pipeline.proto", fileDescriptor_pipeline_1d626cf7548f7a80) +} + +var fileDescriptor_pipeline_1d626cf7548f7a80 = []byte{ + // 1184 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x57, 0xdd, 0x6e, 0x1b, 0x45, + 0x14, 0x66, 0xed, 0xc4, 0xb1, 0x8f, 0xeb, 0x04, 0xa6, 0x4d, 0xe2, 0x6c, 0x9c, 0xc6, 0x59, 0xa2, + 0x34, 0x09, 0xad, 0xdd, 0x24, 0x08, 0x41, 0xa4, 0x22, 0x35, 0x80, 0x2a, 0xc4, 0x8f, 0x2a, 0x27, + 0xe1, 0xa2, 0x5c, 0x58, 0xe3, 0xf5, 0xb1, 0xbb, 0x64, 0xbd, 0xbb, 0xcc, 0x8c, 0x53, 0x92, 0xaa, + 0x12, 0x2a, 0x20, 0x21, 0x71, 0x07, 0x17, 0xdc, 0xf1, 0x04, 0x3c, 0x02, 0x12, 0x0f, 0xc1, 0x0d, + 0x0f, 0xc0, 0x83, 0xa0, 0x1d, 0xcf, 0xac, 0x77, 0xd7, 0x3f, 0x49, 0xae, 0x92, 0x39, 0xe7, 0x9b, + 0x3d, 0xe7, 0x7c, 0xf3, 0xcd, 0x39, 0x63, 0x30, 0x5b, 0xd4, 0x3e, 0x43, 0xaf, 0x5d, 0xa7, 0x81, + 0x53, 0x0f, 0x9c, 0x00, 0x5d, 0xc7, 0xc3, 0x5a, 0xc0, 0x7c, 0xe1, 0x93, 0x2c, 0x0d, 0x1c, 0xb3, + 0xd2, 0xf5, 0xfd, 0xae, 0x8b, 0xd2, 0x4f, 0x3d, 0xcf, 0x17, 0x54, 0x38, 0xbe, 0xc7, 0x07, 0x10, + 0x73, 0x5d, 0x79, 0xe5, 0xaa, 0xd5, 0xef, 0xd4, 0x85, 0xd3, 0x43, 0x2e, 0x68, 0x2f, 0x50, 0x80, + 0xd5, 0x34, 0x00, 0x7b, 0x81, 0xb8, 0x50, 0xce, 0xe5, 0x78, 0x70, 0x64, 0xcc, 0x67, 0x7a, 0x57, + 0x22, 0x2b, 0xca, 0x68, 0x0f, 0x05, 0x6a, 0xe7, 0xfa, 0xb8, 0x94, 0x9b, 0x3c, 0x40, 0x5b, 0x01, + 0x36, 0xe3, 0x00, 0x86, 0xdc, 0xef, 0x33, 0x1b, 0x9b, 0x0c, 0x3b, 0xc8, 0xd0, 0xb3, 0x55, 0x75, + 0xe6, 0x7d, 0xf9, 0xc7, 0x7e, 0xd0, 0x45, 0xef, 0x01, 0x7f, 0x41, 0xbb, 0x5d, 0x64, 0x75, 0x3f, + 0x90, 0xc5, 0x8d, 0x16, 0x6a, 0x6d, 0x43, 0xf6, 0x94, 0xb9, 0x64, 0x03, 0x6e, 0x45, 0x11, 0xfb, + 0xcc, 0x2d, 0x1b, 0x55, 0x63, 0xbb, 0xd0, 0x28, 0x6a, 0xdb, 0x29, 0x73, 0xad, 0x23, 0x58, 0xfc, + 0x88, 0x21, 0x15, 0xf8, 0x54, 0x19, 0x1b, 0xf8, 0x6d, 0x1f, 0xb9, 0x20, 0x3b, 0x90, 0xd7, 0x38, + 0xb9, 0xaf, 0xb8, 0x5f, 0xaa, 0xd1, 0xc0, 0xa9, 0x45, 0xb8, 0xc8, 0x6d, 0x6d, 0x02, 0x79, 0x82, + 0x22, 0xfd, 0x81, 0x79, 0xc8, 0x38, 0x6d, 0x15, 0x32, 0xe3, 0xb4, 0xad, 0x1f, 0x0c, 0xb8, 0xf3, + 0xb9, 0xc3, 0x23, 0x1c, 0xd7, 0xc0, 0x35, 0x80, 0x80, 0x76, 0xb1, 0x29, 0xfc, 0x33, 0xf4, 0xd4, + 0x86, 0x42, 0x68, 0x39, 0x09, 0x0d, 0x64, 0x15, 0xe4, 0xa2, 0xc9, 0x9d, 0x4b, 0x2c, 0x67, 0xaa, + 0xc6, 0xf6, 0x6c, 0x23, 0x1f, 0x1a, 0x8e, 0x9d, 0x4b, 0x24, 0xcb, 0x30, 0xc7, 0x7d, 0x26, 0x9a, + 0xad, 0x8b, 0x72, 0x56, 0x6e, 0xcc, 0x85, 0xcb, 0xa3, 0x0b, 0xb2, 0x04, 0xb9, 0x8e, 0xe3, 0x0a, + 0x64, 0xe5, 0x99, 0x81, 0x7d, 0xb0, 0xb2, 0x7e, 0x31, 0x60, 0x31, 0x95, 0x05, 0x0f, 0x7c, 0x8f, + 0x23, 0x79, 0x07, 0x0a, 0xba, 0x22, 0x5e, 0x36, 0xaa, 0xd9, 0xd1, 0x8a, 0x87, 0xfe, 0x30, 0x67, + 0xe1, 0x0b, 0xea, 0x0e, 0xb2, 0xca, 0xca, 0xac, 0x0a, 0xd2, 0x22, 0xd3, 0xda, 0x82, 0x05, 0x0f, + 0xbf, 0x13, 0xcd, 0x58, 0x5d, 0x19, 0x99, 0x46, 0x29, 0x34, 0x3f, 0xd5, 0xb5, 0x59, 0xf7, 0x60, + 0xf1, 0x63, 0x74, 0x71, 0x94, 0xfd, 0x34, 0x79, 0x03, 0x8a, 0x4f, 0xb0, 0x17, 0xb8, 0x54, 0x4c, + 0x44, 0xed, 0xc1, 0xed, 0x04, 0x4a, 0x55, 0x66, 0x42, 0x5e, 0x28, 0x9b, 0x02, 0x47, 0x6b, 0xeb, + 0x08, 0x36, 0x62, 0x67, 0xf7, 0x15, 0x32, 0xee, 0xf8, 0x5e, 0x3a, 0xce, 0x1a, 0xc0, 0xf9, 0xc0, + 0xd3, 0x8c, 0xe2, 0x15, 0x94, 0xe5, 0xd3, 0xb6, 0xf5, 0x25, 0x54, 0x92, 0x1a, 0x52, 0x9f, 0xd1, + 0xdb, 0x6b, 0x30, 0xa7, 0xc0, 0x4a, 0x49, 0x77, 0x12, 0xbc, 0x6a, 0xb4, 0x06, 0x59, 0x87, 0xb0, + 0x32, 0x9a, 0xd3, 0x35, 0x73, 0xf9, 0xdb, 0x80, 0xd5, 0xf8, 0xf9, 0xaa, 0xdd, 0x91, 0xd8, 0x0e, + 0xe0, 0x56, 0x74, 0xc7, 0xce, 0xf0, 0x42, 0x25, 0xf4, 0xa6, 0x4c, 0xa8, 0xa1, 0x1c, 0x9f, 0xe1, + 0x45, 0xa3, 0xc8, 
0x86, 0x8b, 0xe9, 0x12, 0x4c, 0xca, 0x37, 0x9b, 0x96, 0x6f, 0x4c, 0xa1, 0x33, + 0x13, 0x14, 0x3a, 0x9b, 0x50, 0xe8, 0xef, 0x06, 0x54, 0xc6, 0x57, 0xa0, 0x8e, 0xf3, 0x21, 0xe4, + 0x55, 0xbd, 0x5a, 0xa7, 0xe3, 0xf9, 0x8c, 0x50, 0xd7, 0x95, 0xe3, 0x15, 0xaa, 0xb6, 0x1e, 0x41, + 0x25, 0xa9, 0xd6, 0x9b, 0x1d, 0xcd, 0x9f, 0x19, 0xc8, 0xeb, 0x9d, 0x69, 0xe9, 0x92, 0x0f, 0x00, + 0x6c, 0xa9, 0xa1, 0x76, 0x93, 0x0a, 0x99, 0x5d, 0x71, 0xdf, 0xac, 0x0d, 0xda, 0x71, 0x4d, 0xb7, + 0xe3, 0xda, 0x89, 0xee, 0xd7, 0x8d, 0x82, 0x42, 0x3f, 0x16, 0x84, 0xc0, 0x8c, 0x47, 0x7b, 0xa8, + 0xa8, 0x97, 0xff, 0x93, 0x2a, 0x14, 0xdb, 0xc8, 0x6d, 0xe6, 0xc8, 0x16, 0xa9, 0x98, 0x8f, 0x9b, + 0x48, 0x2d, 0x3c, 0x36, 0xd5, 0xaa, 0x79, 0x79, 0x56, 0xf2, 0x38, 0x3f, 0xe0, 0x51, 0x9b, 0x1b, + 0x31, 0x04, 0x31, 0x21, 0x1b, 0xb6, 0xd0, 0x39, 0x99, 0x59, 0x5e, 0x02, 0x4f, 0x99, 0xdb, 0x08, + 0x8d, 0xe4, 0x0e, 0xcc, 0xca, 0x79, 0x50, 0xce, 0xc9, 0x38, 0x83, 0x05, 0x79, 0x04, 0x0b, 0x6d, + 0xec, 0xd0, 0xbe, 0x2b, 0x9a, 0x5a, 0xfe, 0xf9, 0x29, 0xf2, 0x9f, 0x57, 0x60, 0xb5, 0xb6, 0xfe, + 0xca, 0xc0, 0x42, 0x0a, 0x33, 0xc2, 0x9a, 0x2e, 0x3d, 0x13, 0x2b, 0x3d, 0xc9, 0x64, 0xf6, 0x26, + 0x4c, 0x26, 0x39, 0x99, 0xb9, 0x92, 0x93, 0x2d, 0x58, 0xb0, 0xfd, 0x36, 0x36, 0xd5, 0x7d, 0x0a, + 0xf9, 0x19, 0x68, 0xb9, 0x14, 0x9a, 0x8f, 0xa5, 0x35, 0x9c, 0x43, 0x3b, 0x50, 0x0c, 0xa8, 0x7d, + 0x16, 0xaa, 0x2f, 0xc4, 0xe4, 0x52, 0x1c, 0x82, 0x72, 0x86, 0xd0, 0x27, 0x70, 0x7b, 0x74, 0x06, + 0xf2, 0xf2, 0x9c, 0xcc, 0x65, 0x29, 0x71, 0x4d, 0x1b, 0xda, 0xdd, 0x20, 0x2c, 0x6d, 0xe2, 0xfb, + 0xff, 0x16, 0x86, 0xf4, 0x1d, 0x23, 0x3b, 0x77, 0x6c, 0x24, 0x1d, 0x98, 0x4f, 0x36, 0x2a, 0x62, + 0xca, 0x2f, 0x8e, 0x9d, 0x80, 0x66, 0xb2, 0xfb, 0x5b, 0x3b, 0xaf, 0xff, 0xf9, 0xef, 0xb7, 0xcc, + 0xdb, 0xd6, 0x72, 0x38, 0xa8, 0x79, 0xfd, 0x7c, 0xaf, 0x85, 0x82, 0xee, 0x45, 0x23, 0x9d, 0x1f, + 0x46, 0x03, 0x91, 0x7c, 0x0d, 0xc5, 0x58, 0x03, 0x23, 0xcb, 0xf2, 0x43, 0xa3, 0x23, 0x32, 0x1d, + 0x61, 0x53, 0x46, 0xb8, 0x4b, 0x2a, 0x13, 0x22, 0xd4, 0x5f, 0x3a, 0xed, 0x57, 0xa4, 0x0b, 0xa5, + 0xc4, 0x00, 0x23, 0x2b, 0xf2, 0x2b, 0xe3, 0x46, 0xab, 0x69, 0x8e, 0x73, 0x0d, 0xda, 0x88, 0xb5, + 0x2e, 0xa3, 0xad, 0x90, 0x49, 0xf5, 0x90, 0x6f, 0x60, 0x3e, 0x79, 0xdd, 0x15, 0x5b, 0x63, 0x27, + 0x96, 0xb9, 0x34, 0x22, 0xb1, 0x4f, 0xc2, 0xb7, 0x93, 0x2e, 0x6a, 0x77, 0x7a, 0x51, 0x81, 0x64, + 0x4c, 0xcf, 0x9d, 0x21, 0x63, 0xa9, 0x49, 0x64, 0x96, 0x47, 0x1d, 0xaa, 0x9c, 0x9a, 0x8c, 0xb3, + 0x4d, 0xb6, 0xa6, 0xc5, 0xa9, 0xeb, 0xb9, 0xc7, 0xc9, 0x6b, 0x23, 0xfd, 0xf2, 0xd1, 0x97, 0x6c, + 0x63, 0x8c, 0x26, 0x92, 0x9d, 0xce, 0x1c, 0x7b, 0x83, 0xad, 0x87, 0x32, 0x85, 0x5d, 0x6b, 0x7d, + 0x7c, 0x0a, 0xba, 0x0b, 0xf0, 0x43, 0x3d, 0xe9, 0xc8, 0xf7, 0x46, 0xe2, 0xe9, 0xa4, 0x33, 0xb8, + 0x9b, 0x16, 0xcc, 0xb5, 0xc2, 0xbf, 0x2b, 0xc3, 0xd7, 0xc8, 0xfd, 0x2b, 0xc2, 0xd7, 0x5f, 0x0e, + 0xbb, 0xf4, 0x2b, 0xf2, 0x63, 0xea, 0x59, 0xa6, 0xc7, 0x0d, 0xa9, 0x8e, 0x68, 0x27, 0x35, 0x4b, + 0xcd, 0x8d, 0x29, 0x08, 0x75, 0x2a, 0xf7, 0x64, 0x4e, 0x1b, 0xe4, 0x2a, 0x4a, 0xc8, 0xcf, 0x46, + 0xfa, 0x29, 0x94, 0x3c, 0x8e, 0x69, 0x83, 0x67, 0xa2, 0xf6, 0x14, 0x23, 0xbb, 0x37, 0x63, 0xe4, + 0x0f, 0x03, 0xcc, 0xc9, 0x6f, 0x22, 0xb2, 0x35, 0xe1, 0x70, 0xae, 0x2f, 0xd5, 0x0f, 0x65, 0x5a, + 0xef, 0x93, 0xf7, 0x6e, 0x92, 0xd6, 0x50, 0xba, 0x47, 0x3f, 0x19, 0xbf, 0x3e, 0xfe, 0xa2, 0x51, + 0x81, 0x39, 0x35, 0x31, 0xc8, 0x5b, 0x64, 0x01, 0x4a, 0x66, 0x51, 0x06, 0x3c, 0x16, 0x54, 0xf4, + 0xf9, 0xb3, 0x75, 0x58, 0x83, 0xdc, 0x11, 0x52, 0x86, 0x8c, 0xdc, 0xce, 0x67, 0xcc, 0x12, 0xed, + 0x8b, 0xe7, 0x3e, 0x73, 0x2e, 0xe5, 0xef, 
0x84, 0x6a, 0xa6, 0x75, 0x0b, 0x20, 0x02, 0xbc, 0xf1, + 0xec, 0xa0, 0xeb, 0x88, 0xe7, 0xfd, 0x56, 0xcd, 0xf6, 0x7b, 0xf5, 0xb3, 0x7e, 0x0b, 0x3b, 0xae, + 0xff, 0x22, 0x76, 0x77, 0xe2, 0x3f, 0x51, 0xba, 0x7e, 0xd3, 0x76, 0x1d, 0xf4, 0x44, 0x2b, 0x27, + 0xe9, 0x3e, 0xf8, 0x3f, 0x00, 0x00, 0xff, 0xff, 0xb1, 0x47, 0x56, 0x80, 0x96, 0x0d, 0x00, 0x00, } diff --git a/backend/api/go_client/pipeline.pb.gw.go b/backend/api/go_client/pipeline.pb.gw.go index 44e21967217..0ec180e87bd 100755 --- a/backend/api/go_client/pipeline.pb.gw.go +++ b/backend/api/go_client/pipeline.pb.gw.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -157,6 +157,121 @@ func request_PipelineService_GetTemplate_0(ctx context.Context, marshaler runtim } +func request_PipelineService_CreatePipelineVersion_0(ctx context.Context, marshaler runtime.Marshaler, client PipelineServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq CreatePipelineVersionRequest + var metadata runtime.ServerMetadata + + newReader, berr := utilities.IOReaderFactory(req.Body) + if berr != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr) + } + if err := marshaler.NewDecoder(newReader()).Decode(&protoReq.Version); err != nil && err != io.EOF { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.CreatePipelineVersion(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func request_PipelineService_GetPipelineVersion_0(ctx context.Context, marshaler runtime.Marshaler, client PipelineServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq GetPipelineVersionRequest + var metadata runtime.ServerMetadata + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["version_id"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "version_id") + } + + protoReq.VersionId, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "version_id", err) + } + + msg, err := client.GetPipelineVersion(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +var ( + filter_PipelineService_ListPipelineVersions_0 = &utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)} +) + +func request_PipelineService_ListPipelineVersions_0(ctx context.Context, marshaler runtime.Marshaler, client PipelineServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq ListPipelineVersionsRequest + var metadata runtime.ServerMetadata + + if err := runtime.PopulateQueryParameters(&protoReq, req.URL.Query(), filter_PipelineService_ListPipelineVersions_0); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.ListPipelineVersions(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func request_PipelineService_DeletePipelineVersion_0(ctx context.Context, marshaler runtime.Marshaler, client PipelineServiceClient, 
req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq DeletePipelineVersionRequest + var metadata runtime.ServerMetadata + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["version_id"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "version_id") + } + + protoReq.VersionId, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "version_id", err) + } + + msg, err := client.DeletePipelineVersion(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func request_PipelineService_GetPipelineVersionTemplate_0(ctx context.Context, marshaler runtime.Marshaler, client PipelineServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq GetPipelineVersionTemplateRequest + var metadata runtime.ServerMetadata + + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["version_id"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "version_id") + } + + protoReq.VersionId, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "version_id", err) + } + + msg, err := client.GetPipelineVersionTemplate(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + // RegisterPipelineServiceHandlerFromEndpoint is same as RegisterPipelineServiceHandler but // automatically dials to "endpoint" and closes the connection when "ctx" gets done. func RegisterPipelineServiceHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) { @@ -295,6 +410,106 @@ func RegisterPipelineServiceHandlerClient(ctx context.Context, mux *runtime.Serv }) + mux.Handle("POST", pattern_PipelineService_CreatePipelineVersion_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_PipelineService_CreatePipelineVersion_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_PipelineService_CreatePipelineVersion_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
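+ // forward_PipelineService_CreatePipelineVersion_0 is an alias for runtime.ForwardResponseMessage (see the forward_* var block further down), which marshals the PipelineVersion returned by the backend onto the HTTP response.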
+ + }) + + mux.Handle("GET", pattern_PipelineService_GetPipelineVersion_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_PipelineService_GetPipelineVersion_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_PipelineService_GetPipelineVersion_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + + }) + + mux.Handle("GET", pattern_PipelineService_ListPipelineVersions_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_PipelineService_ListPipelineVersions_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_PipelineService_ListPipelineVersions_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + + }) + + mux.Handle("DELETE", pattern_PipelineService_DeletePipelineVersion_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_PipelineService_DeletePipelineVersion_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_PipelineService_DeletePipelineVersion_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + + }) + + mux.Handle("GET", pattern_PipelineService_GetPipelineVersionTemplate_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_PipelineService_GetPipelineVersionTemplate_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_PipelineService_GetPipelineVersionTemplate_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
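+ // All five pipeline-version routes follow the same unary shape: runtime.AnnotateContext carries selected inbound HTTP headers into gRPC metadata, the request_* helper invokes the client stub, and any error is mapped to an HTTP status code via runtime.HTTPError before a body is written.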
+ + }) + return nil } @@ -308,6 +523,16 @@ var ( pattern_PipelineService_DeletePipeline_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v1beta1", "pipelines", "id"}, "")) pattern_PipelineService_GetTemplate_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 2, 4}, []string{"apis", "v1beta1", "pipelines", "id", "templates"}, "")) + + pattern_PipelineService_CreatePipelineVersion_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v1beta1", "pipeline_versions"}, "")) + + pattern_PipelineService_GetPipelineVersion_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v1beta1", "pipeline_versions", "version_id"}, "")) + + pattern_PipelineService_ListPipelineVersions_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v1beta1", "pipeline_versions"}, "")) + + pattern_PipelineService_DeletePipelineVersion_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v1beta1", "pipeline_versions", "version_id"}, "")) + + pattern_PipelineService_GetPipelineVersionTemplate_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 2, 4}, []string{"apis", "v1beta1", "pipeline_versions", "version_id", "templates"}, "")) ) var ( @@ -320,4 +545,14 @@ var ( forward_PipelineService_DeletePipeline_0 = runtime.ForwardResponseMessage forward_PipelineService_GetTemplate_0 = runtime.ForwardResponseMessage + + forward_PipelineService_CreatePipelineVersion_0 = runtime.ForwardResponseMessage + + forward_PipelineService_GetPipelineVersion_0 = runtime.ForwardResponseMessage + + forward_PipelineService_ListPipelineVersions_0 = runtime.ForwardResponseMessage + + forward_PipelineService_DeletePipelineVersion_0 = runtime.ForwardResponseMessage + + forward_PipelineService_GetPipelineVersionTemplate_0 = runtime.ForwardResponseMessage ) diff --git a/backend/api/go_client/pipeline_spec.pb.go b/backend/api/go_client/pipeline_spec.pb.go index ec343520ade..56e7b96d41e 100755 --- a/backend/api/go_client/pipeline_spec.pb.go +++ b/backend/api/go_client/pipeline_spec.pb.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_client/pipeline_spec.pb.gw.go b/backend/api/go_client/pipeline_spec.pb.gw.go index 538896f6820..ee8aa8d0959 100755 --- a/backend/api/go_client/pipeline_spec.pb.gw.go +++ b/backend/api/go_client/pipeline_spec.pb.gw.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_client/report.pb.go b/backend/api/go_client/report.pb.go index 27e5902f444..a22530db649 100755 --- a/backend/api/go_client/report.pb.go +++ b/backend/api/go_client/report.pb.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
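The five pattern_PipelineService_*PipelineVersion* definitions above map the new gRPC methods onto REST routes under /apis/v1beta1/pipeline_versions. A minimal sketch of how a client could exercise those routes over plain HTTP follows; the server address, the placeholder ids, and the exact JSON payload are illustrative assumptions, not part of this diff.

// pipeline_versions_sketch.go — hypothetical client for the new gateway routes.
// apiAddr, PIPELINE_ID, and VERSION_ID are assumptions for illustration.
package main

import (
	"bytes"
	"fmt"
	"io"
	"net/http"
)

const apiAddr = "http://localhost:8888" // assumed KFP API server address

// call issues one HTTP request and prints the status plus response body.
func call(method, path string, body io.Reader) {
	req, err := http.NewRequest(method, apiAddr+path, body)
	if err != nil {
		panic(err)
	}
	req.Header.Set("Content-Type", "application/json")
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	out, _ := io.ReadAll(resp.Body)
	fmt.Println(method, path, "->", resp.Status, string(out))
}

func main() {
	// The POST body is the pipeline version message itself: the gateway decodes
	// it straight into protoReq.Version (see request_PipelineService_
	// CreatePipelineVersion_0 above). The PIPELINE/OWNER reference, using the
	// ResourceType_PIPELINE value added in this diff, ties the new version to
	// an existing pipeline.
	call("POST", "/apis/v1beta1/pipeline_versions", bytes.NewBufferString(
		`{"name":"v2","resource_references":[{"key":{"type":"PIPELINE","id":"PIPELINE_ID"},"relationship":"OWNER"}]}`))

	call("GET", "/apis/v1beta1/pipeline_versions", nil)                      // ListPipelineVersions
	call("GET", "/apis/v1beta1/pipeline_versions/VERSION_ID", nil)           // GetPipelineVersion
	call("GET", "/apis/v1beta1/pipeline_versions/VERSION_ID/templates", nil) // GetPipelineVersionTemplate
	call("DELETE", "/apis/v1beta1/pipeline_versions/VERSION_ID", nil)        // DeletePipelineVersion
}

Note that create and list compile to the identical pattern ({"apis", "v1beta1", "pipeline_versions"}); the gateway disambiguates them by HTTP verb, registering the pattern under POST for CreatePipelineVersion and under GET for ListPipelineVersions.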
diff --git a/backend/api/go_client/report.pb.gw.go b/backend/api/go_client/report.pb.gw.go index ccafe707944..97ab458b46f 100755 --- a/backend/api/go_client/report.pb.gw.go +++ b/backend/api/go_client/report.pb.gw.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_client/resource_reference.pb.go b/backend/api/go_client/resource_reference.pb.go index 45fe30247b5..208097d9da8 100755 --- a/backend/api/go_client/resource_reference.pb.go +++ b/backend/api/go_client/resource_reference.pb.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -38,24 +38,33 @@ const ( ResourceType_UNKNOWN_RESOURCE_TYPE ResourceType = 0 ResourceType_EXPERIMENT ResourceType = 1 ResourceType_JOB ResourceType = 2 + ResourceType_PIPELINE ResourceType = 3 + ResourceType_PIPELINE_VERSION ResourceType = 4 + ResourceType_NAMESPACE ResourceType = 5 ) var ResourceType_name = map[int32]string{ 0: "UNKNOWN_RESOURCE_TYPE", 1: "EXPERIMENT", 2: "JOB", + 3: "PIPELINE", + 4: "PIPELINE_VERSION", + 5: "NAMESPACE", } var ResourceType_value = map[string]int32{ "UNKNOWN_RESOURCE_TYPE": 0, "EXPERIMENT": 1, "JOB": 2, + "PIPELINE": 3, + "PIPELINE_VERSION": 4, + "NAMESPACE": 5, } func (x ResourceType) String() string { return proto.EnumName(ResourceType_name, int32(x)) } func (ResourceType) EnumDescriptor() ([]byte, []int) { - return fileDescriptor_resource_reference_05b185b0f444334d, []int{0} + return fileDescriptor_resource_reference_876ea904b7b7aed8, []int{0} } type Relationship int32 @@ -81,7 +90,7 @@ func (x Relationship) String() string { return proto.EnumName(Relationship_name, int32(x)) } func (Relationship) EnumDescriptor() ([]byte, []int) { - return fileDescriptor_resource_reference_05b185b0f444334d, []int{1} + return fileDescriptor_resource_reference_876ea904b7b7aed8, []int{1} } type ResourceKey struct { @@ -96,7 +105,7 @@ func (m *ResourceKey) Reset() { *m = ResourceKey{} } func (m *ResourceKey) String() string { return proto.CompactTextString(m) } func (*ResourceKey) ProtoMessage() {} func (*ResourceKey) Descriptor() ([]byte, []int) { - return fileDescriptor_resource_reference_05b185b0f444334d, []int{0} + return fileDescriptor_resource_reference_876ea904b7b7aed8, []int{0} } func (m *ResourceKey) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_ResourceKey.Unmarshal(m, b) @@ -143,7 +152,7 @@ func (m *ResourceReference) Reset() { *m = ResourceReference{} } func (m *ResourceReference) String() string { return proto.CompactTextString(m) } func (*ResourceReference) ProtoMessage() {} func (*ResourceReference) Descriptor() ([]byte, []int) { - return fileDescriptor_resource_reference_05b185b0f444334d, []int{1} + return fileDescriptor_resource_reference_876ea904b7b7aed8, []int{1} } func (m *ResourceReference) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_ResourceReference.Unmarshal(m, b) @@ -192,30 +201,32 @@ func init() { } func init() { - proto.RegisterFile("backend/api/resource_reference.proto", fileDescriptor_resource_reference_05b185b0f444334d) -} - -var fileDescriptor_resource_reference_05b185b0f444334d = []byte{ - // 325 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x54, 0x91, 0x41, 0x6b, 0xc2, 0x40, - 0x10, 
0x85, 0x4d, 0x62, 0x2b, 0x8e, 0x22, 0x71, 0x69, 0x21, 0xbd, 0x89, 0xb4, 0x20, 0x1e, 0x12, - 0x50, 0xbc, 0x57, 0xed, 0x42, 0xad, 0x6d, 0x22, 0x6b, 0xc4, 0xb6, 0x97, 0x90, 0xc4, 0x51, 0x17, - 0x63, 0x76, 0x89, 0x91, 0x92, 0x6b, 0x7f, 0x79, 0x31, 0x54, 0xa2, 0xb7, 0x81, 0xef, 0xf1, 0xcd, - 0x83, 0x07, 0x8f, 0x81, 0x1f, 0xee, 0x30, 0x5e, 0x59, 0xbe, 0xe4, 0x56, 0x82, 0x07, 0x71, 0x4c, - 0x42, 0xf4, 0x12, 0x5c, 0x63, 0x82, 0x71, 0x88, 0xa6, 0x4c, 0x44, 0x2a, 0x88, 0xe6, 0x4b, 0xde, - 0x7e, 0x81, 0x1a, 0xfb, 0x0f, 0x4c, 0x31, 0x23, 0x4f, 0x50, 0x4e, 0x33, 0x89, 0x86, 0xd2, 0x52, - 0x3a, 0x8d, 0x5e, 0xd3, 0xf4, 0x25, 0x37, 0xcf, 0xdc, 0xcd, 0x24, 0xb2, 0x1c, 0x93, 0x06, 0xa8, - 0x7c, 0x65, 0xa8, 0x2d, 0xa5, 0x53, 0x65, 0x2a, 0x5f, 0xb5, 0x7f, 0x15, 0x68, 0x9e, 0x63, 0xec, - 0xfc, 0x86, 0xb4, 0x41, 0xdb, 0x61, 0x96, 0xbb, 0x6a, 0x3d, 0xfd, 0xca, 0x35, 0xc5, 0x8c, 0x9d, - 0x20, 0x21, 0x50, 0x8e, 0xfd, 0x3d, 0x1a, 0x5a, 0xee, 0xca, 0x6f, 0x32, 0x80, 0x7a, 0x82, 0x91, - 0x9f, 0x72, 0x11, 0x1f, 0xb6, 0x5c, 0xe6, 0x7f, 0x8a, 0x32, 0x05, 0x60, 0x57, 0xb1, 0xee, 0x08, - 0xea, 0x97, 0x55, 0xc9, 0x03, 0xdc, 0x2f, 0xec, 0xa9, 0xed, 0x2c, 0x6d, 0x8f, 0xd1, 0xb9, 0xb3, - 0x60, 0x63, 0xea, 0xb9, 0x5f, 0x33, 0xaa, 0x97, 0x48, 0x03, 0x80, 0x7e, 0xce, 0x28, 0x9b, 0x7c, - 0x50, 0xdb, 0xd5, 0x15, 0x52, 0x01, 0xed, 0xcd, 0x19, 0xe9, 0x6a, 0xf7, 0xf9, 0xe4, 0x28, 0x9c, - 0xc4, 0x80, 0xbb, 0xc2, 0xf1, 0x3e, 0x74, 0x27, 0x8e, 0x3d, 0x7f, 0x9d, 0xcc, 0xf4, 0x12, 0xa9, - 0xc2, 0x8d, 0xb3, 0xb4, 0x29, 0xd3, 0x15, 0x52, 0x83, 0xca, 0x98, 0xd1, 0xa1, 0xeb, 0x30, 0x5d, - 0x1d, 0x0d, 0xbe, 0xfb, 0x1b, 0x9e, 0x6e, 0x8f, 0x81, 0x19, 0x8a, 0xbd, 0xb5, 0x3b, 0x06, 0xb8, - 0x8e, 0xc4, 0x8f, 0x25, 0xb9, 0xc4, 0x88, 0xc7, 0x78, 0xb0, 0x2e, 0xb7, 0xd9, 0x08, 0x2f, 0x8c, - 0x38, 0xc6, 0x69, 0x70, 0x9b, 0x6f, 0xd2, 0xff, 0x0b, 0x00, 0x00, 0xff, 0xff, 0xa6, 0xdd, 0xe5, - 0xee, 0xbb, 0x01, 0x00, 0x00, + proto.RegisterFile("backend/api/resource_reference.proto", fileDescriptor_resource_reference_876ea904b7b7aed8) +} + +var fileDescriptor_resource_reference_876ea904b7b7aed8 = []byte{ + // 366 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x54, 0x91, 0xc1, 0x6b, 0x9c, 0x40, + 0x14, 0xc6, 0xa3, 0x6e, 0x9a, 0xee, 0xdb, 0xed, 0x32, 0x79, 0xa4, 0x60, 0x6f, 0x61, 0x69, 0x21, + 0xe4, 0xa0, 0x90, 0x90, 0x7b, 0xcd, 0x76, 0xa0, 0x76, 0x93, 0x51, 0x46, 0xd3, 0xb4, 0xbd, 0x88, + 0xba, 0x2f, 0xc9, 0xb0, 0x46, 0x07, 0x57, 0x29, 0x5e, 0xfb, 0x97, 0x97, 0x48, 0xc4, 0xec, 0x6d, + 0x86, 0xdf, 0xc7, 0xf7, 0xfb, 0xe0, 0xc1, 0xe7, 0x2c, 0xcd, 0xb7, 0x54, 0x6e, 0xdc, 0x54, 0x2b, + 0xb7, 0xa6, 0x5d, 0xd5, 0xd6, 0x39, 0x25, 0x35, 0x3d, 0x50, 0x4d, 0x65, 0x4e, 0x8e, 0xae, 0xab, + 0xa6, 0x42, 0x2b, 0xd5, 0x6a, 0xf9, 0x0d, 0x66, 0xf2, 0x35, 0xb0, 0xa6, 0x0e, 0xbf, 0xc0, 0xa4, + 0xe9, 0x34, 0xd9, 0xc6, 0xa9, 0x71, 0xb6, 0xb8, 0x38, 0x76, 0x52, 0xad, 0x9c, 0x81, 0xc7, 0x9d, + 0x26, 0xd9, 0x63, 0x5c, 0x80, 0xa9, 0x36, 0xb6, 0x79, 0x6a, 0x9c, 0x4d, 0xa5, 0xa9, 0x36, 0xcb, + 0x7f, 0x06, 0x1c, 0x0f, 0x31, 0x39, 0x68, 0x70, 0x09, 0xd6, 0x96, 0xba, 0xbe, 0x6b, 0x76, 0xc1, + 0xf6, 0xba, 0xd6, 0xd4, 0xc9, 0x17, 0x88, 0x08, 0x93, 0x32, 0x7d, 0x26, 0xdb, 0xea, 0xbb, 0xfa, + 0x37, 0x5e, 0xc1, 0xbc, 0xa6, 0x22, 0x6d, 0x54, 0x55, 0xee, 0x9e, 0x94, 0xee, 0x3d, 0xe3, 0x98, + 0x11, 0xc8, 0xbd, 0xd8, 0x79, 0x0b, 0xf3, 0xb7, 0x53, 0xf1, 0x13, 0x7c, 0xbc, 0x13, 0x6b, 0x11, + 0xdc, 0x8b, 0x44, 0xf2, 0x28, 0xb8, 0x93, 0x2b, 0x9e, 0xc4, 0xbf, 0x43, 0xce, 0x0e, 0x70, 0x01, + 0xc0, 0x7f, 0x85, 0x5c, 0xfa, 0xb7, 0x5c, 0xc4, 0xcc, 0xc0, 
0x23, 0xb0, 0x7e, 0x04, 0xd7, 0xcc, + 0xc4, 0x39, 0xbc, 0x0f, 0xfd, 0x90, 0xdf, 0xf8, 0x82, 0x33, 0x0b, 0x4f, 0x80, 0x0d, 0xbf, 0xe4, + 0x27, 0x97, 0x91, 0x1f, 0x08, 0x36, 0xc1, 0x0f, 0x30, 0x15, 0xde, 0x2d, 0x8f, 0x42, 0x6f, 0xc5, + 0xd9, 0xe1, 0xf9, 0xd7, 0x17, 0xed, 0x38, 0x03, 0x6d, 0x38, 0x19, 0xb5, 0x37, 0x5e, 0xec, 0x07, + 0x22, 0xfa, 0xee, 0x87, 0xec, 0x00, 0xa7, 0x70, 0x18, 0xdc, 0x0b, 0x2e, 0x99, 0x81, 0x33, 0x38, + 0x5a, 0x49, 0xee, 0xc5, 0x81, 0x64, 0xe6, 0xf5, 0xd5, 0x9f, 0xcb, 0x47, 0xd5, 0x3c, 0xb5, 0x99, + 0x93, 0x57, 0xcf, 0xee, 0xb6, 0xcd, 0xe8, 0xa1, 0xa8, 0xfe, 0xba, 0x5a, 0x69, 0x2a, 0x54, 0x49, + 0x3b, 0xf7, 0xed, 0x39, 0x1f, 0xab, 0x24, 0x2f, 0x14, 0x95, 0x4d, 0xf6, 0xae, 0x3f, 0xe3, 0xe5, + 0xff, 0x00, 0x00, 0x00, 0xff, 0xff, 0xa3, 0x58, 0x92, 0x1b, 0xee, 0x01, 0x00, 0x00, } diff --git a/backend/api/go_client/resource_reference.pb.gw.go b/backend/api/go_client/resource_reference.pb.gw.go index 538896f6820..ee8aa8d0959 100755 --- a/backend/api/go_client/resource_reference.pb.gw.go +++ b/backend/api/go_client/resource_reference.pb.gw.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_client/run.pb.go b/backend/api/go_client/run.pb.go index 8aa2c76f4c9..f9f9d4b8a31 100755 --- a/backend/api/go_client/run.pb.go +++ b/backend/api/go_client/run.pb.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -61,7 +61,7 @@ func (x Run_StorageState) String() string { return proto.EnumName(Run_StorageState_name, int32(x)) } func (Run_StorageState) EnumDescriptor() ([]byte, []int) { - return fileDescriptor_run_d3e6e7c711d57876, []int{9, 0} + return fileDescriptor_run_63600a8e2f7114d2, []int{9, 0} } type RunMetric_Format int32 @@ -87,7 +87,7 @@ func (x RunMetric_Format) String() string { return proto.EnumName(RunMetric_Format_name, int32(x)) } func (RunMetric_Format) EnumDescriptor() ([]byte, []int) { - return fileDescriptor_run_d3e6e7c711d57876, []int{12, 0} + return fileDescriptor_run_63600a8e2f7114d2, []int{12, 0} } type ReportRunMetricsResponse_ReportRunMetricResult_Status int32 @@ -119,7 +119,7 @@ func (x ReportRunMetricsResponse_ReportRunMetricResult_Status) String() string { return proto.EnumName(ReportRunMetricsResponse_ReportRunMetricResult_Status_name, int32(x)) } func (ReportRunMetricsResponse_ReportRunMetricResult_Status) EnumDescriptor() ([]byte, []int) { - return fileDescriptor_run_d3e6e7c711d57876, []int{14, 0, 0} + return fileDescriptor_run_63600a8e2f7114d2, []int{14, 0, 0} } type CreateRunRequest struct { @@ -133,7 +133,7 @@ func (m *CreateRunRequest) Reset() { *m = CreateRunRequest{} } func (m *CreateRunRequest) String() string { return proto.CompactTextString(m) } func (*CreateRunRequest) ProtoMessage() {} func (*CreateRunRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_run_d3e6e7c711d57876, []int{0} + return fileDescriptor_run_63600a8e2f7114d2, []int{0} } func (m *CreateRunRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_CreateRunRequest.Unmarshal(m, b) @@ -171,7 +171,7 @@ func (m *GetRunRequest) Reset() { *m = GetRunRequest{} } func (m *GetRunRequest) String() string { return proto.CompactTextString(m) } func (*GetRunRequest) ProtoMessage() {} func (*GetRunRequest) Descriptor() 
([]byte, []int) { - return fileDescriptor_run_d3e6e7c711d57876, []int{1} + return fileDescriptor_run_63600a8e2f7114d2, []int{1} } func (m *GetRunRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_GetRunRequest.Unmarshal(m, b) @@ -213,7 +213,7 @@ func (m *ListRunsRequest) Reset() { *m = ListRunsRequest{} } func (m *ListRunsRequest) String() string { return proto.CompactTextString(m) } func (*ListRunsRequest) ProtoMessage() {} func (*ListRunsRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_run_d3e6e7c711d57876, []int{2} + return fileDescriptor_run_63600a8e2f7114d2, []int{2} } func (m *ListRunsRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_ListRunsRequest.Unmarshal(m, b) @@ -279,7 +279,7 @@ func (m *TerminateRunRequest) Reset() { *m = TerminateRunRequest{} } func (m *TerminateRunRequest) String() string { return proto.CompactTextString(m) } func (*TerminateRunRequest) ProtoMessage() {} func (*TerminateRunRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_run_d3e6e7c711d57876, []int{3} + return fileDescriptor_run_63600a8e2f7114d2, []int{3} } func (m *TerminateRunRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_TerminateRunRequest.Unmarshal(m, b) @@ -317,7 +317,7 @@ func (m *RetryRunRequest) Reset() { *m = RetryRunRequest{} } func (m *RetryRunRequest) String() string { return proto.CompactTextString(m) } func (*RetryRunRequest) ProtoMessage() {} func (*RetryRunRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_run_d3e6e7c711d57876, []int{4} + return fileDescriptor_run_63600a8e2f7114d2, []int{4} } func (m *RetryRunRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_RetryRunRequest.Unmarshal(m, b) @@ -357,7 +357,7 @@ func (m *ListRunsResponse) Reset() { *m = ListRunsResponse{} } func (m *ListRunsResponse) String() string { return proto.CompactTextString(m) } func (*ListRunsResponse) ProtoMessage() {} func (*ListRunsResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_run_d3e6e7c711d57876, []int{5} + return fileDescriptor_run_63600a8e2f7114d2, []int{5} } func (m *ListRunsResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_ListRunsResponse.Unmarshal(m, b) @@ -409,7 +409,7 @@ func (m *ArchiveRunRequest) Reset() { *m = ArchiveRunRequest{} } func (m *ArchiveRunRequest) String() string { return proto.CompactTextString(m) } func (*ArchiveRunRequest) ProtoMessage() {} func (*ArchiveRunRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_run_d3e6e7c711d57876, []int{6} + return fileDescriptor_run_63600a8e2f7114d2, []int{6} } func (m *ArchiveRunRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_ArchiveRunRequest.Unmarshal(m, b) @@ -447,7 +447,7 @@ func (m *UnarchiveRunRequest) Reset() { *m = UnarchiveRunRequest{} } func (m *UnarchiveRunRequest) String() string { return proto.CompactTextString(m) } func (*UnarchiveRunRequest) ProtoMessage() {} func (*UnarchiveRunRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_run_d3e6e7c711d57876, []int{7} + return fileDescriptor_run_63600a8e2f7114d2, []int{7} } func (m *UnarchiveRunRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_UnarchiveRunRequest.Unmarshal(m, b) @@ -485,7 +485,7 @@ func (m *DeleteRunRequest) Reset() { *m = DeleteRunRequest{} } func (m *DeleteRunRequest) String() string { return proto.CompactTextString(m) } func (*DeleteRunRequest) ProtoMessage() {} func (*DeleteRunRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_run_d3e6e7c711d57876, []int{8} + return 
fileDescriptor_run_63600a8e2f7114d2, []int{8} } func (m *DeleteRunRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_DeleteRunRequest.Unmarshal(m, b) @@ -519,6 +519,7 @@ type Run struct { Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` PipelineSpec *PipelineSpec `protobuf:"bytes,4,opt,name=pipeline_spec,json=pipelineSpec,proto3" json:"pipeline_spec,omitempty"` ResourceReferences []*ResourceReference `protobuf:"bytes,5,rep,name=resource_references,json=resourceReferences,proto3" json:"resource_references,omitempty"` + ServiceAccount string `protobuf:"bytes,14,opt,name=service_account,json=serviceAccount,proto3" json:"service_account,omitempty"` CreatedAt *timestamp.Timestamp `protobuf:"bytes,6,opt,name=created_at,json=createdAt,proto3" json:"created_at,omitempty"` ScheduledAt *timestamp.Timestamp `protobuf:"bytes,7,opt,name=scheduled_at,json=scheduledAt,proto3" json:"scheduled_at,omitempty"` FinishedAt *timestamp.Timestamp `protobuf:"bytes,13,opt,name=finished_at,json=finishedAt,proto3" json:"finished_at,omitempty"` @@ -534,7 +535,7 @@ func (m *Run) Reset() { *m = Run{} } func (m *Run) String() string { return proto.CompactTextString(m) } func (*Run) ProtoMessage() {} func (*Run) Descriptor() ([]byte, []int) { - return fileDescriptor_run_d3e6e7c711d57876, []int{9} + return fileDescriptor_run_63600a8e2f7114d2, []int{9} } func (m *Run) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Run.Unmarshal(m, b) @@ -596,6 +597,13 @@ func (m *Run) GetResourceReferences() []*ResourceReference { return nil } +func (m *Run) GetServiceAccount() string { + if m != nil { + return m.ServiceAccount + } + return "" +} + func (m *Run) GetCreatedAt() *timestamp.Timestamp { if m != nil { return m.CreatedAt @@ -650,7 +658,7 @@ func (m *PipelineRuntime) Reset() { *m = PipelineRuntime{} } func (m *PipelineRuntime) String() string { return proto.CompactTextString(m) } func (*PipelineRuntime) ProtoMessage() {} func (*PipelineRuntime) Descriptor() ([]byte, []int) { - return fileDescriptor_run_d3e6e7c711d57876, []int{10} + return fileDescriptor_run_63600a8e2f7114d2, []int{10} } func (m *PipelineRuntime) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_PipelineRuntime.Unmarshal(m, b) @@ -696,7 +704,7 @@ func (m *RunDetail) Reset() { *m = RunDetail{} } func (m *RunDetail) String() string { return proto.CompactTextString(m) } func (*RunDetail) ProtoMessage() {} func (*RunDetail) Descriptor() ([]byte, []int) { - return fileDescriptor_run_d3e6e7c711d57876, []int{11} + return fileDescriptor_run_63600a8e2f7114d2, []int{11} } func (m *RunDetail) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_RunDetail.Unmarshal(m, b) @@ -746,7 +754,7 @@ func (m *RunMetric) Reset() { *m = RunMetric{} } func (m *RunMetric) String() string { return proto.CompactTextString(m) } func (*RunMetric) ProtoMessage() {} func (*RunMetric) Descriptor() ([]byte, []int) { - return fileDescriptor_run_d3e6e7c711d57876, []int{12} + return fileDescriptor_run_63600a8e2f7114d2, []int{12} } func (m *RunMetric) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_RunMetric.Unmarshal(m, b) @@ -873,7 +881,7 @@ func (m *ReportRunMetricsRequest) Reset() { *m = ReportRunMetricsRequest func (m *ReportRunMetricsRequest) String() string { return proto.CompactTextString(m) } func (*ReportRunMetricsRequest) ProtoMessage() {} func (*ReportRunMetricsRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_run_d3e6e7c711d57876, []int{13} + return fileDescriptor_run_63600a8e2f7114d2, 
[]int{13} } func (m *ReportRunMetricsRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_ReportRunMetricsRequest.Unmarshal(m, b) @@ -918,7 +926,7 @@ func (m *ReportRunMetricsResponse) Reset() { *m = ReportRunMetricsRespon func (m *ReportRunMetricsResponse) String() string { return proto.CompactTextString(m) } func (*ReportRunMetricsResponse) ProtoMessage() {} func (*ReportRunMetricsResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_run_d3e6e7c711d57876, []int{14} + return fileDescriptor_run_63600a8e2f7114d2, []int{14} } func (m *ReportRunMetricsResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_ReportRunMetricsResponse.Unmarshal(m, b) @@ -963,7 +971,7 @@ func (m *ReportRunMetricsResponse_ReportRunMetricResult) String() string { } func (*ReportRunMetricsResponse_ReportRunMetricResult) ProtoMessage() {} func (*ReportRunMetricsResponse_ReportRunMetricResult) Descriptor() ([]byte, []int) { - return fileDescriptor_run_d3e6e7c711d57876, []int{14, 0} + return fileDescriptor_run_63600a8e2f7114d2, []int{14, 0} } func (m *ReportRunMetricsResponse_ReportRunMetricResult) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_ReportRunMetricsResponse_ReportRunMetricResult.Unmarshal(m, b) @@ -1024,7 +1032,7 @@ func (m *ReadArtifactRequest) Reset() { *m = ReadArtifactRequest{} } func (m *ReadArtifactRequest) String() string { return proto.CompactTextString(m) } func (*ReadArtifactRequest) ProtoMessage() {} func (*ReadArtifactRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_run_d3e6e7c711d57876, []int{15} + return fileDescriptor_run_63600a8e2f7114d2, []int{15} } func (m *ReadArtifactRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_ReadArtifactRequest.Unmarshal(m, b) @@ -1076,7 +1084,7 @@ func (m *ReadArtifactResponse) Reset() { *m = ReadArtifactResponse{} } func (m *ReadArtifactResponse) String() string { return proto.CompactTextString(m) } func (*ReadArtifactResponse) ProtoMessage() {} func (*ReadArtifactResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_run_d3e6e7c711d57876, []int{16} + return fileDescriptor_run_63600a8e2f7114d2, []int{16} } func (m *ReadArtifactResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_ReadArtifactResponse.Unmarshal(m, b) @@ -1496,106 +1504,107 @@ var _RunService_serviceDesc = grpc.ServiceDesc{ Metadata: "backend/api/run.proto", } -func init() { proto.RegisterFile("backend/api/run.proto", fileDescriptor_run_d3e6e7c711d57876) } - -var fileDescriptor_run_d3e6e7c711d57876 = []byte{ - // 1559 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x57, 0xdd, 0x4e, 0x1b, 0x49, - 0x16, 0xa6, 0x6d, 0xb0, 0xf1, 0xb1, 0x0d, 0x4d, 0xf1, 0xe7, 0x38, 0x20, 0x48, 0x93, 0x1f, 0x92, - 0x0d, 0xb6, 0x42, 0x56, 0x2b, 0x2d, 0xab, 0xd5, 0xaa, 0x01, 0x87, 0x78, 0x03, 0x86, 0x2d, 0x1b, - 0x56, 0xca, 0x5e, 0xb4, 0xda, 0xed, 0xb2, 0xe9, 0xc5, 0xee, 0xee, 0xad, 0xaa, 0x86, 0x25, 0x51, - 0x6e, 0x46, 0xca, 0x0b, 0xcc, 0x5c, 0xcc, 0xdd, 0x3c, 0x44, 0x1e, 0x62, 0xa4, 0xb9, 0x9e, 0xfb, - 0xb9, 0x9a, 0x07, 0x19, 0x75, 0x55, 0xb7, 0xd3, 0xfe, 0xc1, 0x48, 0xb9, 0x82, 0x3a, 0xe7, 0x3b, - 0x3f, 0x3e, 0xe7, 0x7c, 0xc7, 0xc7, 0xb0, 0xdc, 0x34, 0xad, 0x2b, 0xe2, 0xb4, 0xca, 0xa6, 0x67, - 0x97, 0xa9, 0xef, 0x94, 0x3c, 0xea, 0x72, 0x17, 0x25, 0x4d, 0xcf, 0x2e, 0xae, 0xc6, 0x75, 0x84, - 0x52, 0x97, 0x4a, 0x6d, 0xf1, 0x61, 0xc7, 0x75, 0x3b, 0x5d, 0x52, 0x16, 0xaf, 0xa6, 0xdf, 0x2e, - 0x93, 0x9e, 0xc7, 0x6f, 0x43, 0xe5, 0x5a, 0xa8, 0x0c, 0x8c, 0x4c, 0xc7, 0x71, 0xb9, 
0xc9, 0x6d, - 0xd7, 0x61, 0xa1, 0x76, 0x63, 0xd8, 0x94, 0xdb, 0x3d, 0xc2, 0xb8, 0xd9, 0xf3, 0x22, 0x40, 0x3c, - 0xa8, 0x67, 0x7b, 0xa4, 0x6b, 0x3b, 0xc4, 0x60, 0x1e, 0xb1, 0x42, 0xc0, 0xe3, 0x81, 0x8c, 0x09, - 0x73, 0x7d, 0x6a, 0x11, 0x83, 0x92, 0x36, 0xa1, 0xc4, 0xb1, 0x48, 0x88, 0x7a, 0x29, 0xfe, 0x58, - 0x3b, 0x1d, 0xe2, 0xec, 0xb0, 0x1b, 0xb3, 0xd3, 0x21, 0xb4, 0xec, 0x7a, 0x22, 0x93, 0xd1, 0xac, - 0xb4, 0x12, 0xa8, 0x07, 0x94, 0x98, 0x9c, 0x60, 0xdf, 0xc1, 0xe4, 0x7f, 0x3e, 0x61, 0x1c, 0x15, - 0x21, 0x49, 0x7d, 0xa7, 0xa0, 0x6c, 0x2a, 0xdb, 0xd9, 0xdd, 0xd9, 0x92, 0xe9, 0xd9, 0xa5, 0x40, - 0x1b, 0x08, 0xb5, 0xa7, 0x90, 0x3f, 0x22, 0x3c, 0x06, 0x5e, 0x86, 0x14, 0xf5, 0x1d, 0xc3, 0x6e, - 0x09, 0x7c, 0x06, 0xcf, 0x50, 0xdf, 0xa9, 0xb6, 0xb4, 0x9f, 0x15, 0x98, 0x3f, 0xb6, 0x59, 0x80, - 0x64, 0x11, 0x74, 0x1d, 0xc0, 0x33, 0x3b, 0xc4, 0xe0, 0xee, 0x15, 0x71, 0x42, 0x78, 0x26, 0x90, - 0x34, 0x02, 0x01, 0x7a, 0x08, 0xe2, 0x61, 0x30, 0xfb, 0x03, 0x29, 0x24, 0x36, 0x95, 0xed, 0x19, - 0x3c, 0x1b, 0x08, 0xea, 0xf6, 0x07, 0x82, 0x56, 0x21, 0xcd, 0x5c, 0xca, 0x8d, 0xe6, 0x6d, 0x21, - 0x29, 0x0c, 0x53, 0xc1, 0x73, 0xff, 0x16, 0xbd, 0x81, 0x95, 0xd1, 0x52, 0x18, 0x57, 0xe4, 0xb6, - 0x30, 0x2d, 0xf2, 0x57, 0x65, 0xfe, 0x21, 0xe4, 0x1d, 0xb9, 0xc5, 0x4b, 0x11, 0x1e, 0x47, 0xf0, - 0x77, 0xe4, 0x16, 0xad, 0x40, 0xaa, 0x6d, 0x77, 0x39, 0xa1, 0x85, 0x19, 0xe9, 0x5f, 0xbe, 0xb4, - 0x97, 0xb0, 0xd8, 0x20, 0xb4, 0x67, 0x3b, 0x83, 0x35, 0xba, 0xe3, 0x63, 0x6f, 0xc3, 0x3c, 0x26, - 0x9c, 0xde, 0xde, 0x8f, 0xbc, 0x01, 0xf5, 0x6b, 0x7d, 0x98, 0xe7, 0x3a, 0x8c, 0xa0, 0x35, 0x98, - 0xa6, 0xbe, 0xc3, 0x0a, 0xca, 0x66, 0x72, 0xa0, 0xf2, 0x42, 0x1a, 0x94, 0x8f, 0xbb, 0xdc, 0xec, - 0xca, 0x02, 0x25, 0x45, 0x81, 0x32, 0x42, 0x22, 0x2a, 0xf4, 0x14, 0xe6, 0x1d, 0xf2, 0x7f, 0x6e, - 0xc4, 0x4a, 0x9c, 0x10, 0x01, 0xf3, 0x81, 0xf8, 0x2c, 0x2a, 0xb3, 0xb6, 0x05, 0x0b, 0x3a, 0xb5, - 0x2e, 0xed, 0xeb, 0xf8, 0xc7, 0x99, 0x83, 0x44, 0x3f, 0xc1, 0x84, 0xdd, 0xd2, 0x9e, 0xc0, 0xe2, - 0xb9, 0x63, 0xde, 0x0b, 0xd3, 0x40, 0x3d, 0x24, 0x5d, 0xc2, 0x27, 0x61, 0x7e, 0x9b, 0x86, 0x24, - 0xf6, 0x9d, 0x61, 0x39, 0x42, 0x30, 0xed, 0x98, 0x3d, 0x12, 0x26, 0x29, 0xfe, 0x47, 0x7b, 0x90, - 0x67, 0xdc, 0xa5, 0x62, 0x0a, 0xb8, 0xc9, 0x49, 0x01, 0x36, 0x95, 0xed, 0xb9, 0xdd, 0xe5, 0xa8, - 0x12, 0xa5, 0xba, 0xd4, 0xd6, 0x03, 0x25, 0xce, 0xb1, 0xd8, 0x0b, 0x6d, 0x42, 0xb6, 0x45, 0x98, - 0x45, 0x6d, 0x31, 0xeb, 0xe1, 0x94, 0xc4, 0x45, 0xe8, 0x2f, 0x90, 0x1f, 0xa0, 0x55, 0x38, 0x21, - 0x0b, 0xc2, 0xfb, 0x59, 0xa8, 0xa9, 0x7b, 0xc4, 0xc2, 0x39, 0x2f, 0xf6, 0x42, 0x47, 0xb0, 0x38, - 0x3a, 0x62, 0xac, 0x30, 0x23, 0xba, 0xb4, 0x32, 0x30, 0x5f, 0xfd, 0x91, 0xc2, 0x68, 0x64, 0xca, - 0x18, 0xfa, 0x2b, 0x80, 0x25, 0xc8, 0xd6, 0x32, 0x4c, 0x5e, 0x48, 0x89, 0xe8, 0xc5, 0x92, 0xdc, - 0x0b, 0xa5, 0x68, 0x2f, 0x94, 0x1a, 0xd1, 0x5e, 0xc0, 0x99, 0x10, 0xad, 0x73, 0xf4, 0x77, 0xc8, - 0x31, 0xeb, 0x92, 0xb4, 0xfc, 0xae, 0x34, 0x4e, 0xdf, 0x6b, 0x9c, 0xed, 0xe3, 0x75, 0x8e, 0xfe, - 0x06, 0xd9, 0xb6, 0xed, 0xd8, 0xec, 0x52, 0x5a, 0xe7, 0xef, 0xb5, 0x86, 0x08, 0xae, 0xf3, 0x80, - 0x1a, 0x41, 0x37, 0x7c, 0x56, 0x98, 0x0d, 0xa9, 0x27, 0x5e, 0x68, 0x09, 0x66, 0xc4, 0x6e, 0x2c, - 0xe4, 0xe4, 0x60, 0x8b, 0x07, 0xda, 0x86, 0x74, 0x8f, 0x70, 0x6a, 0x5b, 0xac, 0x90, 0x11, 0x15, - 0x9a, 0x8b, 0xba, 0x77, 0x22, 0xc4, 0x38, 0x52, 0x6b, 0x15, 0xc8, 0xc5, 0xfb, 0x89, 0x8a, 0xb0, - 0x52, 0x6f, 0x9c, 0x62, 0xfd, 0xa8, 0x52, 0x6f, 0xe8, 0x8d, 0x8a, 0xa1, 0x5f, 0xe8, 0xd5, 0x63, - 0x7d, 0xff, 0xb8, 0xa2, 0x4e, 0xa1, 0x07, 0xb0, 0x3c, 0xa8, 0xc3, 0x07, 0x6f, 0xab, 0x17, 0x95, - 0x43, 0x55, 
0xd1, 0xae, 0x60, 0x3e, 0x6a, 0x1e, 0xf6, 0x9d, 0x60, 0xab, 0xa2, 0x3f, 0xc1, 0x42, - 0xbf, 0xd3, 0x3d, 0xd3, 0xb1, 0xdb, 0x84, 0x71, 0x31, 0x4b, 0x19, 0xac, 0x46, 0x8a, 0x93, 0x50, - 0x1e, 0x80, 0x6f, 0x5c, 0x7a, 0xd5, 0xee, 0xba, 0x37, 0x5f, 0xc1, 0x59, 0x09, 0x8e, 0x14, 0x11, - 0x58, 0xbb, 0x84, 0x0c, 0xf6, 0x9d, 0x43, 0xc2, 0x4d, 0xbb, 0x3b, 0x69, 0x51, 0xa2, 0x7f, 0x40, - 0x3f, 0x92, 0x41, 0x65, 0x5a, 0x62, 0xd4, 0xb3, 0xbb, 0x4b, 0x03, 0xf3, 0x16, 0xa6, 0x8c, 0xe7, - 0xbd, 0x41, 0x81, 0xf6, 0x8b, 0x22, 0x42, 0xc9, 0xa2, 0xf5, 0xd9, 0xa2, 0xc4, 0xd8, 0xb2, 0x0a, - 0x69, 0xc7, 0x6d, 0x91, 0x60, 0xb5, 0x48, 0x12, 0xa5, 0x82, 0x67, 0xb5, 0x85, 0xb6, 0x20, 0xe7, - 0xf8, 0xbd, 0x26, 0xa1, 0xc6, 0xb5, 0xd9, 0xf5, 0xe5, 0xae, 0x50, 0xde, 0x4e, 0xe1, 0xac, 0x94, - 0x5e, 0x04, 0x42, 0xb4, 0x03, 0xa9, 0xb6, 0x4b, 0x7b, 0x26, 0x17, 0x34, 0x88, 0x91, 0x4c, 0x46, - 0x2c, 0xbd, 0x11, 0x4a, 0x1c, 0x82, 0xb4, 0x5d, 0x48, 0x49, 0x09, 0x9a, 0x87, 0xec, 0x79, 0xad, - 0x7e, 0x56, 0x39, 0xa8, 0xbe, 0xa9, 0x56, 0x0e, 0xd5, 0x29, 0x94, 0x86, 0x24, 0xd6, 0xff, 0xad, - 0x2a, 0x68, 0x0e, 0xe0, 0xac, 0x82, 0x0f, 0x2a, 0xb5, 0x86, 0x7e, 0x54, 0x51, 0x13, 0xfb, 0x69, - 0x98, 0x11, 0x09, 0x68, 0xef, 0x61, 0x15, 0x13, 0xcf, 0xa5, 0xbc, 0xef, 0x9e, 0x4d, 0x5e, 0x8f, - 0xf1, 0x29, 0x4a, 0x4c, 0x9e, 0xa2, 0x9f, 0x92, 0x50, 0x18, 0x75, 0x1e, 0x6e, 0xd4, 0x13, 0x48, - 0x53, 0xc2, 0xfc, 0x2e, 0x8f, 0x96, 0xea, 0xeb, 0x90, 0xae, 0xe3, 0xf1, 0xc3, 0x0a, 0x2c, 0x6c, - 0x71, 0xe4, 0xa3, 0xf8, 0x25, 0x01, 0xcb, 0x63, 0x21, 0x68, 0x03, 0xb2, 0x32, 0x21, 0x23, 0xd6, - 0x26, 0x90, 0xa2, 0x5a, 0xd0, 0xac, 0xc7, 0x30, 0x17, 0x01, 0x06, 0x7a, 0x96, 0x0b, 0x31, 0xb2, - 0x73, 0xb8, 0x4f, 0xb5, 0xa4, 0x68, 0xca, 0xde, 0x37, 0xa4, 0x5b, 0xaa, 0x0b, 0x0f, 0x7d, 0x9a, - 0x16, 0x82, 0x52, 0x32, 0x66, 0x76, 0x88, 0xe8, 0x74, 0x06, 0x47, 0x4f, 0xad, 0x05, 0x29, 0x89, - 0x1d, 0xed, 0x69, 0x0a, 0x12, 0xa7, 0xef, 0x54, 0x05, 0x2d, 0x81, 0x5a, 0xad, 0x5d, 0xe8, 0xc7, - 0xd5, 0x43, 0x43, 0xc7, 0x47, 0xe7, 0x27, 0x95, 0x5a, 0x43, 0x4d, 0xa0, 0x55, 0x58, 0x3c, 0x3c, - 0x3f, 0x3b, 0xae, 0x1e, 0x04, 0x54, 0xc4, 0x95, 0xb3, 0x53, 0xdc, 0xa8, 0xd6, 0x8e, 0xd4, 0x24, - 0x42, 0x30, 0x57, 0xad, 0x35, 0x2a, 0xb8, 0xa6, 0x1f, 0x1b, 0x15, 0x8c, 0x4f, 0xb1, 0x3a, 0xad, - 0xfd, 0x17, 0x16, 0x31, 0x31, 0x5b, 0x3a, 0xe5, 0x76, 0xdb, 0xb4, 0xf8, 0x3d, 0x8d, 0x9f, 0x30, - 0xd4, 0x79, 0x33, 0x74, 0x21, 0x6b, 0x2c, 0x37, 0x7c, 0x2e, 0x12, 0x06, 0x55, 0xd6, 0x5e, 0xc0, - 0xd2, 0x60, 0xac, 0x70, 0x0e, 0x10, 0x4c, 0xb7, 0x4c, 0x6e, 0x8a, 0x50, 0x39, 0x2c, 0xfe, 0xdf, - 0xfd, 0x32, 0x0b, 0x80, 0x7d, 0xa7, 0x4e, 0xe8, 0xb5, 0x6d, 0x11, 0x54, 0x87, 0x4c, 0xff, 0x12, - 0x42, 0x92, 0x0c, 0xc3, 0x97, 0x51, 0xb1, 0x3f, 0x84, 0x72, 0x01, 0x68, 0x1b, 0xdf, 0xfd, 0xfa, - 0xfb, 0x0f, 0x89, 0x07, 0x1a, 0x0a, 0x4e, 0x32, 0x56, 0xbe, 0x7e, 0xd5, 0x24, 0xdc, 0x7c, 0x15, - 0x5c, 0x93, 0x6c, 0x4f, 0x6c, 0x81, 0x7f, 0x41, 0x4a, 0x9e, 0x4b, 0x08, 0x09, 0xd3, 0x81, 0xdb, - 0x69, 0xc4, 0xdd, 0x96, 0x70, 0xb7, 0x8e, 0x1e, 0x8e, 0xba, 0x2b, 0x7f, 0x94, 0xc5, 0xfa, 0x84, - 0xea, 0x30, 0x1b, 0x1d, 0x0e, 0x48, 0xae, 0x92, 0xa1, 0x3b, 0xab, 0xb8, 0x3c, 0x24, 0x95, 0x35, - 0xd0, 0x8a, 0xc2, 0xfb, 0x12, 0x1a, 0x93, 0x2c, 0x22, 0x00, 0x5f, 0x8f, 0x02, 0x24, 0xbf, 0xd3, - 0x46, 0xae, 0x84, 0xe2, 0xca, 0xc8, 0x17, 0x46, 0x25, 0x38, 0x7f, 0xb5, 0x67, 0xc2, 0xf3, 0x23, - 0x6d, 0x63, 0x5c, 0xde, 0x76, 0xeb, 0xd3, 0x5e, 0x78, 0x49, 0xa0, 0x2b, 0xc8, 0xc5, 0xcf, 0x0a, - 0x54, 0x10, 0x81, 0xc6, 0x5c, 0x1a, 0x77, 0x86, 0x7a, 0x2e, 0x42, 0x6d, 0x69, 0x8f, 0xee, 0x0a, - 0xe5, 0x47, 0xce, 0xd0, 0x7f, 0x20, 
0xd3, 0x3f, 0x4e, 0xc2, 0x86, 0x0e, 0x1f, 0x2b, 0x77, 0x86, - 0x09, 0x1b, 0xfb, 0x62, 0xf5, 0x8e, 0x30, 0xe8, 0xb3, 0x02, 0xea, 0x30, 0x2d, 0xd1, 0xda, 0x1d, - 0x6c, 0x95, 0xb1, 0xd6, 0x27, 0x72, 0x59, 0xfb, 0xb3, 0x08, 0x59, 0xd2, 0x9e, 0x4f, 0x68, 0xfe, - 0x1e, 0x15, 0xd6, 0xa1, 0xe9, 0x9e, 0xf2, 0x02, 0xfd, 0xa8, 0x40, 0x2e, 0x3e, 0xf1, 0x61, 0x49, - 0xc7, 0x10, 0xae, 0xf8, 0x60, 0x8c, 0x26, 0x8c, 0x8d, 0x45, 0xec, 0x63, 0xf4, 0xcf, 0x09, 0xb1, - 0xcb, 0x01, 0x0f, 0x59, 0xf9, 0x63, 0xc8, 0xce, 0x4f, 0xe5, 0x88, 0x78, 0xac, 0xfc, 0x71, 0x80, - 0x98, 0x41, 0x96, 0x66, 0x0b, 0xb9, 0x90, 0x8b, 0x1f, 0xce, 0x61, 0x62, 0x63, 0x6e, 0xe9, 0x3b, - 0x9b, 0xb0, 0x23, 0xb2, 0x7a, 0xa6, 0x3d, 0x99, 0x94, 0x15, 0x8f, 0x1c, 0x22, 0x0b, 0x66, 0xa3, - 0xdb, 0x3b, 0x24, 0xc6, 0xd0, 0x29, 0xfe, 0x6d, 0x43, 0x15, 0x05, 0xa2, 0x81, 0xb3, 0xfd, 0xcf, - 0xca, 0xf7, 0xfa, 0x09, 0x5e, 0x83, 0x74, 0x8b, 0xb4, 0xcd, 0x60, 0xf1, 0x2f, 0xa0, 0x79, 0xc8, - 0x17, 0xb3, 0x22, 0x9e, 0x5c, 0xa6, 0xef, 0x37, 0x60, 0x1d, 0x52, 0xfb, 0xc4, 0xa4, 0x84, 0xa2, - 0xc5, 0xd9, 0x44, 0x31, 0x6f, 0xfa, 0xfc, 0xd2, 0xa5, 0xf6, 0x07, 0xf1, 0xcb, 0x6b, 0x33, 0xd1, - 0xcc, 0x01, 0xf4, 0x01, 0x53, 0xef, 0x5f, 0x77, 0x6c, 0x7e, 0xe9, 0x37, 0x4b, 0x96, 0xdb, 0x2b, - 0x5f, 0xf9, 0x4d, 0x12, 0xdc, 0x1b, 0xfd, 0xdf, 0x7f, 0xac, 0x1c, 0xff, 0xd1, 0xd7, 0x71, 0x0d, - 0xab, 0x6b, 0x13, 0x87, 0x37, 0x53, 0xe2, 0x23, 0xbc, 0xfe, 0x23, 0x00, 0x00, 0xff, 0xff, 0xd4, - 0xc7, 0xa6, 0xe7, 0xc6, 0x0e, 0x00, 0x00, +func init() { proto.RegisterFile("backend/api/run.proto", fileDescriptor_run_63600a8e2f7114d2) } + +var fileDescriptor_run_63600a8e2f7114d2 = []byte{ + // 1580 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x57, 0xcd, 0x6e, 0xe3, 0xc8, + 0x11, 0x36, 0x25, 0x5b, 0xb2, 0x4a, 0x7f, 0x74, 0xfb, 0x8f, 0xa3, 0xb1, 0x61, 0x2f, 0xbd, 0xbb, + 0xe3, 0x9d, 0xac, 0x25, 0xac, 0x27, 0x08, 0x10, 0x07, 0x41, 0x40, 0xdb, 0x1a, 0xaf, 0x32, 0xb6, + 0xec, 0xb4, 0x64, 0x07, 0x98, 0x1c, 0x08, 0x8a, 0x6a, 0xc9, 0x8c, 0x25, 0x92, 0xe9, 0x6e, 0xda, + 0xf1, 0x0c, 0xe6, 0x12, 0x60, 0x91, 0x7b, 0x72, 0xc8, 0x2d, 0x0f, 0xb1, 0x0f, 0x11, 0x20, 0xe7, + 0xbc, 0x42, 0x1e, 0x64, 0xc1, 0x6e, 0x52, 0x43, 0xfd, 0x58, 0x06, 0xf6, 0x24, 0x75, 0xd5, 0xd7, + 0x55, 0xc5, 0xaa, 0xfa, 0x8a, 0x45, 0x58, 0xef, 0x58, 0xf6, 0x1d, 0x71, 0xbb, 0x35, 0xcb, 0x77, + 0x6a, 0x34, 0x70, 0xab, 0x3e, 0xf5, 0xb8, 0x87, 0xd2, 0x96, 0xef, 0x54, 0x36, 0x93, 0x3a, 0x42, + 0xa9, 0x47, 0xa5, 0xb6, 0xf2, 0xb2, 0xef, 0x79, 0xfd, 0x01, 0xa9, 0x89, 0x53, 0x27, 0xe8, 0xd5, + 0xc8, 0xd0, 0xe7, 0x8f, 0x91, 0x72, 0x2b, 0x52, 0x86, 0x97, 0x2c, 0xd7, 0xf5, 0xb8, 0xc5, 0x1d, + 0xcf, 0x65, 0x91, 0x76, 0x67, 0xf2, 0x2a, 0x77, 0x86, 0x84, 0x71, 0x6b, 0xe8, 0xc7, 0x80, 0xa4, + 0x53, 0xdf, 0xf1, 0xc9, 0xc0, 0x71, 0x89, 0xc9, 0x7c, 0x62, 0x47, 0x80, 0x2f, 0xc7, 0x22, 0x26, + 0xcc, 0x0b, 0xa8, 0x4d, 0x4c, 0x4a, 0x7a, 0x84, 0x12, 0xd7, 0x26, 0x11, 0xea, 0x5b, 0xf1, 0x63, + 0x1f, 0xf4, 0x89, 0x7b, 0xc0, 0x1e, 0xac, 0x7e, 0x9f, 0xd0, 0x9a, 0xe7, 0x8b, 0x48, 0xa6, 0xa3, + 0xd2, 0xab, 0xa0, 0x9e, 0x50, 0x62, 0x71, 0x82, 0x03, 0x17, 0x93, 0xbf, 0x04, 0x84, 0x71, 0x54, + 0x81, 0x34, 0x0d, 0x5c, 0x4d, 0xd9, 0x55, 0xf6, 0xf3, 0x87, 0xcb, 0x55, 0xcb, 0x77, 0xaa, 0xa1, + 0x36, 0x14, 0xea, 0x5f, 0x43, 0xf1, 0x8c, 0xf0, 0x04, 0x78, 0x1d, 0x32, 0x34, 0x70, 0x4d, 0xa7, + 0x2b, 0xf0, 0x39, 0xbc, 0x44, 0x03, 0xb7, 0xd1, 0xd5, 0xff, 0xa3, 0x40, 0xf9, 0xdc, 0x61, 0x21, + 0x92, 0xc5, 0xd0, 0x6d, 0x00, 0xdf, 0xea, 0x13, 0x93, 0x7b, 0x77, 0xc4, 0x8d, 0xe0, 0xb9, 0x50, + 0xd2, 0x0e, 
0x05, 0xe8, 0x25, 0x88, 0x83, 0xc9, 0x9c, 0x0f, 0x44, 0x4b, 0xed, 0x2a, 0xfb, 0x4b, + 0x78, 0x39, 0x14, 0xb4, 0x9c, 0x0f, 0x04, 0x6d, 0x42, 0x96, 0x79, 0x94, 0x9b, 0x9d, 0x47, 0x2d, + 0x2d, 0x2e, 0x66, 0xc2, 0xe3, 0xf1, 0x23, 0x7a, 0x0b, 0x1b, 0xd3, 0xa9, 0x30, 0xef, 0xc8, 0xa3, + 0xb6, 0x28, 0xe2, 0x57, 0x65, 0xfc, 0x11, 0xe4, 0x1d, 0x79, 0xc4, 0x6b, 0x31, 0x1e, 0xc7, 0xf0, + 0x77, 0xe4, 0x11, 0x6d, 0x40, 0xa6, 0xe7, 0x0c, 0x38, 0xa1, 0xda, 0x92, 0xb4, 0x2f, 0x4f, 0xfa, + 0xb7, 0xb0, 0xda, 0x26, 0x74, 0xe8, 0xb8, 0xe3, 0x39, 0x7a, 0xe2, 0xb1, 0xf7, 0xa1, 0x8c, 0x09, + 0xa7, 0x8f, 0xcf, 0x23, 0x1f, 0x40, 0xfd, 0x9c, 0x1f, 0xe6, 0x7b, 0x2e, 0x23, 0x68, 0x0b, 0x16, + 0x69, 0xe0, 0x32, 0x4d, 0xd9, 0x4d, 0x8f, 0x65, 0x5e, 0x48, 0xc3, 0xf4, 0x71, 0x8f, 0x5b, 0x03, + 0x99, 0xa0, 0xb4, 0x48, 0x50, 0x4e, 0x48, 0x44, 0x86, 0xbe, 0x86, 0xb2, 0x4b, 0xfe, 0xca, 0xcd, + 0x44, 0x8a, 0x53, 0xc2, 0x61, 0x31, 0x14, 0x5f, 0xc5, 0x69, 0xd6, 0xf7, 0x60, 0xc5, 0xa0, 0xf6, + 0xad, 0x73, 0x9f, 0x7c, 0x9c, 0x12, 0xa4, 0x46, 0x01, 0xa6, 0x9c, 0xae, 0xfe, 0x15, 0xac, 0x5e, + 0xbb, 0xd6, 0xb3, 0x30, 0x1d, 0xd4, 0x53, 0x32, 0x20, 0x7c, 0x1e, 0xe6, 0xef, 0x4b, 0x90, 0xc6, + 0x81, 0x3b, 0x29, 0x47, 0x08, 0x16, 0x5d, 0x6b, 0x48, 0xa2, 0x20, 0xc5, 0x7f, 0x74, 0x04, 0x45, + 0xc6, 0x3d, 0x2a, 0xba, 0x80, 0x5b, 0x9c, 0x68, 0xb0, 0xab, 0xec, 0x97, 0x0e, 0xd7, 0xe3, 0x4c, + 0x54, 0x5b, 0x52, 0xdb, 0x0a, 0x95, 0xb8, 0xc0, 0x12, 0x27, 0xb4, 0x0b, 0xf9, 0x2e, 0x61, 0x36, + 0x75, 0x44, 0xaf, 0x47, 0x5d, 0x92, 0x14, 0xa1, 0x5f, 0x41, 0x71, 0x8c, 0x56, 0x51, 0x87, 0xac, + 0x08, 0xeb, 0x57, 0x91, 0xa6, 0xe5, 0x13, 0x1b, 0x17, 0xfc, 0xc4, 0x09, 0x9d, 0xc1, 0xea, 0x74, + 0x8b, 0x31, 0x6d, 0x49, 0x54, 0x69, 0x63, 0xac, 0xbf, 0x46, 0x2d, 0x85, 0xd1, 0x54, 0x97, 0x31, + 0xf4, 0x0a, 0xca, 0x8c, 0xd0, 0x7b, 0xc7, 0x26, 0xa6, 0x65, 0xdb, 0x5e, 0xe0, 0x72, 0xad, 0x24, + 0xc2, 0x2c, 0x45, 0x62, 0x43, 0x4a, 0xd1, 0xaf, 0x01, 0x6c, 0xc1, 0xca, 0xae, 0x69, 0x71, 0x2d, + 0x23, 0xc2, 0xac, 0x54, 0xe5, 0x00, 0xa9, 0xc6, 0x03, 0xa4, 0xda, 0x8e, 0x07, 0x08, 0xce, 0x45, + 0x68, 0x83, 0xa3, 0xdf, 0x42, 0x81, 0xd9, 0xb7, 0xa4, 0x1b, 0x0c, 0xe4, 0xe5, 0xec, 0xb3, 0x97, + 0xf3, 0x23, 0xbc, 0xc1, 0xd1, 0x6f, 0x20, 0xdf, 0x73, 0x5c, 0x87, 0xdd, 0xca, 0xdb, 0xc5, 0x67, + 0x6f, 0x43, 0x0c, 0x37, 0x78, 0xc8, 0xa1, 0xb0, 0x6c, 0x01, 0xd3, 0x96, 0x23, 0x8e, 0x8a, 0x13, + 0x5a, 0x83, 0x25, 0x31, 0x44, 0xb5, 0x82, 0x64, 0x80, 0x38, 0xa0, 0x7d, 0xc8, 0x0e, 0x09, 0xa7, + 0x8e, 0xcd, 0xb4, 0x9c, 0x48, 0x65, 0x29, 0x2e, 0xf3, 0x85, 0x10, 0xe3, 0x58, 0xad, 0xd7, 0xa1, + 0x90, 0x2c, 0x3c, 0xaa, 0xc0, 0x46, 0xab, 0x7d, 0x89, 0x8d, 0xb3, 0x7a, 0xab, 0x6d, 0xb4, 0xeb, + 0xa6, 0x71, 0x63, 0x34, 0xce, 0x8d, 0xe3, 0xf3, 0xba, 0xba, 0x80, 0x5e, 0xc0, 0xfa, 0xb8, 0x0e, + 0x9f, 0x7c, 0xdf, 0xb8, 0xa9, 0x9f, 0xaa, 0x8a, 0x7e, 0x07, 0xe5, 0xb8, 0xca, 0x38, 0x70, 0xc3, + 0xf1, 0x8b, 0x7e, 0x01, 0x2b, 0xa3, 0x96, 0x18, 0x5a, 0xae, 0xd3, 0x23, 0x8c, 0x8b, 0xa6, 0xcb, + 0x61, 0x35, 0x56, 0x5c, 0x44, 0xf2, 0x10, 0xfc, 0xe0, 0xd1, 0xbb, 0xde, 0xc0, 0x7b, 0xf8, 0x0c, + 0xce, 0x4b, 0x70, 0xac, 0x88, 0xc1, 0xfa, 0x2d, 0xe4, 0x70, 0xe0, 0x9e, 0x12, 0x6e, 0x39, 0x83, + 0x79, 0x13, 0x15, 0xfd, 0x0e, 0x46, 0x9e, 0x4c, 0x2a, 0xc3, 0x12, 0x9c, 0xc8, 0x1f, 0xae, 0x8d, + 0x35, 0x66, 0x14, 0x32, 0x2e, 0xfb, 0xe3, 0x02, 0xfd, 0xbf, 0x8a, 0x70, 0x25, 0x93, 0x36, 0xa2, + 0x95, 0x92, 0xa0, 0xd5, 0x26, 0x64, 0x5d, 0xaf, 0x4b, 0xc2, 0x19, 0x24, 0xd9, 0x96, 0x09, 0x8f, + 0x8d, 0x2e, 0xda, 0x83, 0x82, 0x1b, 0x0c, 0x3b, 0x84, 0x9a, 0xf7, 0xd6, 0x20, 0x90, 0x43, 0x45, + 0xf9, 0x7e, 0x01, 0xe7, 0xa5, 0xf4, 
0x26, 0x14, 0xa2, 0x03, 0xc8, 0xf4, 0x3c, 0x3a, 0xb4, 0xb8, + 0xe0, 0x4b, 0x82, 0x8d, 0xd2, 0x63, 0xf5, 0xad, 0x50, 0xe2, 0x08, 0xa4, 0x1f, 0x42, 0x46, 0x4a, + 0x50, 0x19, 0xf2, 0xd7, 0xcd, 0xd6, 0x55, 0xfd, 0xa4, 0xf1, 0xb6, 0x51, 0x3f, 0x55, 0x17, 0x50, + 0x16, 0xd2, 0xd8, 0xf8, 0xa3, 0xaa, 0xa0, 0x12, 0xc0, 0x55, 0x1d, 0x9f, 0xd4, 0x9b, 0x6d, 0xe3, + 0xac, 0xae, 0xa6, 0x8e, 0xb3, 0xb0, 0x24, 0x02, 0xd0, 0xdf, 0xc3, 0x26, 0x26, 0xbe, 0x47, 0xf9, + 0xc8, 0x3c, 0x9b, 0x3f, 0x47, 0x93, 0x5d, 0x94, 0x9a, 0xdf, 0x45, 0xff, 0x4e, 0x83, 0x36, 0x6d, + 0x3c, 0x1a, 0xbd, 0x17, 0x90, 0xa5, 0x84, 0x05, 0x03, 0x1e, 0x4f, 0xdf, 0x37, 0x11, 0xaf, 0x67, + 0xe3, 0x27, 0x15, 0x58, 0xdc, 0xc5, 0xb1, 0x8d, 0xca, 0x8f, 0x29, 0x58, 0x9f, 0x09, 0x41, 0x3b, + 0x90, 0x97, 0x01, 0x99, 0x89, 0x32, 0x81, 0x14, 0x35, 0xc3, 0x62, 0x7d, 0x09, 0xa5, 0x18, 0x30, + 0x56, 0xb3, 0x42, 0x84, 0x91, 0x95, 0xc3, 0x23, 0xaa, 0xa5, 0x45, 0x51, 0x8e, 0x7e, 0x46, 0xb8, + 0xd5, 0x96, 0xb0, 0x30, 0xa2, 0xa9, 0x16, 0xa6, 0x92, 0x31, 0xab, 0x4f, 0x44, 0xa5, 0x73, 0x38, + 0x3e, 0xea, 0x5d, 0xc8, 0x48, 0xec, 0x74, 0x4d, 0x33, 0x90, 0xba, 0x7c, 0xa7, 0x2a, 0x68, 0x0d, + 0xd4, 0x46, 0xf3, 0xc6, 0x38, 0x6f, 0x9c, 0x9a, 0x06, 0x3e, 0xbb, 0xbe, 0xa8, 0x37, 0xdb, 0x6a, + 0x0a, 0x6d, 0xc2, 0xea, 0xe9, 0xf5, 0xd5, 0x79, 0xe3, 0x24, 0xa4, 0x22, 0xae, 0x5f, 0x5d, 0xe2, + 0x76, 0xa3, 0x79, 0xa6, 0xa6, 0x11, 0x82, 0x52, 0xa3, 0xd9, 0xae, 0xe3, 0xa6, 0x71, 0x6e, 0xd6, + 0x31, 0xbe, 0xc4, 0xea, 0xa2, 0xfe, 0x67, 0x58, 0xc5, 0xc4, 0xea, 0x1a, 0x94, 0x3b, 0x3d, 0xcb, + 0xe6, 0xcf, 0x14, 0x7e, 0x4e, 0x53, 0x17, 0xad, 0xc8, 0x84, 0xcc, 0xb1, 0x7c, 0x15, 0x14, 0x62, + 0x61, 0x98, 0x65, 0xfd, 0x35, 0xac, 0x8d, 0xfb, 0x8a, 0xfa, 0x00, 0xc1, 0x62, 0xd7, 0xe2, 0x96, + 0x70, 0x55, 0xc0, 0xe2, 0xff, 0xe1, 0x8f, 0xcb, 0x00, 0x38, 0x70, 0x5b, 0x72, 0x46, 0xa3, 0x16, + 0xe4, 0x46, 0x2b, 0x13, 0x92, 0x64, 0x98, 0x5c, 0xa1, 0x2a, 0xa3, 0x26, 0x94, 0x03, 0x40, 0xdf, + 0xf9, 0xdb, 0xff, 0xfe, 0xff, 0xcf, 0xd4, 0x0b, 0x1d, 0x85, 0xbb, 0x1b, 0xab, 0xdd, 0x7f, 0xd7, + 0x21, 0xdc, 0xfa, 0x2e, 0x5c, 0x3b, 0xd9, 0x91, 0x98, 0x02, 0x7f, 0x80, 0x8c, 0xdc, 0xab, 0x10, + 0x12, 0x57, 0xc7, 0x96, 0xac, 0x29, 0x73, 0x7b, 0xc2, 0xdc, 0x36, 0x7a, 0x39, 0x6d, 0xae, 0xf6, + 0x51, 0x26, 0xeb, 0x13, 0x6a, 0xc1, 0x72, 0xbc, 0x61, 0x20, 0x39, 0x4a, 0x26, 0x16, 0xb2, 0xca, + 0xfa, 0x84, 0x54, 0xe6, 0x40, 0xaf, 0x08, 0xeb, 0x6b, 0x68, 0x46, 0xb0, 0x88, 0x00, 0x7c, 0xde, + 0x1e, 0x90, 0x7c, 0xf9, 0x4d, 0xad, 0x13, 0x95, 0x8d, 0xa9, 0x17, 0x46, 0x3d, 0xdc, 0x93, 0xf5, + 0x57, 0xc2, 0xf2, 0x17, 0xfa, 0xce, 0xac, 0xb8, 0x9d, 0xee, 0xa7, 0xa3, 0x68, 0xe5, 0x40, 0x77, + 0x50, 0x48, 0xee, 0x1f, 0x48, 0x13, 0x8e, 0x66, 0xac, 0x24, 0x4f, 0xba, 0xfa, 0x46, 0xb8, 0xda, + 0xd3, 0xbf, 0x78, 0xca, 0x55, 0x10, 0x1b, 0x43, 0x7f, 0x82, 0xdc, 0x68, 0x8b, 0x89, 0x0a, 0x3a, + 0xb9, 0xd5, 0x3c, 0xe9, 0x26, 0x2a, 0xec, 0xeb, 0xcd, 0x27, 0xdc, 0xa0, 0x1f, 0x14, 0x50, 0x27, + 0x69, 0x89, 0xb6, 0x9e, 0x60, 0xab, 0xf4, 0xb5, 0x3d, 0x97, 0xcb, 0xfa, 0x2f, 0x85, 0xcb, 0xaa, + 0xfe, 0xcd, 0x9c, 0xe2, 0x1f, 0x51, 0x71, 0x3b, 0xba, 0x7a, 0xa4, 0xbc, 0x46, 0xff, 0x52, 0xa0, + 0x90, 0xec, 0xf8, 0x28, 0xa5, 0x33, 0x08, 0x57, 0x79, 0x31, 0x43, 0x13, 0xf9, 0xc6, 0xc2, 0xf7, + 0x39, 0xfa, 0xfd, 0x1c, 0xdf, 0xb5, 0x90, 0x87, 0xac, 0xf6, 0x31, 0x62, 0xe7, 0xa7, 0x5a, 0x4c, + 0x3c, 0x56, 0xfb, 0x38, 0x46, 0xcc, 0x30, 0x4a, 0xab, 0x8b, 0x3c, 0x28, 0x24, 0x37, 0xec, 0x28, + 0xb0, 0x19, 0x4b, 0xf7, 0x93, 0x45, 0x38, 0x10, 0x51, 0xbd, 0xd2, 0xbf, 0x9a, 0x17, 0x15, 0x8f, + 0x0d, 0x22, 0x1b, 0x96, 0xe3, 0x25, 0x3d, 0x22, 0xc6, 0xc4, 
0xce, 0xfe, 0xf3, 0x9a, 0x2a, 0x76, + 0x44, 0x43, 0x63, 0xc7, 0x3f, 0x28, 0xff, 0x30, 0x2e, 0xf0, 0x16, 0x64, 0xbb, 0xa4, 0x67, 0x85, + 0x83, 0x7f, 0x05, 0x95, 0xa1, 0x58, 0xc9, 0x0b, 0x7f, 0x72, 0x98, 0xbe, 0xdf, 0x81, 0x6d, 0xc8, + 0x1c, 0x13, 0x8b, 0x12, 0x8a, 0x56, 0x97, 0x53, 0x95, 0xa2, 0x15, 0xf0, 0x5b, 0x8f, 0x3a, 0x1f, + 0xc4, 0x27, 0xda, 0x6e, 0xaa, 0x53, 0x00, 0x18, 0x01, 0x16, 0xde, 0xbf, 0xe9, 0x3b, 0xfc, 0x36, + 0xe8, 0x54, 0x6d, 0x6f, 0x58, 0xbb, 0x0b, 0x3a, 0x24, 0xdc, 0x37, 0x46, 0x1f, 0x8a, 0xac, 0x96, + 0xfc, 0x3a, 0xec, 0x7b, 0xa6, 0x3d, 0x70, 0x88, 0xcb, 0x3b, 0x19, 0xf1, 0x08, 0x6f, 0x7e, 0x0a, + 0x00, 0x00, 0xff, 0xff, 0xd6, 0x6b, 0x76, 0xde, 0xef, 0x0e, 0x00, 0x00, } diff --git a/backend/api/go_client/run.pb.gw.go b/backend/api/go_client/run.pb.gw.go index df7e25cfb41..5d91413d2ac 100755 --- a/backend/api/go_client/run.pb.gw.go +++ b/backend/api/go_client/run.pb.gw.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_client/visualization.pb.go b/backend/api/go_client/visualization.pb.go index c8a53443c78..506c61da71e 100755 --- a/backend/api/go_client/visualization.pb.go +++ b/backend/api/go_client/visualization.pb.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -68,11 +68,12 @@ func (x Visualization_Type) String() string { return proto.EnumName(Visualization_Type_name, int32(x)) } func (Visualization_Type) EnumDescriptor() ([]byte, []int) { - return fileDescriptor_visualization_92a3ef47923a838d, []int{1, 0} + return fileDescriptor_visualization_5c1ff36cbdec23a0, []int{1, 0} } type CreateVisualizationRequest struct { Visualization *Visualization `protobuf:"bytes,1,opt,name=visualization,proto3" json:"visualization,omitempty"` + Namespace string `protobuf:"bytes,2,opt,name=namespace,proto3" json:"namespace,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -82,7 +83,7 @@ func (m *CreateVisualizationRequest) Reset() { *m = CreateVisualizationR func (m *CreateVisualizationRequest) String() string { return proto.CompactTextString(m) } func (*CreateVisualizationRequest) ProtoMessage() {} func (*CreateVisualizationRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_visualization_92a3ef47923a838d, []int{0} + return fileDescriptor_visualization_5c1ff36cbdec23a0, []int{0} } func (m *CreateVisualizationRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_CreateVisualizationRequest.Unmarshal(m, b) @@ -109,6 +110,13 @@ func (m *CreateVisualizationRequest) GetVisualization() *Visualization { return nil } +func (m *CreateVisualizationRequest) GetNamespace() string { + if m != nil { + return m.Namespace + } + return "" +} + type Visualization struct { Type Visualization_Type `protobuf:"varint,1,opt,name=type,proto3,enum=api.Visualization_Type" json:"type,omitempty"` Source string `protobuf:"bytes,2,opt,name=source,proto3" json:"source,omitempty"` @@ -124,7 +132,7 @@ func (m *Visualization) Reset() { *m = Visualization{} } func (m *Visualization) String() string { return proto.CompactTextString(m) } func (*Visualization) ProtoMessage() {} func (*Visualization) Descriptor() ([]byte, []int) { - return 
fileDescriptor_visualization_92a3ef47923a838d, []int{1} + return fileDescriptor_visualization_5c1ff36cbdec23a0, []int{1} } func (m *Visualization) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Visualization.Unmarshal(m, b) @@ -258,38 +266,40 @@ var _VisualizationService_serviceDesc = grpc.ServiceDesc{ } func init() { - proto.RegisterFile("backend/api/visualization.proto", fileDescriptor_visualization_92a3ef47923a838d) -} - -var fileDescriptor_visualization_92a3ef47923a838d = []byte{ - // 462 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x74, 0x92, 0xcd, 0x6e, 0xd3, 0x40, - 0x10, 0xc7, 0xeb, 0xc4, 0x09, 0xcd, 0x14, 0x83, 0xd9, 0x56, 0xd4, 0xb2, 0x82, 0x12, 0xf9, 0x54, - 0x09, 0x62, 0xab, 0xc9, 0x05, 0x71, 0x22, 0x09, 0xe5, 0x44, 0x54, 0xc9, 0x49, 0x73, 0xe8, 0xa5, - 0x5a, 0xbb, 0x53, 0x67, 0x55, 0xc7, 0x6b, 0xf6, 0x23, 0x55, 0xb9, 0x20, 0x21, 0xf1, 0x02, 0x70, - 0xe0, 0xc1, 0xb8, 0x73, 0xe2, 0x41, 0x50, 0x37, 0x11, 0xc4, 0x6a, 0x39, 0x79, 0x3e, 0x7e, 0xf3, - 0xf7, 0xf8, 0xef, 0x81, 0x4e, 0x42, 0xd3, 0x6b, 0x2c, 0x2e, 0x23, 0x5a, 0xb2, 0x68, 0xc5, 0xa4, - 0xa6, 0x39, 0xfb, 0x44, 0x15, 0xe3, 0x45, 0x58, 0x0a, 0xae, 0x38, 0xa9, 0xd3, 0x92, 0xf9, 0xed, - 0x8c, 0xf3, 0x2c, 0x47, 0x03, 0xd1, 0xa2, 0xe0, 0xca, 0x10, 0x72, 0x8d, 0xf8, 0x87, 0xdb, 0x1a, - 0x28, 0x04, 0x17, 0x9b, 0xc6, 0x2b, 0xf3, 0x48, 0x7b, 0x19, 0x16, 0x3d, 0x79, 0x43, 0xb3, 0x0c, - 0x45, 0xc4, 0x4b, 0x33, 0x7a, 0x5f, 0x26, 0x98, 0x83, 0x3f, 0x16, 0x48, 0x15, 0xce, 0xb7, 0xd7, - 0x88, 0xf1, 0xa3, 0x46, 0xa9, 0xc8, 0x6b, 0x70, 0x2a, 0xeb, 0x79, 0x56, 0xd7, 0x3a, 0xda, 0xeb, - 0x93, 0x90, 0x96, 0x2c, 0xac, 0x4e, 0x54, 0xc1, 0xe0, 0x97, 0x05, 0x4e, 0x05, 0x20, 0x2f, 0xc1, - 0x56, 0xb7, 0x25, 0x1a, 0x89, 0x27, 0xfd, 0xc3, 0xfb, 0x12, 0xe1, 0xec, 0xb6, 0xc4, 0xd8, 0x40, - 0xe4, 0x39, 0x34, 0x25, 0xd7, 0x22, 0x45, 0xaf, 0xd6, 0xb5, 0x8e, 0x5a, 0xf1, 0x26, 0x23, 0x6d, - 0x68, 0x51, 0x91, 0xe9, 0x25, 0x16, 0x4a, 0x7a, 0x75, 0xd3, 0xfa, 0x57, 0x20, 0x04, 0xec, 0x85, - 0x5a, 0xe6, 0x9e, 0x6d, 0x1a, 0x26, 0x26, 0x07, 0xd0, 0x30, 0xee, 0x78, 0x0d, 0x53, 0x5c, 0x27, - 0xc1, 0x5b, 0xb0, 0xef, 0xde, 0x46, 0x1c, 0x68, 0xc5, 0xa7, 0xe3, 0x8b, 0xf1, 0x59, 0x3c, 0x3f, - 0x71, 0x77, 0xc8, 0x2e, 0xd8, 0xb3, 0xf7, 0xef, 0xe6, 0xae, 0xb5, 0x8e, 0x26, 0x43, 0xb7, 0x46, - 0x5a, 0xd0, 0x98, 0x0d, 0x47, 0x1f, 0x4e, 0xdc, 0x3a, 0x01, 0x68, 0x8e, 0xcf, 0xa6, 0xb3, 0xd3, - 0x89, 0x6b, 0xf7, 0x7f, 0x58, 0x70, 0x50, 0x59, 0x7f, 0x8a, 0x62, 0xc5, 0x52, 0x24, 0x9f, 0x61, - 0xff, 0x01, 0x47, 0x49, 0xc7, 0x7c, 0xf0, 0xff, 0xbd, 0xf6, 0x1f, 0x30, 0x35, 0x18, 0x7c, 0xf9, - 0xf9, 0xfb, 0x7b, 0xad, 0x17, 0xb4, 0xef, 0xfe, 0xb2, 0x8c, 0x56, 0xc7, 0x09, 0x2a, 0x7a, 0x5c, - 0x3d, 0x19, 0xf9, 0xa6, 0x6a, 0xfd, 0xe8, 0xab, 0xf5, 0x6d, 0x38, 0x89, 0xdb, 0xf0, 0xe8, 0x12, - 0xaf, 0xa8, 0xce, 0x15, 0x79, 0x46, 0x9e, 0x82, 0xe3, 0xef, 0x19, 0xfd, 0xa9, 0xa2, 0x4a, 0xcb, - 0xf3, 0x0e, 0xbc, 0x80, 0xe6, 0x08, 0xa9, 0x40, 0x41, 0xf6, 0x77, 0x6b, 0xbe, 0x43, 0xb5, 0x5a, - 0x70, 0xb1, 0x91, 0xe8, 0xd6, 0x92, 0xc7, 0x00, 0x7f, 0x81, 0x9d, 0xf3, 0x41, 0xc6, 0xd4, 0x42, - 0x27, 0x61, 0xca, 0x97, 0xd1, 0xb5, 0x4e, 0xf0, 0x2a, 0xe7, 0x37, 0x51, 0xc9, 0x4a, 0xcc, 0x59, - 0x81, 0x32, 0xda, 0x3e, 0xc5, 0x8c, 0x5f, 0xa4, 0x39, 0xc3, 0x42, 0x25, 0x4d, 0x73, 0x61, 0x83, - 0x3f, 0x01, 0x00, 0x00, 0xff, 0xff, 0x8b, 0x6b, 0x61, 0xd1, 0xee, 0x02, 0x00, 0x00, + proto.RegisterFile("backend/api/visualization.proto", fileDescriptor_visualization_5c1ff36cbdec23a0) +} + +var fileDescriptor_visualization_5c1ff36cbdec23a0 = []byte{ + // 482 
bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x74, 0x52, 0xc1, 0x6e, 0xd3, 0x40, + 0x10, 0xad, 0x13, 0x27, 0x34, 0x53, 0x02, 0x61, 0x5b, 0x51, 0xcb, 0x0a, 0x4a, 0xe4, 0x53, 0x24, + 0xa8, 0xad, 0xa6, 0x17, 0xe0, 0x02, 0x49, 0x28, 0x27, 0xa2, 0x4a, 0x4e, 0x9a, 0x43, 0x2f, 0xd5, + 0xda, 0x9d, 0x3a, 0xab, 0x3a, 0xde, 0x65, 0x77, 0x9d, 0xaa, 0x20, 0x2e, 0x48, 0x9c, 0xb8, 0xc1, + 0x2f, 0xf0, 0x47, 0xdc, 0x39, 0xf1, 0x21, 0x28, 0x9b, 0x28, 0xc4, 0x6a, 0x7b, 0xf2, 0xee, 0xec, + 0x9b, 0xf7, 0xc6, 0x6f, 0x1e, 0xb4, 0x22, 0x1a, 0x5f, 0x61, 0x76, 0x11, 0x50, 0xc1, 0x82, 0x39, + 0x53, 0x39, 0x4d, 0xd9, 0x27, 0xaa, 0x19, 0xcf, 0x7c, 0x21, 0xb9, 0xe6, 0xa4, 0x4c, 0x05, 0x73, + 0x9b, 0x09, 0xe7, 0x49, 0x8a, 0x06, 0x44, 0xb3, 0x8c, 0x6b, 0x83, 0x50, 0x4b, 0x88, 0xbb, 0xbf, + 0xc9, 0x81, 0x52, 0x72, 0xb9, 0x7a, 0x78, 0x61, 0x3e, 0xf1, 0x41, 0x82, 0xd9, 0x81, 0xba, 0xa6, + 0x49, 0x82, 0x32, 0xe0, 0xc2, 0xb4, 0xde, 0xa6, 0xf1, 0x34, 0xb8, 0x03, 0x89, 0x54, 0xe3, 0x64, + 0x73, 0x8c, 0x10, 0x3f, 0xe6, 0xa8, 0x34, 0x79, 0x09, 0xf5, 0xc2, 0x78, 0x8e, 0xd5, 0xb6, 0x3a, + 0x3b, 0x5d, 0xe2, 0x53, 0xc1, 0xfc, 0x62, 0x47, 0x11, 0x48, 0x9a, 0x50, 0xcb, 0xe8, 0x0c, 0x95, + 0xa0, 0x31, 0x3a, 0xa5, 0xb6, 0xd5, 0xa9, 0x85, 0xff, 0x0b, 0xde, 0x1f, 0x0b, 0xea, 0x85, 0x76, + 0xf2, 0x1c, 0x6c, 0x7d, 0x23, 0xd0, 0x08, 0x3c, 0xea, 0xee, 0xdf, 0x16, 0xf0, 0xc7, 0x37, 0x02, + 0x43, 0x03, 0x22, 0x4f, 0xa1, 0xaa, 0x78, 0x2e, 0xd7, 0xcc, 0xab, 0xdb, 0x42, 0x94, 0xca, 0x24, + 0x9f, 0x61, 0xa6, 0x95, 0x53, 0x5e, 0x8a, 0xae, 0x0b, 0x84, 0x80, 0x3d, 0xd5, 0xb3, 0xd4, 0xb1, + 0xcd, 0x83, 0x39, 0x93, 0x3d, 0xa8, 0x18, 0xef, 0x9c, 0x8a, 0x29, 0x2e, 0x2f, 0xde, 0x5b, 0xb0, + 0x17, 0x6a, 0xa4, 0x0e, 0xb5, 0xf0, 0x64, 0x70, 0x3e, 0x38, 0x0d, 0x27, 0xc7, 0x8d, 0x2d, 0xb2, + 0x0d, 0xf6, 0xf8, 0xfd, 0xbb, 0x49, 0xc3, 0x5a, 0x9e, 0x86, 0xbd, 0x46, 0x89, 0xd4, 0xa0, 0x32, + 0xee, 0xf5, 0x3f, 0x1c, 0x37, 0xca, 0x04, 0xa0, 0x3a, 0x38, 0x1d, 0x8d, 0x4f, 0x86, 0x0d, 0xbb, + 0xfb, 0xcb, 0x82, 0xbd, 0xc2, 0xf8, 0x23, 0x94, 0x73, 0x16, 0x23, 0xf9, 0x6e, 0xc1, 0xee, 0x1d, + 0x86, 0x93, 0x96, 0xf9, 0xe3, 0xfb, 0x57, 0xe1, 0xde, 0xe1, 0xb9, 0xf7, 0xe6, 0xeb, 0xef, 0xbf, + 0x3f, 0x4b, 0xaf, 0xbc, 0xce, 0x22, 0x04, 0x2a, 0x98, 0x1f, 0x46, 0xa8, 0xe9, 0x61, 0x31, 0x51, + 0x2a, 0xf8, 0xbc, 0xf6, 0xfd, 0xcb, 0xeb, 0xe2, 0x96, 0xfa, 0xdf, 0xac, 0x1f, 0xbd, 0x61, 0xd8, + 0x84, 0x07, 0x17, 0x78, 0x49, 0xf3, 0x54, 0x93, 0x27, 0xe4, 0x31, 0xd4, 0xdd, 0x1d, 0xa3, 0x35, + 0xd2, 0x54, 0xe7, 0xea, 0xac, 0x05, 0xcf, 0xa0, 0xda, 0x47, 0x2a, 0x51, 0x92, 0xdd, 0xed, 0x92, + 0x5b, 0xa7, 0xb9, 0x9e, 0x72, 0xb9, 0xa2, 0x68, 0x97, 0xa2, 0x87, 0x00, 0x6b, 0xc0, 0xd6, 0xd9, + 0x51, 0xc2, 0xf4, 0x34, 0x8f, 0xfc, 0x98, 0xcf, 0x82, 0xab, 0x3c, 0xc2, 0xcb, 0x94, 0x5f, 0x07, + 0x82, 0x09, 0x4c, 0x59, 0x86, 0x2a, 0xd8, 0x4c, 0x6d, 0xc2, 0xcf, 0xe3, 0x94, 0x61, 0xa6, 0xa3, + 0xaa, 0x09, 0xe3, 0xd1, 0xbf, 0x00, 0x00, 0x00, 0xff, 0xff, 0xf8, 0x50, 0x9c, 0xbc, 0x19, 0x03, + 0x00, 0x00, } diff --git a/backend/api/go_client/visualization.pb.gw.go b/backend/api/go_client/visualization.pb.gw.go index 7f251f53b53..58f4830ca33 100755 --- a/backend/api/go_client/visualization.pb.gw.go +++ b/backend/api/go_client/visualization.pb.gw.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
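The `Namespace` field added to `CreateVisualizationRequest` above is threaded through to the HTTP gateway in the hunk that follows, which lifts the value out of the request path. A minimal caller-side sketch; the `ROC_CURVE` enum value, the `NewVisualizationServiceClient` constructor, and the dial address are assumptions based on the conventions of the generated code, not on lines shown in this diff:

```go
package main

import (
	"context"
	"fmt"
	"log"

	api "github.com/kubeflow/pipelines/backend/api/go_client"
	"google.golang.org/grpc"
)

func main() {
	// Assumed gRPC endpoint of the ml-pipeline API server.
	conn, err := grpc.Dial("ml-pipeline.kubeflow:8887", grpc.WithInsecure())
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	client := api.NewVisualizationServiceClient(conn)
	resp, err := client.CreateVisualization(context.Background(), &api.CreateVisualizationRequest{
		Visualization: &api.Visualization{
			Type:   api.Visualization_ROC_CURVE, // assumed enum value
			Source: "gs://some-bucket/roc-input.csv",
		},
		Namespace: "kubeflow", // the field introduced by this change
	})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(resp)
}
```

Over REST, the same call now routes as `POST /apis/v1beta1/visualizations/{namespace}`, matching the path pattern registered in the gateway hunk below.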
@@ -54,6 +54,24 @@ func request_VisualizationService_CreateVisualization_0(ctx context.Context, mar return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) } + var ( + val string + ok bool + err error + _ = err + ) + + val, ok = pathParams["namespace"] + if !ok { + return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "namespace") + } + + protoReq.Namespace, err = runtime.String(val) + + if err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "namespace", err) + } + msg, err := client.CreateVisualization(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) return msg, metadata, err @@ -121,7 +139,7 @@ func RegisterVisualizationServiceHandlerClient(ctx context.Context, mux *runtime } var ( - pattern_VisualizationService_CreateVisualization_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"apis", "v1beta1", "visualizations"}, "")) + pattern_VisualizationService_CreateVisualization_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"apis", "v1beta1", "visualizations", "namespace"}, "")) ) var ( diff --git a/backend/api/go_http_client/experiment_client/experiment_client.go b/backend/api/go_http_client/experiment_client/experiment_client.go index 94c80206ed0..e1d0052b05e 100644 --- a/backend/api/go_http_client/experiment_client/experiment_client.go +++ b/backend/api/go_http_client/experiment_client/experiment_client.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/experiment_client/experiment_service/BUILD.bazel b/backend/api/go_http_client/experiment_client/experiment_service/BUILD.bazel index 6669641c518..2d5a4495fd2 100644 --- a/backend/api/go_http_client/experiment_client/experiment_service/BUILD.bazel +++ b/backend/api/go_http_client/experiment_client/experiment_service/BUILD.bazel @@ -3,6 +3,8 @@ load("@io_bazel_rules_go//go:def.bzl", "go_library") go_library( name = "go_default_library", srcs = [ + "archive_experiment_parameters.go", + "archive_experiment_responses.go", "create_experiment_parameters.go", "create_experiment_responses.go", "delete_experiment_parameters.go", @@ -12,6 +14,8 @@ go_library( "get_experiment_responses.go", "list_experiment_parameters.go", "list_experiment_responses.go", + "unarchive_experiment_parameters.go", + "unarchive_experiment_responses.go", ], importpath = "github.com/kubeflow/pipelines/backend/api/go_http_client/experiment_client/experiment_service", visibility = ["//visibility:public"], diff --git a/backend/api/go_http_client/experiment_client/experiment_service/archive_experiment_parameters.go b/backend/api/go_http_client/experiment_client/experiment_service/archive_experiment_parameters.go new file mode 100644 index 00000000000..471e6a5bfc6 --- /dev/null +++ b/backend/api/go_http_client/experiment_client/experiment_service/archive_experiment_parameters.go @@ -0,0 +1,147 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by go-swagger; DO NOT EDIT. + +package experiment_service + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "net/http" + "time" + + "github.com/go-openapi/errors" + "github.com/go-openapi/runtime" + cr "github.com/go-openapi/runtime/client" + + strfmt "github.com/go-openapi/strfmt" +) + +// NewArchiveExperimentParams creates a new ArchiveExperimentParams object +// with the default values initialized. +func NewArchiveExperimentParams() *ArchiveExperimentParams { + var () + return &ArchiveExperimentParams{ + + timeout: cr.DefaultTimeout, + } +} + +// NewArchiveExperimentParamsWithTimeout creates a new ArchiveExperimentParams object +// with the default values initialized, and the ability to set a timeout on a request +func NewArchiveExperimentParamsWithTimeout(timeout time.Duration) *ArchiveExperimentParams { + var () + return &ArchiveExperimentParams{ + + timeout: timeout, + } +} + +// NewArchiveExperimentParamsWithContext creates a new ArchiveExperimentParams object +// with the default values initialized, and the ability to set a context for a request +func NewArchiveExperimentParamsWithContext(ctx context.Context) *ArchiveExperimentParams { + var () + return &ArchiveExperimentParams{ + + Context: ctx, + } +} + +// NewArchiveExperimentParamsWithHTTPClient creates a new ArchiveExperimentParams object +// with the default values initialized, and the ability to set a custom HTTPClient for a request +func NewArchiveExperimentParamsWithHTTPClient(client *http.Client) *ArchiveExperimentParams { + var () + return &ArchiveExperimentParams{ + HTTPClient: client, + } +} + +/*ArchiveExperimentParams contains all the parameters to send to the API endpoint +for the archive experiment operation typically these are written to a http.Request +*/ +type ArchiveExperimentParams struct { + + /*ID*/ + ID string + + timeout time.Duration + Context context.Context + HTTPClient *http.Client +} + +// WithTimeout adds the timeout to the archive experiment params +func (o *ArchiveExperimentParams) WithTimeout(timeout time.Duration) *ArchiveExperimentParams { + o.SetTimeout(timeout) + return o +} + +// SetTimeout adds the timeout to the archive experiment params +func (o *ArchiveExperimentParams) SetTimeout(timeout time.Duration) { + o.timeout = timeout +} + +// WithContext adds the context to the archive experiment params +func (o *ArchiveExperimentParams) WithContext(ctx context.Context) *ArchiveExperimentParams { + o.SetContext(ctx) + return o +} + +// SetContext adds the context to the archive experiment params +func (o *ArchiveExperimentParams) SetContext(ctx context.Context) { + o.Context = ctx +} + +// WithHTTPClient adds the HTTPClient to the archive experiment params +func (o *ArchiveExperimentParams) WithHTTPClient(client *http.Client) *ArchiveExperimentParams { + o.SetHTTPClient(client) + return o +} + +// SetHTTPClient adds the HTTPClient to the archive experiment params +func (o *ArchiveExperimentParams) SetHTTPClient(client *http.Client) { + 
o.HTTPClient = client +} + +// WithID adds the id to the archive experiment params +func (o *ArchiveExperimentParams) WithID(id string) *ArchiveExperimentParams { + o.SetID(id) + return o +} + +// SetID adds the id to the archive experiment params +func (o *ArchiveExperimentParams) SetID(id string) { + o.ID = id +} + +// WriteToRequest writes these params to a swagger request +func (o *ArchiveExperimentParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { + + if err := r.SetTimeout(o.timeout); err != nil { + return err + } + var res []error + + // path param id + if err := r.SetPathParam("id", o.ID); err != nil { + return err + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} diff --git a/backend/api/go_http_client/experiment_client/experiment_service/archive_experiment_responses.go b/backend/api/go_http_client/experiment_client/experiment_service/archive_experiment_responses.go new file mode 100644 index 00000000000..dcbe3ff58da --- /dev/null +++ b/backend/api/go_http_client/experiment_client/experiment_service/archive_experiment_responses.go @@ -0,0 +1,124 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by go-swagger; DO NOT EDIT. + +package experiment_service + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "fmt" + "io" + + "github.com/go-openapi/runtime" + + strfmt "github.com/go-openapi/strfmt" + + experiment_model "github.com/kubeflow/pipelines/backend/api/go_http_client/experiment_model" +) + +// ArchiveExperimentReader is a Reader for the ArchiveExperiment structure. +type ArchiveExperimentReader struct { + formats strfmt.Registry +} + +// ReadResponse reads a server response into the received o. +func (o *ArchiveExperimentReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { + switch response.Code() { + + case 200: + result := NewArchiveExperimentOK() + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + return result, nil + + default: + result := NewArchiveExperimentDefault(response.Code()) + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + if response.Code()/100 == 2 { + return result, nil + } + return nil, result + } +} + +// NewArchiveExperimentOK creates a ArchiveExperimentOK with default headers values +func NewArchiveExperimentOK() *ArchiveExperimentOK { + return &ArchiveExperimentOK{} +} + +/*ArchiveExperimentOK handles this case with default header values. + +A successful response. 
+*/ +type ArchiveExperimentOK struct { + Payload interface{} +} + +func (o *ArchiveExperimentOK) Error() string { + return fmt.Sprintf("[POST /apis/v1beta1/experiments/{id}:archive][%d] archiveExperimentOK %+v", 200, o.Payload) +} + +func (o *ArchiveExperimentOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + // response payload + if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} + +// NewArchiveExperimentDefault creates a ArchiveExperimentDefault with default headers values +func NewArchiveExperimentDefault(code int) *ArchiveExperimentDefault { + return &ArchiveExperimentDefault{ + _statusCode: code, + } +} + +/*ArchiveExperimentDefault handles this case with default header values. + +ArchiveExperimentDefault archive experiment default +*/ +type ArchiveExperimentDefault struct { + _statusCode int + + Payload *experiment_model.APIStatus +} + +// Code gets the status code for the archive experiment default response +func (o *ArchiveExperimentDefault) Code() int { + return o._statusCode +} + +func (o *ArchiveExperimentDefault) Error() string { + return fmt.Sprintf("[POST /apis/v1beta1/experiments/{id}:archive][%d] ArchiveExperiment default %+v", o._statusCode, o.Payload) +} + +func (o *ArchiveExperimentDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + o.Payload = new(experiment_model.APIStatus) + + // response payload + if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} diff --git a/backend/api/go_http_client/experiment_client/experiment_service/create_experiment_parameters.go b/backend/api/go_http_client/experiment_client/experiment_service/create_experiment_parameters.go index 230a2653770..d791f2e4c07 100644 --- a/backend/api/go_http_client/experiment_client/experiment_service/create_experiment_parameters.go +++ b/backend/api/go_http_client/experiment_client/experiment_service/create_experiment_parameters.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -78,7 +78,7 @@ for the create experiment operation typically these are written to a http.Reques type CreateExperimentParams struct { /*Body - The experiment to be created + The experiment to be created. */ Body *experiment_model.APIExperiment diff --git a/backend/api/go_http_client/experiment_client/experiment_service/create_experiment_responses.go b/backend/api/go_http_client/experiment_client/experiment_service/create_experiment_responses.go index 037de95e96b..83155efa091 100644 --- a/backend/api/go_http_client/experiment_client/experiment_service/create_experiment_responses.go +++ b/backend/api/go_http_client/experiment_client/experiment_service/create_experiment_responses.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
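The new `archive_experiment_parameters.go`/`archive_experiment_responses.go` pair above follows the usual go-swagger shape: a fluent params builder plus a reader that maps the HTTP status code onto a typed result (`ArchiveExperimentOK` on 200, `ArchiveExperimentDefault` otherwise). A sketch of building the params using only the accessors defined above (the experiment ID is a hypothetical placeholder):

```go
package main

import (
	"context"
	"fmt"
	"time"

	"github.com/kubeflow/pipelines/backend/api/go_http_client/experiment_client/experiment_service"
)

func main() {
	// Fluent construction: a timeout first, then the {id} path parameter.
	params := experiment_service.NewArchiveExperimentParams().
		WithTimeout(30 * time.Second).
		WithID("0e1c7e81-hypothetical-experiment-id")

	// A context can be attached the same way when cancellation matters.
	ctx, cancel := context.WithTimeout(context.Background(), time.Minute)
	defer cancel()
	params = params.WithContext(ctx)

	fmt.Println("archiving experiment", params.ID)
}
```

The params value then feeds the `ArchiveExperiment` client method added to `experiment_service_client.go` further below.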
diff --git a/backend/api/go_http_client/experiment_client/experiment_service/delete_experiment_parameters.go b/backend/api/go_http_client/experiment_client/experiment_service/delete_experiment_parameters.go index dd7399f782a..c15aca1d698 100644 --- a/backend/api/go_http_client/experiment_client/experiment_service/delete_experiment_parameters.go +++ b/backend/api/go_http_client/experiment_client/experiment_service/delete_experiment_parameters.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/experiment_client/experiment_service/delete_experiment_responses.go b/backend/api/go_http_client/experiment_client/experiment_service/delete_experiment_responses.go index a9b2a696034..01a45fb5502 100644 --- a/backend/api/go_http_client/experiment_client/experiment_service/delete_experiment_responses.go +++ b/backend/api/go_http_client/experiment_client/experiment_service/delete_experiment_responses.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/experiment_client/experiment_service/experiment_service_client.go b/backend/api/go_http_client/experiment_client/experiment_service/experiment_service_client.go index 174ad5415fb..8544a203002 100644 --- a/backend/api/go_http_client/experiment_client/experiment_service/experiment_service_client.go +++ b/backend/api/go_http_client/experiment_client/experiment_service/experiment_service_client.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
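The hunk below adds `ArchiveExperiment` and `UnarchiveExperiment` to the generated service client, alongside friendlier doc strings for the existing operations. A hedged sketch of driving the pair end to end; the method signatures come from the additions below, while the auth writer and transport wiring are left as deployment-specific assumptions:

```go
package example

import (
	"fmt"

	"github.com/go-openapi/runtime"

	"github.com/kubeflow/pipelines/backend/api/go_http_client/experiment_client/experiment_service"
)

// archiveThenRestore exercises both new operations against a generated client.
func archiveThenRestore(svc *experiment_service.Client, auth runtime.ClientAuthInfoWriter, id string) error {
	// POST /apis/v1beta1/experiments/{id}:archive
	if _, err := svc.ArchiveExperiment(experiment_service.NewArchiveExperimentParams().WithID(id), auth); err != nil {
		return fmt.Errorf("archive failed: %v", err)
	}
	// POST /apis/v1beta1/experiments/{id}:unarchive
	if _, err := svc.UnarchiveExperiment(experiment_service.NewUnarchiveExperimentParams().WithID(id), auth); err != nil {
		return fmt.Errorf("unarchive failed: %v", err)
	}
	return nil
}
```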
@@ -39,7 +39,36 @@ type Client struct { } /* -CreateExperiment create experiment API +ArchiveExperiment archives an experiment +*/ +func (a *Client) ArchiveExperiment(params *ArchiveExperimentParams, authInfo runtime.ClientAuthInfoWriter) (*ArchiveExperimentOK, error) { + // TODO: Validate the params before sending + if params == nil { + params = NewArchiveExperimentParams() + } + + result, err := a.transport.Submit(&runtime.ClientOperation{ + ID: "ArchiveExperiment", + Method: "POST", + PathPattern: "/apis/v1beta1/experiments/{id}:archive", + ProducesMediaTypes: []string{"application/json"}, + ConsumesMediaTypes: []string{"application/json"}, + Schemes: []string{"http", "https"}, + Params: params, + Reader: &ArchiveExperimentReader{formats: a.formats}, + AuthInfo: authInfo, + Context: params.Context, + Client: params.HTTPClient, + }) + if err != nil { + return nil, err + } + return result.(*ArchiveExperimentOK), nil + +} + +/* +CreateExperiment creates a new experiment */ func (a *Client) CreateExperiment(params *CreateExperimentParams, authInfo runtime.ClientAuthInfoWriter) (*CreateExperimentOK, error) { // TODO: Validate the params before sending @@ -68,7 +97,7 @@ func (a *Client) CreateExperiment(params *CreateExperimentParams, authInfo runti } /* -DeleteExperiment delete experiment API +DeleteExperiment deletes an experiment */ func (a *Client) DeleteExperiment(params *DeleteExperimentParams, authInfo runtime.ClientAuthInfoWriter) (*DeleteExperimentOK, error) { // TODO: Validate the params before sending @@ -97,7 +126,7 @@ func (a *Client) DeleteExperiment(params *DeleteExperimentParams, authInfo runti } /* -GetExperiment get experiment API +GetExperiment finds a specific experiment by ID */ func (a *Client) GetExperiment(params *GetExperimentParams, authInfo runtime.ClientAuthInfoWriter) (*GetExperimentOK, error) { // TODO: Validate the params before sending @@ -126,7 +155,7 @@ func (a *Client) GetExperiment(params *GetExperimentParams, authInfo runtime.Cli } /* -ListExperiment list experiment API +ListExperiment finds all experiments */ func (a *Client) ListExperiment(params *ListExperimentParams, authInfo runtime.ClientAuthInfoWriter) (*ListExperimentOK, error) { // TODO: Validate the params before sending @@ -154,6 +183,35 @@ func (a *Client) ListExperiment(params *ListExperimentParams, authInfo runtime.C } +/* +UnarchiveExperiment restores an archived experiment +*/ +func (a *Client) UnarchiveExperiment(params *UnarchiveExperimentParams, authInfo runtime.ClientAuthInfoWriter) (*UnarchiveExperimentOK, error) { + // TODO: Validate the params before sending + if params == nil { + params = NewUnarchiveExperimentParams() + } + + result, err := a.transport.Submit(&runtime.ClientOperation{ + ID: "UnarchiveExperiment", + Method: "POST", + PathPattern: "/apis/v1beta1/experiments/{id}:unarchive", + ProducesMediaTypes: []string{"application/json"}, + ConsumesMediaTypes: []string{"application/json"}, + Schemes: []string{"http", "https"}, + Params: params, + Reader: &UnarchiveExperimentReader{formats: a.formats}, + AuthInfo: authInfo, + Context: params.Context, + Client: params.HTTPClient, + }) + if err != nil { + return nil, err + } + return result.(*UnarchiveExperimentOK), nil + +} + // SetTransport changes the transport on the client func (a *Client) SetTransport(transport runtime.ClientTransport) { a.transport = transport diff --git a/backend/api/go_http_client/experiment_client/experiment_service/get_experiment_parameters.go 
b/backend/api/go_http_client/experiment_client/experiment_service/get_experiment_parameters.go index 67f5d5b11ee..010787d13ef 100644 --- a/backend/api/go_http_client/experiment_client/experiment_service/get_experiment_parameters.go +++ b/backend/api/go_http_client/experiment_client/experiment_service/get_experiment_parameters.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -76,7 +76,7 @@ for the get experiment operation typically these are written to a http.Request type GetExperimentParams struct { /*ID - The ID of the experiment to be retrieved + The ID of the experiment to be retrieved. */ ID string diff --git a/backend/api/go_http_client/experiment_client/experiment_service/get_experiment_responses.go b/backend/api/go_http_client/experiment_client/experiment_service/get_experiment_responses.go index 29d70ea98c5..95a3003d6dc 100644 --- a/backend/api/go_http_client/experiment_client/experiment_service/get_experiment_responses.go +++ b/backend/api/go_http_client/experiment_client/experiment_service/get_experiment_responses.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/experiment_client/experiment_service/list_experiment_parameters.go b/backend/api/go_http_client/experiment_client/experiment_service/list_experiment_parameters.go index 944ca118fa3..e724148ca81 100644 --- a/backend/api/go_http_client/experiment_client/experiment_service/list_experiment_parameters.go +++ b/backend/api/go_http_client/experiment_client/experiment_service/list_experiment_parameters.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -35,8 +35,11 @@ import ( // NewListExperimentParams creates a new ListExperimentParams object // with the default values initialized. 
func NewListExperimentParams() *ListExperimentParams { - var () + var ( + resourceReferenceKeyTypeDefault = string("UNKNOWN_RESOURCE_TYPE") + ) return &ListExperimentParams{ + ResourceReferenceKeyType: &resourceReferenceKeyTypeDefault, timeout: cr.DefaultTimeout, } @@ -45,8 +48,11 @@ func NewListExperimentParams() *ListExperimentParams { // NewListExperimentParamsWithTimeout creates a new ListExperimentParams object // with the default values initialized, and the ability to set a timeout on a request func NewListExperimentParamsWithTimeout(timeout time.Duration) *ListExperimentParams { - var () + var ( + resourceReferenceKeyTypeDefault = string("UNKNOWN_RESOURCE_TYPE") + ) return &ListExperimentParams{ + ResourceReferenceKeyType: &resourceReferenceKeyTypeDefault, timeout: timeout, } @@ -55,8 +61,11 @@ func NewListExperimentParamsWithTimeout(timeout time.Duration) *ListExperimentPa // NewListExperimentParamsWithContext creates a new ListExperimentParams object // with the default values initialized, and the ability to set a context for a request func NewListExperimentParamsWithContext(ctx context.Context) *ListExperimentParams { - var () + var ( + resourceReferenceKeyTypeDefault = string("UNKNOWN_RESOURCE_TYPE") + ) return &ListExperimentParams{ + ResourceReferenceKeyType: &resourceReferenceKeyTypeDefault, Context: ctx, } @@ -65,9 +74,12 @@ func NewListExperimentParamsWithContext(ctx context.Context) *ListExperimentPara // NewListExperimentParamsWithHTTPClient creates a new ListExperimentParams object // with the default values initialized, and the ability to set a custom HTTPClient for a request func NewListExperimentParamsWithHTTPClient(client *http.Client) *ListExperimentParams { - var () + var ( + resourceReferenceKeyTypeDefault = string("UNKNOWN_RESOURCE_TYPE") + ) return &ListExperimentParams{ - HTTPClient: client, + ResourceReferenceKeyType: &resourceReferenceKeyTypeDefault, + HTTPClient: client, } } @@ -77,8 +89,9 @@ for the list experiment operation typically these are written to a http.Request type ListExperimentParams struct { /*Filter - A base-64 encoded, JSON-serialized Filter protocol buffer (see - filter.proto). + A url-encoded, JSON-serialized Filter protocol buffer (see + [filter.proto](https://github.com/kubeflow/pipelines/ + blob/master/backend/api/filter.proto)). */ Filter *string @@ -86,6 +99,16 @@ PageSize *int32 /*PageToken*/ PageToken *string + /*ResourceReferenceKeyID + The ID of the resource that referred to. + + */ + ResourceReferenceKeyID *string + /*ResourceReferenceKeyType + The type of the resource that referred to. + + */ + ResourceReferenceKeyType *string /*SortBy Can be in the format of "field_name", "field_name asc" or "field_name desc". Ascending by default.
@@ -164,6 +187,28 @@ func (o *ListExperimentParams) SetPageToken(pageToken *string) { o.PageToken = pageToken } +// WithResourceReferenceKeyID adds the resourceReferenceKeyID to the list experiment params +func (o *ListExperimentParams) WithResourceReferenceKeyID(resourceReferenceKeyID *string) *ListExperimentParams { + o.SetResourceReferenceKeyID(resourceReferenceKeyID) + return o +} + +// SetResourceReferenceKeyID adds the resourceReferenceKeyId to the list experiment params +func (o *ListExperimentParams) SetResourceReferenceKeyID(resourceReferenceKeyID *string) { + o.ResourceReferenceKeyID = resourceReferenceKeyID +} + +// WithResourceReferenceKeyType adds the resourceReferenceKeyType to the list experiment params +func (o *ListExperimentParams) WithResourceReferenceKeyType(resourceReferenceKeyType *string) *ListExperimentParams { + o.SetResourceReferenceKeyType(resourceReferenceKeyType) + return o +} + +// SetResourceReferenceKeyType adds the resourceReferenceKeyType to the list experiment params +func (o *ListExperimentParams) SetResourceReferenceKeyType(resourceReferenceKeyType *string) { + o.ResourceReferenceKeyType = resourceReferenceKeyType +} + // WithSortBy adds the sortBy to the list experiment params func (o *ListExperimentParams) WithSortBy(sortBy *string) *ListExperimentParams { o.SetSortBy(sortBy) @@ -231,6 +276,38 @@ func (o *ListExperimentParams) WriteToRequest(r runtime.ClientRequest, reg strfm } + if o.ResourceReferenceKeyID != nil { + + // query param resource_reference_key.id + var qrResourceReferenceKeyID string + if o.ResourceReferenceKeyID != nil { + qrResourceReferenceKeyID = *o.ResourceReferenceKeyID + } + qResourceReferenceKeyID := qrResourceReferenceKeyID + if qResourceReferenceKeyID != "" { + if err := r.SetQueryParam("resource_reference_key.id", qResourceReferenceKeyID); err != nil { + return err + } + } + + } + + if o.ResourceReferenceKeyType != nil { + + // query param resource_reference_key.type + var qrResourceReferenceKeyType string + if o.ResourceReferenceKeyType != nil { + qrResourceReferenceKeyType = *o.ResourceReferenceKeyType + } + qResourceReferenceKeyType := qrResourceReferenceKeyType + if qResourceReferenceKeyType != "" { + if err := r.SetQueryParam("resource_reference_key.type", qResourceReferenceKeyType); err != nil { + return err + } + } + + } + if o.SortBy != nil { // query param sort_by diff --git a/backend/api/go_http_client/experiment_client/experiment_service/list_experiment_responses.go b/backend/api/go_http_client/experiment_client/experiment_service/list_experiment_responses.go index 020db2e5c31..d527442d50e 100644 --- a/backend/api/go_http_client/experiment_client/experiment_service/list_experiment_responses.go +++ b/backend/api/go_http_client/experiment_client/experiment_service/list_experiment_responses.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
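With the struct fields and the `WriteToRequest` plumbing above, the new `resource_reference_key.type` / `resource_reference_key.id` query parameters are how a caller scopes `ListExperiment` to a referencing resource. A sketch of filtering by namespace; the `NAMESPACE` type string is an assumption consistent with the resource-type enum added to the experiment model later in this change:

```go
package example

import (
	"github.com/go-openapi/runtime"

	"github.com/kubeflow/pipelines/backend/api/go_http_client/experiment_client/experiment_service"
)

// listInNamespace returns only the experiments referenced by the given namespace.
func listInNamespace(svc *experiment_service.Client, auth runtime.ClientAuthInfoWriter, ns string) (*experiment_service.ListExperimentOK, error) {
	refType := "NAMESPACE" // assumed enum string; the generated default is "UNKNOWN_RESOURCE_TYPE"
	params := experiment_service.NewListExperimentParams().
		WithResourceReferenceKeyType(&refType).
		WithResourceReferenceKeyID(&ns)
	return svc.ListExperiment(params, auth)
}
```

Note the constructors above now seed `ResourceReferenceKeyType` with `"UNKNOWN_RESOURCE_TYPE"`, so that value is serialized unless a caller overrides it, while the ID is only written when set and non-empty.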
diff --git a/backend/api/go_http_client/experiment_client/experiment_service/unarchive_experiment_parameters.go b/backend/api/go_http_client/experiment_client/experiment_service/unarchive_experiment_parameters.go new file mode 100644 index 00000000000..297a2959eb7 --- /dev/null +++ b/backend/api/go_http_client/experiment_client/experiment_service/unarchive_experiment_parameters.go @@ -0,0 +1,147 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by go-swagger; DO NOT EDIT. + +package experiment_service + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "net/http" + "time" + + "github.com/go-openapi/errors" + "github.com/go-openapi/runtime" + cr "github.com/go-openapi/runtime/client" + + strfmt "github.com/go-openapi/strfmt" +) + +// NewUnarchiveExperimentParams creates a new UnarchiveExperimentParams object +// with the default values initialized. +func NewUnarchiveExperimentParams() *UnarchiveExperimentParams { + var () + return &UnarchiveExperimentParams{ + + timeout: cr.DefaultTimeout, + } +} + +// NewUnarchiveExperimentParamsWithTimeout creates a new UnarchiveExperimentParams object +// with the default values initialized, and the ability to set a timeout on a request +func NewUnarchiveExperimentParamsWithTimeout(timeout time.Duration) *UnarchiveExperimentParams { + var () + return &UnarchiveExperimentParams{ + + timeout: timeout, + } +} + +// NewUnarchiveExperimentParamsWithContext creates a new UnarchiveExperimentParams object +// with the default values initialized, and the ability to set a context for a request +func NewUnarchiveExperimentParamsWithContext(ctx context.Context) *UnarchiveExperimentParams { + var () + return &UnarchiveExperimentParams{ + + Context: ctx, + } +} + +// NewUnarchiveExperimentParamsWithHTTPClient creates a new UnarchiveExperimentParams object +// with the default values initialized, and the ability to set a custom HTTPClient for a request +func NewUnarchiveExperimentParamsWithHTTPClient(client *http.Client) *UnarchiveExperimentParams { + var () + return &UnarchiveExperimentParams{ + HTTPClient: client, + } +} + +/*UnarchiveExperimentParams contains all the parameters to send to the API endpoint +for the unarchive experiment operation typically these are written to a http.Request +*/ +type UnarchiveExperimentParams struct { + + /*ID*/ + ID string + + timeout time.Duration + Context context.Context + HTTPClient *http.Client +} + +// WithTimeout adds the timeout to the unarchive experiment params +func (o *UnarchiveExperimentParams) WithTimeout(timeout time.Duration) *UnarchiveExperimentParams { + o.SetTimeout(timeout) + return o +} + +// SetTimeout adds the timeout to the unarchive experiment params +func (o *UnarchiveExperimentParams) SetTimeout(timeout time.Duration) { + o.timeout = timeout +} + +// WithContext adds the context to the unarchive experiment params +func (o 
*UnarchiveExperimentParams) WithContext(ctx context.Context) *UnarchiveExperimentParams { + o.SetContext(ctx) + return o +} + +// SetContext adds the context to the unarchive experiment params +func (o *UnarchiveExperimentParams) SetContext(ctx context.Context) { + o.Context = ctx +} + +// WithHTTPClient adds the HTTPClient to the unarchive experiment params +func (o *UnarchiveExperimentParams) WithHTTPClient(client *http.Client) *UnarchiveExperimentParams { + o.SetHTTPClient(client) + return o +} + +// SetHTTPClient adds the HTTPClient to the unarchive experiment params +func (o *UnarchiveExperimentParams) SetHTTPClient(client *http.Client) { + o.HTTPClient = client +} + +// WithID adds the id to the unarchive experiment params +func (o *UnarchiveExperimentParams) WithID(id string) *UnarchiveExperimentParams { + o.SetID(id) + return o +} + +// SetID adds the id to the unarchive experiment params +func (o *UnarchiveExperimentParams) SetID(id string) { + o.ID = id +} + +// WriteToRequest writes these params to a swagger request +func (o *UnarchiveExperimentParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { + + if err := r.SetTimeout(o.timeout); err != nil { + return err + } + var res []error + + // path param id + if err := r.SetPathParam("id", o.ID); err != nil { + return err + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} diff --git a/backend/api/go_http_client/experiment_client/experiment_service/unarchive_experiment_responses.go b/backend/api/go_http_client/experiment_client/experiment_service/unarchive_experiment_responses.go new file mode 100644 index 00000000000..0713fb7ec45 --- /dev/null +++ b/backend/api/go_http_client/experiment_client/experiment_service/unarchive_experiment_responses.go @@ -0,0 +1,124 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by go-swagger; DO NOT EDIT. + +package experiment_service + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "fmt" + "io" + + "github.com/go-openapi/runtime" + + strfmt "github.com/go-openapi/strfmt" + + experiment_model "github.com/kubeflow/pipelines/backend/api/go_http_client/experiment_model" +) + +// UnarchiveExperimentReader is a Reader for the UnarchiveExperiment structure. +type UnarchiveExperimentReader struct { + formats strfmt.Registry +} + +// ReadResponse reads a server response into the received o. 
+func (o *UnarchiveExperimentReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { + switch response.Code() { + + case 200: + result := NewUnarchiveExperimentOK() + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + return result, nil + + default: + result := NewUnarchiveExperimentDefault(response.Code()) + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + if response.Code()/100 == 2 { + return result, nil + } + return nil, result + } +} + +// NewUnarchiveExperimentOK creates a UnarchiveExperimentOK with default headers values +func NewUnarchiveExperimentOK() *UnarchiveExperimentOK { + return &UnarchiveExperimentOK{} +} + +/*UnarchiveExperimentOK handles this case with default header values. + +A successful response. +*/ +type UnarchiveExperimentOK struct { + Payload interface{} +} + +func (o *UnarchiveExperimentOK) Error() string { + return fmt.Sprintf("[POST /apis/v1beta1/experiments/{id}:unarchive][%d] unarchiveExperimentOK %+v", 200, o.Payload) +} + +func (o *UnarchiveExperimentOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + // response payload + if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} + +// NewUnarchiveExperimentDefault creates a UnarchiveExperimentDefault with default headers values +func NewUnarchiveExperimentDefault(code int) *UnarchiveExperimentDefault { + return &UnarchiveExperimentDefault{ + _statusCode: code, + } +} + +/*UnarchiveExperimentDefault handles this case with default header values. + +UnarchiveExperimentDefault unarchive experiment default +*/ +type UnarchiveExperimentDefault struct { + _statusCode int + + Payload *experiment_model.APIStatus +} + +// Code gets the status code for the unarchive experiment default response +func (o *UnarchiveExperimentDefault) Code() int { + return o._statusCode +} + +func (o *UnarchiveExperimentDefault) Error() string { + return fmt.Sprintf("[POST /apis/v1beta1/experiments/{id}:unarchive][%d] UnarchiveExperiment default %+v", o._statusCode, o.Payload) +} + +func (o *UnarchiveExperimentDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + o.Payload = new(experiment_model.APIStatus) + + // response payload + if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} diff --git a/backend/api/go_http_client/experiment_model/BUILD.bazel b/backend/api/go_http_client/experiment_model/BUILD.bazel index 42ad98a21b1..cf3f4b58f86 100644 --- a/backend/api/go_http_client/experiment_model/BUILD.bazel +++ b/backend/api/go_http_client/experiment_model/BUILD.bazel @@ -5,7 +5,12 @@ go_library( srcs = [ "api_experiment.go", "api_list_experiments_response.go", + "api_relationship.go", + "api_resource_key.go", + "api_resource_reference.go", + "api_resource_type.go", "api_status.go", + "experiment_storage_state.go", "protobuf_any.go", ], importpath = "github.com/kubeflow/pipelines/backend/api/go_http_client/experiment_model", diff --git a/backend/api/go_http_client/experiment_model/api_experiment.go b/backend/api/go_http_client/experiment_model/api_experiment.go index 415aea304a4..3f7e7520537 100644 --- a/backend/api/go_http_client/experiment_model/api_experiment.go +++ b/backend/api/go_http_client/experiment_model/api_experiment.go @@ -1,4 
+1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -20,6 +20,8 @@ package experiment_model // Editing this file might prove futile when you re-run the swagger generate command import ( + "strconv" + strfmt "github.com/go-openapi/strfmt" "github.com/go-openapi/errors" @@ -43,6 +45,13 @@ type APIExperiment struct { // Required input field. Unique experiment name provided by user. Name string `json:"name,omitempty"` + + // Optional input field. Specify which resource this experiment belongs to. + // For Experiment, the only valid resource reference is a single Namespace. + ResourceReferences []*APIResourceReference `json:"resource_references"` + + // storage state + StorageState ExperimentStorageState `json:"storage_state,omitempty"` } // Validate validates this api experiment @@ -53,6 +62,14 @@ func (m *APIExperiment) Validate(formats strfmt.Registry) error { res = append(res, err) } + if err := m.validateResourceReferences(formats); err != nil { + res = append(res, err) + } + + if err := m.validateStorageState(formats); err != nil { + res = append(res, err) + } + if len(res) > 0 { return errors.CompositeValidationError(res...) } @@ -72,6 +89,47 @@ func (m *APIExperiment) validateCreatedAt(formats strfmt.Registry) error { return nil } +func (m *APIExperiment) validateResourceReferences(formats strfmt.Registry) error { + + if swag.IsZero(m.ResourceReferences) { // not required + return nil + } + + for i := 0; i < len(m.ResourceReferences); i++ { + if swag.IsZero(m.ResourceReferences[i]) { // not required + continue + } + + if m.ResourceReferences[i] != nil { + if err := m.ResourceReferences[i].Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("resource_references" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +func (m *APIExperiment) validateStorageState(formats strfmt.Registry) error { + + if swag.IsZero(m.StorageState) { // not required + return nil + } + + if err := m.StorageState.Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("storage_state") + } + return err + } + + return nil +} + // MarshalBinary interface implementation func (m *APIExperiment) MarshalBinary() ([]byte, error) { if m == nil { diff --git a/backend/api/go_http_client/experiment_model/api_list_experiments_response.go b/backend/api/go_http_client/experiment_model/api_list_experiments_response.go index 2df91a0e806..6cc7ccdd43c 100644 --- a/backend/api/go_http_client/experiment_model/api_list_experiments_response.go +++ b/backend/api/go_http_client/experiment_model/api_list_experiments_response.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/experiment_model/api_relationship.go b/backend/api/go_http_client/experiment_model/api_relationship.go new file mode 100644 index 00000000000..91c6022d500 --- /dev/null +++ b/backend/api/go_http_client/experiment_model/api_relationship.go @@ -0,0 +1,80 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Code generated by go-swagger; DO NOT EDIT.
+
+package experiment_model
+
+// This file was generated by the swagger tool.
+// Editing this file might prove futile when you re-run the swagger generate command
+
+import (
+    "encoding/json"
+
+    strfmt "github.com/go-openapi/strfmt"
+
+    "github.com/go-openapi/errors"
+    "github.com/go-openapi/validate"
+)
+
+// APIRelationship api relationship
+// swagger:model apiRelationship
+type APIRelationship string
+
+const (
+
+    // APIRelationshipUNKNOWNRELATIONSHIP captures enum value "UNKNOWN_RELATIONSHIP"
+    APIRelationshipUNKNOWNRELATIONSHIP APIRelationship = "UNKNOWN_RELATIONSHIP"
+
+    // APIRelationshipOWNER captures enum value "OWNER"
+    APIRelationshipOWNER APIRelationship = "OWNER"
+
+    // APIRelationshipCREATOR captures enum value "CREATOR"
+    APIRelationshipCREATOR APIRelationship = "CREATOR"
+)
+
+// for schema
+var apiRelationshipEnum []interface{}
+
+func init() {
+    var res []APIRelationship
+    if err := json.Unmarshal([]byte(`["UNKNOWN_RELATIONSHIP","OWNER","CREATOR"]`), &res); err != nil {
+        panic(err)
+    }
+    for _, v := range res {
+        apiRelationshipEnum = append(apiRelationshipEnum, v)
+    }
+}
+
+func (m APIRelationship) validateAPIRelationshipEnum(path, location string, value APIRelationship) error {
+    if err := validate.Enum(path, location, value, apiRelationshipEnum); err != nil {
+        return err
+    }
+    return nil
+}
+
+// Validate validates this api relationship
+func (m APIRelationship) Validate(formats strfmt.Registry) error {
+    var res []error
+
+    // value enum
+    if err := m.validateAPIRelationshipEnum("", "body", m); err != nil {
+        return err
+    }
+
+    if len(res) > 0 {
+        return errors.CompositeValidationError(res...)
+    }
+    return nil
+}
diff --git a/backend/api/go_http_client/experiment_model/api_resource_key.go b/backend/api/go_http_client/experiment_model/api_resource_key.go
new file mode 100644
index 00000000000..522d442aa8f
--- /dev/null
+++ b/backend/api/go_http_client/experiment_model/api_resource_key.go
@@ -0,0 +1,86 @@
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Code generated by go-swagger; DO NOT EDIT.
+
+package experiment_model
+
+// This file was generated by the swagger tool.
+// Editing this file might prove futile when you re-run the swagger generate command
+
+import (
+    strfmt "github.com/go-openapi/strfmt"
+
+    "github.com/go-openapi/errors"
+    "github.com/go-openapi/swag"
+)
+
+// APIResourceKey api resource key
+// swagger:model apiResourceKey
+type APIResourceKey struct {
+
+    // The ID of the resource that is referred to.
+    ID string `json:"id,omitempty"`
+
+    // The type of the resource that is referred to.
+    Type APIResourceType `json:"type,omitempty"`
+}
+
+// Validate validates this api resource key
+func (m *APIResourceKey) Validate(formats strfmt.Registry) error {
+    var res []error
+
+    if err := m.validateType(formats); err != nil {
+        res = append(res, err)
+    }
+
+    if len(res) > 0 {
+        return errors.CompositeValidationError(res...)
+    }
+    return nil
+}
+
+func (m *APIResourceKey) validateType(formats strfmt.Registry) error {
+
+    if swag.IsZero(m.Type) { // not required
+        return nil
+    }
+
+    if err := m.Type.Validate(formats); err != nil {
+        if ve, ok := err.(*errors.Validation); ok {
+            return ve.ValidateName("type")
+        }
+        return err
+    }
+
+    return nil
+}
+
+// MarshalBinary interface implementation
+func (m *APIResourceKey) MarshalBinary() ([]byte, error) {
+    if m == nil {
+        return nil, nil
+    }
+    return swag.WriteJSON(m)
+}
+
+// UnmarshalBinary interface implementation
+func (m *APIResourceKey) UnmarshalBinary(b []byte) error {
+    var res APIResourceKey
+    if err := swag.ReadJSON(b, &res); err != nil {
+        return err
+    }
+    *m = res
+    return nil
+}
diff --git a/backend/api/go_http_client/experiment_model/api_resource_reference.go b/backend/api/go_http_client/experiment_model/api_resource_reference.go
new file mode 100644
index 00000000000..b335073e47f
--- /dev/null
+++ b/backend/api/go_http_client/experiment_model/api_resource_reference.go
@@ -0,0 +1,111 @@
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Code generated by go-swagger; DO NOT EDIT.
+
+package experiment_model
+
+// This file was generated by the swagger tool.
+// Editing this file might prove futile when you re-run the swagger generate command
+
+import (
+    strfmt "github.com/go-openapi/strfmt"
+
+    "github.com/go-openapi/errors"
+    "github.com/go-openapi/swag"
+)
+
+// APIResourceReference api resource reference
+// swagger:model apiResourceReference
+type APIResourceReference struct {
+
+    // key
+    Key *APIResourceKey `json:"key,omitempty"`
+
+    // The name of the resource that is referred to.
+    Name string `json:"name,omitempty"`
+
+    // Required field. The relationship from the referred resource to the object.
+    Relationship APIRelationship `json:"relationship,omitempty"`
+}
+
+// Validate validates this api resource reference
+func (m *APIResourceReference) Validate(formats strfmt.Registry) error {
+    var res []error
+
+    if err := m.validateKey(formats); err != nil {
+        res = append(res, err)
+    }
+
+    if err := m.validateRelationship(formats); err != nil {
+        res = append(res, err)
+    }
+
+    if len(res) > 0 {
+        return errors.CompositeValidationError(res...)
+    }
+    return nil
+}
+
+func (m *APIResourceReference) validateKey(formats strfmt.Registry) error {
+
+    if swag.IsZero(m.Key) { // not required
+        return nil
+    }
+
+    if m.Key != nil {
+        if err := m.Key.Validate(formats); err != nil {
+            if ve, ok := err.(*errors.Validation); ok {
+                return ve.ValidateName("key")
+            }
+            return err
+        }
+    }
+
+    return nil
+}
+
+func (m *APIResourceReference) validateRelationship(formats strfmt.Registry) error {
+
+    if swag.IsZero(m.Relationship) { // not required
+        return nil
+    }
+
+    if err := m.Relationship.Validate(formats); err != nil {
+        if ve, ok := err.(*errors.Validation); ok {
+            return ve.ValidateName("relationship")
+        }
+        return err
+    }
+
+    return nil
+}
+
+// MarshalBinary interface implementation
+func (m *APIResourceReference) MarshalBinary() ([]byte, error) {
+    if m == nil {
+        return nil, nil
+    }
+    return swag.WriteJSON(m)
+}
+
+// UnmarshalBinary interface implementation
+func (m *APIResourceReference) UnmarshalBinary(b []byte) error {
+    var res APIResourceReference
+    if err := swag.ReadJSON(b, &res); err != nil {
+        return err
+    }
+    *m = res
+    return nil
+}
diff --git a/backend/api/go_http_client/experiment_model/api_resource_type.go b/backend/api/go_http_client/experiment_model/api_resource_type.go
new file mode 100644
index 00000000000..f1d765c1c10
--- /dev/null
+++ b/backend/api/go_http_client/experiment_model/api_resource_type.go
@@ -0,0 +1,89 @@
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Code generated by go-swagger; DO NOT EDIT.
+
+package experiment_model
+
+// This file was generated by the swagger tool.
+// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "encoding/json" + + strfmt "github.com/go-openapi/strfmt" + + "github.com/go-openapi/errors" + "github.com/go-openapi/validate" +) + +// APIResourceType api resource type +// swagger:model apiResourceType +type APIResourceType string + +const ( + + // APIResourceTypeUNKNOWNRESOURCETYPE captures enum value "UNKNOWN_RESOURCE_TYPE" + APIResourceTypeUNKNOWNRESOURCETYPE APIResourceType = "UNKNOWN_RESOURCE_TYPE" + + // APIResourceTypeEXPERIMENT captures enum value "EXPERIMENT" + APIResourceTypeEXPERIMENT APIResourceType = "EXPERIMENT" + + // APIResourceTypeJOB captures enum value "JOB" + APIResourceTypeJOB APIResourceType = "JOB" + + // APIResourceTypePIPELINE captures enum value "PIPELINE" + APIResourceTypePIPELINE APIResourceType = "PIPELINE" + + // APIResourceTypePIPELINEVERSION captures enum value "PIPELINE_VERSION" + APIResourceTypePIPELINEVERSION APIResourceType = "PIPELINE_VERSION" + + // APIResourceTypeNAMESPACE captures enum value "NAMESPACE" + APIResourceTypeNAMESPACE APIResourceType = "NAMESPACE" +) + +// for schema +var apiResourceTypeEnum []interface{} + +func init() { + var res []APIResourceType + if err := json.Unmarshal([]byte(`["UNKNOWN_RESOURCE_TYPE","EXPERIMENT","JOB","PIPELINE","PIPELINE_VERSION","NAMESPACE"]`), &res); err != nil { + panic(err) + } + for _, v := range res { + apiResourceTypeEnum = append(apiResourceTypeEnum, v) + } +} + +func (m APIResourceType) validateAPIResourceTypeEnum(path, location string, value APIResourceType) error { + if err := validate.Enum(path, location, value, apiResourceTypeEnum); err != nil { + return err + } + return nil +} + +// Validate validates this api resource type +func (m APIResourceType) Validate(formats strfmt.Registry) error { + var res []error + + // value enum + if err := m.validateAPIResourceTypeEnum("", "body", m); err != nil { + return err + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} diff --git a/backend/api/go_http_client/experiment_model/api_status.go b/backend/api/go_http_client/experiment_model/api_status.go index 6df4f901311..c6f827f0497 100644 --- a/backend/api/go_http_client/experiment_model/api_status.go +++ b/backend/api/go_http_client/experiment_model/api_status.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/experiment_model/experiment_storage_state.go b/backend/api/go_http_client/experiment_model/experiment_storage_state.go new file mode 100644 index 00000000000..3d1bcad4c74 --- /dev/null +++ b/backend/api/go_http_client/experiment_model/experiment_storage_state.go @@ -0,0 +1,80 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by go-swagger; DO NOT EDIT. 
+ +package experiment_model + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "encoding/json" + + strfmt "github.com/go-openapi/strfmt" + + "github.com/go-openapi/errors" + "github.com/go-openapi/validate" +) + +// ExperimentStorageState experiment storage state +// swagger:model ExperimentStorageState +type ExperimentStorageState string + +const ( + + // ExperimentStorageStateSTORAGESTATEUNSPECIFIED captures enum value "STORAGESTATE_UNSPECIFIED" + ExperimentStorageStateSTORAGESTATEUNSPECIFIED ExperimentStorageState = "STORAGESTATE_UNSPECIFIED" + + // ExperimentStorageStateSTORAGESTATEAVAILABLE captures enum value "STORAGESTATE_AVAILABLE" + ExperimentStorageStateSTORAGESTATEAVAILABLE ExperimentStorageState = "STORAGESTATE_AVAILABLE" + + // ExperimentStorageStateSTORAGESTATEARCHIVED captures enum value "STORAGESTATE_ARCHIVED" + ExperimentStorageStateSTORAGESTATEARCHIVED ExperimentStorageState = "STORAGESTATE_ARCHIVED" +) + +// for schema +var experimentStorageStateEnum []interface{} + +func init() { + var res []ExperimentStorageState + if err := json.Unmarshal([]byte(`["STORAGESTATE_UNSPECIFIED","STORAGESTATE_AVAILABLE","STORAGESTATE_ARCHIVED"]`), &res); err != nil { + panic(err) + } + for _, v := range res { + experimentStorageStateEnum = append(experimentStorageStateEnum, v) + } +} + +func (m ExperimentStorageState) validateExperimentStorageStateEnum(path, location string, value ExperimentStorageState) error { + if err := validate.Enum(path, location, value, experimentStorageStateEnum); err != nil { + return err + } + return nil +} + +// Validate validates this experiment storage state +func (m ExperimentStorageState) Validate(formats strfmt.Registry) error { + var res []error + + // value enum + if err := m.validateExperimentStorageStateEnum("", "body", m); err != nil { + return err + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} diff --git a/backend/api/go_http_client/experiment_model/protobuf_any.go b/backend/api/go_http_client/experiment_model/protobuf_any.go index ef8da994723..036b03db7b4 100644 --- a/backend/api/go_http_client/experiment_model/protobuf_any.go +++ b/backend/api/go_http_client/experiment_model/protobuf_any.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/job_client/job_client.go b/backend/api/go_http_client/job_client/job_client.go index ee128ea98e2..7e3a7a6be6d 100644 --- a/backend/api/go_http_client/job_client/job_client.go +++ b/backend/api/go_http_client/job_client/job_client.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
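Taken together, the new experiment_model types above compose as in the following minimal Go sketch. This is not part of the diff: the import path mirrors the generated packages shown here, and the experiment and namespace names are purely illustrative.

package main

import (
	"fmt"

	strfmt "github.com/go-openapi/strfmt"

	experiment_model "github.com/kubeflow/pipelines/backend/api/go_http_client/experiment_model"
)

func main() {
	exp := &experiment_model.APIExperiment{
		Name: "my-experiment", // illustrative name
		// For experiments, the only valid reference is a single namespace.
		ResourceReferences: []*experiment_model.APIResourceReference{
			{
				Key: &experiment_model.APIResourceKey{
					ID:   "kubeflow-user", // hypothetical namespace
					Type: experiment_model.APIResourceTypeNAMESPACE,
				},
				Relationship: experiment_model.APIRelationshipOWNER,
			},
		},
		StorageState: experiment_model.ExperimentStorageStateSTORAGESTATEAVAILABLE,
	}

	// Validate exercises the enum and nested-reference checks added above.
	if err := exp.Validate(strfmt.Default); err != nil {
		fmt.Println("validation failed:", err)
		return
	}
	fmt.Println("experiment model is valid")
}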
diff --git a/backend/api/go_http_client/job_client/job_service/create_job_parameters.go b/backend/api/go_http_client/job_client/job_service/create_job_parameters.go index fddd02458be..c0f825c56ad 100644 --- a/backend/api/go_http_client/job_client/job_service/create_job_parameters.go +++ b/backend/api/go_http_client/job_client/job_service/create_job_parameters.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/job_client/job_service/create_job_responses.go b/backend/api/go_http_client/job_client/job_service/create_job_responses.go index b57ff262d38..fe1c2c19efd 100644 --- a/backend/api/go_http_client/job_client/job_service/create_job_responses.go +++ b/backend/api/go_http_client/job_client/job_service/create_job_responses.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/job_client/job_service/delete_job_parameters.go b/backend/api/go_http_client/job_client/job_service/delete_job_parameters.go index b90eb03c2fc..7d68014778f 100644 --- a/backend/api/go_http_client/job_client/job_service/delete_job_parameters.go +++ b/backend/api/go_http_client/job_client/job_service/delete_job_parameters.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/job_client/job_service/delete_job_responses.go b/backend/api/go_http_client/job_client/job_service/delete_job_responses.go index d070b340e82..90a03f0ed55 100644 --- a/backend/api/go_http_client/job_client/job_service/delete_job_responses.go +++ b/backend/api/go_http_client/job_client/job_service/delete_job_responses.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/job_client/job_service/disable_job_parameters.go b/backend/api/go_http_client/job_client/job_service/disable_job_parameters.go index 6ab9d66de23..69ff70df0d9 100644 --- a/backend/api/go_http_client/job_client/job_service/disable_job_parameters.go +++ b/backend/api/go_http_client/job_client/job_service/disable_job_parameters.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/job_client/job_service/disable_job_responses.go b/backend/api/go_http_client/job_client/job_service/disable_job_responses.go index 3e197936a46..82c7d55e4ef 100644 --- a/backend/api/go_http_client/job_client/job_service/disable_job_responses.go +++ b/backend/api/go_http_client/job_client/job_service/disable_job_responses.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/backend/api/go_http_client/job_client/job_service/enable_job_parameters.go b/backend/api/go_http_client/job_client/job_service/enable_job_parameters.go index 2a2f7f0e867..75f9e3b81c0 100644 --- a/backend/api/go_http_client/job_client/job_service/enable_job_parameters.go +++ b/backend/api/go_http_client/job_client/job_service/enable_job_parameters.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/job_client/job_service/enable_job_responses.go b/backend/api/go_http_client/job_client/job_service/enable_job_responses.go index 6d084ea890e..8ff7ca1a56e 100644 --- a/backend/api/go_http_client/job_client/job_service/enable_job_responses.go +++ b/backend/api/go_http_client/job_client/job_service/enable_job_responses.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/job_client/job_service/get_job_parameters.go b/backend/api/go_http_client/job_client/job_service/get_job_parameters.go index 604b3632c12..e87d9bebe8a 100644 --- a/backend/api/go_http_client/job_client/job_service/get_job_parameters.go +++ b/backend/api/go_http_client/job_client/job_service/get_job_parameters.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/job_client/job_service/get_job_responses.go b/backend/api/go_http_client/job_client/job_service/get_job_responses.go index a759c45a486..6d397fa95c2 100644 --- a/backend/api/go_http_client/job_client/job_service/get_job_responses.go +++ b/backend/api/go_http_client/job_client/job_service/get_job_responses.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/job_client/job_service/job_service_client.go b/backend/api/go_http_client/job_client/job_service/job_service_client.go index 271b8a6ce59..b31e153b24f 100644 --- a/backend/api/go_http_client/job_client/job_service/job_service_client.go +++ b/backend/api/go_http_client/job_client/job_service/job_service_client.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -39,7 +39,7 @@ type Client struct {
 }

 /*
-CreateJob create job API
+CreateJob creates a new job
 */
 func (a *Client) CreateJob(params *CreateJobParams, authInfo runtime.ClientAuthInfoWriter) (*CreateJobOK, error) {
     // TODO: Validate the params before sending
@@ -68,7 +68,7 @@ func (a *Client) CreateJob(params *CreateJobParams, authInfo runtime.ClientAuthI
 }

 /*
-DeleteJob delete job API
+DeleteJob deletes a job
 */
 func (a *Client) DeleteJob(params *DeleteJobParams, authInfo runtime.ClientAuthInfoWriter) (*DeleteJobOK, error) {
     // TODO: Validate the params before sending
@@ -97,7 +97,7 @@ func (a *Client) DeleteJob(params *DeleteJobParams, authInfo runtime.ClientAuthI
 }

 /*
-DisableJob disable job API
+DisableJob stops a job and all its associated runs. The job is not deleted.
 */
 func (a *Client) DisableJob(params *DisableJobParams, authInfo runtime.ClientAuthInfoWriter) (*DisableJobOK, error) {
     // TODO: Validate the params before sending
@@ -126,7 +126,7 @@ func (a *Client) DisableJob(params *DisableJobParams, authInfo runtime.ClientAut
 }

 /*
-EnableJob enable job API
+EnableJob restarts a job that was previously stopped. All runs associated with the job will continue.
 */
 func (a *Client) EnableJob(params *EnableJobParams, authInfo runtime.ClientAuthInfoWriter) (*EnableJobOK, error) {
     // TODO: Validate the params before sending
@@ -155,7 +155,7 @@ func (a *Client) EnableJob(params *EnableJobParams, authInfo runtime.ClientAuthI
 }

 /*
-GetJob get job API
+GetJob finds a specific job by ID
 */
 func (a *Client) GetJob(params *GetJobParams, authInfo runtime.ClientAuthInfoWriter) (*GetJobOK, error) {
     // TODO: Validate the params before sending
@@ -184,7 +184,7 @@ func (a *Client) GetJob(params *GetJobParams, authInfo runtime.ClientAuthInfoWri
 }

 /*
-ListJobs list jobs API
+ListJobs finds all jobs
 */
 func (a *Client) ListJobs(params *ListJobsParams, authInfo runtime.ClientAuthInfoWriter) (*ListJobsOK, error) {
     // TODO: Validate the params before sending
diff --git a/backend/api/go_http_client/job_client/job_service/list_jobs_parameters.go b/backend/api/go_http_client/job_client/job_service/list_jobs_parameters.go
index 8b1e0acfb76..41f4d691118 100644
--- a/backend/api/go_http_client/job_client/job_service/list_jobs_parameters.go
+++ b/backend/api/go_http_client/job_client/job_service/list_jobs_parameters.go
@@ -1,4 +1,4 @@
-// Copyright 2019 Google LLC
+// Copyright 2020 Google LLC
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -89,8 +89,8 @@ for the list jobs operation typically these are written to a http.Request
 type ListJobsParams struct {

     /*Filter
-      A base-64 encoded, JSON-serialized Filter protocol buffer (see
-      filter.proto).
+      A url-encoded, JSON-serialized Filter protocol buffer (see
+      [filter.proto](https://github.com/kubeflow/pipelines/blob/master/backend/api/filter.proto)).

     */
     Filter *string
@@ -109,7 +110,7 @@ type ListJobsParams struct {
     */
     ResourceReferenceKeyType *string
     /*SortBy
-      Can be format of "field_name", "field_name asc" or "field_name des"
+      Can be in the format "field_name", "field_name asc" or "field_name desc".
       Ascending by default.
     */
diff --git a/backend/api/go_http_client/job_client/job_service/list_jobs_responses.go b/backend/api/go_http_client/job_client/job_service/list_jobs_responses.go
index ca22ff55fd5..853933da722 100644
--- a/backend/api/go_http_client/job_client/job_service/list_jobs_responses.go
+++ b/backend/api/go_http_client/job_client/job_service/list_jobs_responses.go
@@ -1,4 +1,4 @@
-// Copyright 2019 Google LLC
+// Copyright 2020 Google LLC
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/backend/api/go_http_client/job_model/api_cron_schedule.go b/backend/api/go_http_client/job_model/api_cron_schedule.go
index 2782d88bf82..57dbb56f08c 100644
--- a/backend/api/go_http_client/job_model/api_cron_schedule.go
+++ b/backend/api/go_http_client/job_model/api_cron_schedule.go
@@ -1,4 +1,4 @@
-// Copyright 2019 Google LLC
+// Copyright 2020 Google LLC
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -31,7 +31,8 @@ import (
 // swagger:model apiCronSchedule
 type APICronSchedule struct {

-    // The cron string. For details how to compose a cron, visit ttps://en.wikipedia.org/wiki/Cron
+    // The cron string. For details on how to compose a cron, visit
+    // https://en.wikipedia.org/wiki/Cron
     Cron string `json:"cron,omitempty"`

     // The end time of the cron job
diff --git a/backend/api/go_http_client/job_model/api_job.go b/backend/api/go_http_client/job_model/api_job.go
index ad967b7d7b9..b38e4becc34 100644
--- a/backend/api/go_http_client/job_model/api_job.go
+++ b/backend/api/go_http_client/job_model/api_job.go
@@ -1,4 +1,4 @@
-// Copyright 2019 Google LLC
+// Copyright 2020 Google LLC
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -61,14 +61,22 @@ type APIJob struct {

     // Required input field. Job name provided by user. Not unique.
     Name string `json:"name,omitempty"`

+    // Optional input field. Whether the job should catch up if behind schedule.
+    // If true, the job will only schedule the latest interval if behind schedule.
+    // If false, the job will catch up on each past interval.
+    NoCatchup bool `json:"no_catchup,omitempty"`
+
     // Required input field.
     // Describing what the pipeline manifest and parameters to use
     // for the scheduled job.
     PipelineSpec *APIPipelineSpec `json:"pipeline_spec,omitempty"`

-    // Optional input field. Specify which resource this run belongs to.
+    // Optional input field. Specify which resource this job belongs to.
     ResourceReferences []*APIResourceReference `json:"resource_references"`

+    // Optional input field. Specify which Kubernetes service account this job uses.
+    ServiceAccount string `json:"service_account,omitempty"`
+
     // Output. The status of the job.
     // One of [Enable, Disable, Error]
     Status string `json:"status,omitempty"`
diff --git a/backend/api/go_http_client/job_model/api_list_jobs_response.go b/backend/api/go_http_client/job_model/api_list_jobs_response.go
index 36f28a582e3..4af64499ce2 100644
--- a/backend/api/go_http_client/job_model/api_list_jobs_response.go
+++ b/backend/api/go_http_client/job_model/api_list_jobs_response.go
@@ -1,4 +1,4 @@
-// Copyright 2019 Google LLC
+// Copyright 2020 Google LLC
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
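For context, a small sketch of how a client might set the new APIJob fields. This is not part of the diff: the values are hypothetical, and only the generated job_model package shown above is assumed.

package main

import (
	"fmt"

	job_model "github.com/kubeflow/pipelines/backend/api/go_http_client/job_model"
)

func main() {
	job := &job_model.APIJob{
		Name: "nightly-train", // illustrative job name
		// New field: when true, a job that falls behind schedule only runs
		// the latest missed interval instead of backfilling every interval.
		NoCatchup: true,
		// New field: the Kubernetes service account the job's runs execute under.
		ServiceAccount: "pipeline-runner", // hypothetical service account
	}
	fmt.Printf("job: %+v\n", job)
}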
diff --git a/backend/api/go_http_client/job_model/api_parameter.go b/backend/api/go_http_client/job_model/api_parameter.go index c5e79be1bc3..979ec9baa9a 100644 --- a/backend/api/go_http_client/job_model/api_parameter.go +++ b/backend/api/go_http_client/job_model/api_parameter.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/job_model/api_periodic_schedule.go b/backend/api/go_http_client/job_model/api_periodic_schedule.go index 4fa1054fe13..53fbf59a65b 100644 --- a/backend/api/go_http_client/job_model/api_periodic_schedule.go +++ b/backend/api/go_http_client/job_model/api_periodic_schedule.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/job_model/api_pipeline_spec.go b/backend/api/go_http_client/job_model/api_pipeline_spec.go index 25bacafbf50..0a0088bc390 100644 --- a/backend/api/go_http_client/job_model/api_pipeline_spec.go +++ b/backend/api/go_http_client/job_model/api_pipeline_spec.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/job_model/api_relationship.go b/backend/api/go_http_client/job_model/api_relationship.go index ba1bf6044d8..8cb5fe3a54d 100644 --- a/backend/api/go_http_client/job_model/api_relationship.go +++ b/backend/api/go_http_client/job_model/api_relationship.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/job_model/api_resource_key.go b/backend/api/go_http_client/job_model/api_resource_key.go index 7a072d6c823..e423e00e049 100644 --- a/backend/api/go_http_client/job_model/api_resource_key.go +++ b/backend/api/go_http_client/job_model/api_resource_key.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/job_model/api_resource_reference.go b/backend/api/go_http_client/job_model/api_resource_reference.go index 2bb68bad1da..7db74af8d5e 100644 --- a/backend/api/go_http_client/job_model/api_resource_reference.go +++ b/backend/api/go_http_client/job_model/api_resource_reference.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/backend/api/go_http_client/job_model/api_resource_type.go b/backend/api/go_http_client/job_model/api_resource_type.go index a3f51152798..cb8b9c1d67d 100644 --- a/backend/api/go_http_client/job_model/api_resource_type.go +++ b/backend/api/go_http_client/job_model/api_resource_type.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -42,6 +42,15 @@ const ( // APIResourceTypeJOB captures enum value "JOB" APIResourceTypeJOB APIResourceType = "JOB" + + // APIResourceTypePIPELINE captures enum value "PIPELINE" + APIResourceTypePIPELINE APIResourceType = "PIPELINE" + + // APIResourceTypePIPELINEVERSION captures enum value "PIPELINE_VERSION" + APIResourceTypePIPELINEVERSION APIResourceType = "PIPELINE_VERSION" + + // APIResourceTypeNAMESPACE captures enum value "NAMESPACE" + APIResourceTypeNAMESPACE APIResourceType = "NAMESPACE" ) // for schema @@ -49,7 +58,7 @@ var apiResourceTypeEnum []interface{} func init() { var res []APIResourceType - if err := json.Unmarshal([]byte(`["UNKNOWN_RESOURCE_TYPE","EXPERIMENT","JOB"]`), &res); err != nil { + if err := json.Unmarshal([]byte(`["UNKNOWN_RESOURCE_TYPE","EXPERIMENT","JOB","PIPELINE","PIPELINE_VERSION","NAMESPACE"]`), &res); err != nil { panic(err) } for _, v := range res { diff --git a/backend/api/go_http_client/job_model/api_status.go b/backend/api/go_http_client/job_model/api_status.go index cf91036030b..5ba14433b01 100644 --- a/backend/api/go_http_client/job_model/api_status.go +++ b/backend/api/go_http_client/job_model/api_status.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/job_model/api_trigger.go b/backend/api/go_http_client/job_model/api_trigger.go index 99d14df75f4..410a07bff30 100644 --- a/backend/api/go_http_client/job_model/api_trigger.go +++ b/backend/api/go_http_client/job_model/api_trigger.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/job_model/job_mode.go b/backend/api/go_http_client/job_model/job_mode.go index acba1e78b0e..0d32bc42515 100644 --- a/backend/api/go_http_client/job_model/job_mode.go +++ b/backend/api/go_http_client/job_model/job_mode.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/job_model/protobuf_any.go b/backend/api/go_http_client/job_model/protobuf_any.go index fe6c9089430..63389b776f8 100644 --- a/backend/api/go_http_client/job_model/protobuf_any.go +++ b/backend/api/go_http_client/job_model/protobuf_any.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
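The widened enum means values such as PIPELINE_VERSION now pass validation in job_model as well. A quick hypothetical check, assuming job_model's APIResourceType carries the same generated Validate method as its experiment_model twin above:

package main

import (
	"fmt"

	strfmt "github.com/go-openapi/strfmt"

	job_model "github.com/kubeflow/pipelines/backend/api/go_http_client/job_model"
)

func main() {
	// PIPELINE_VERSION is one of the enum values this change adds to job_model.
	t := job_model.APIResourceTypePIPELINEVERSION
	if err := t.Validate(strfmt.Default); err != nil {
		fmt.Println("unexpected validation error:", err)
		return
	}
	fmt.Println(t, "is now a valid resource type")
}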
diff --git a/backend/api/go_http_client/pipeline_client/pipeline_client.go b/backend/api/go_http_client/pipeline_client/pipeline_client.go index 9e8c5a3bcc5..423197de123 100644 --- a/backend/api/go_http_client/pipeline_client/pipeline_client.go +++ b/backend/api/go_http_client/pipeline_client/pipeline_client.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/pipeline_client/pipeline_service/BUILD.bazel b/backend/api/go_http_client/pipeline_client/pipeline_service/BUILD.bazel index 47d92f9f2b1..58fce3e5241 100644 --- a/backend/api/go_http_client/pipeline_client/pipeline_service/BUILD.bazel +++ b/backend/api/go_http_client/pipeline_client/pipeline_service/BUILD.bazel @@ -5,12 +5,22 @@ go_library( srcs = [ "create_pipeline_parameters.go", "create_pipeline_responses.go", + "create_pipeline_version_parameters.go", + "create_pipeline_version_responses.go", "delete_pipeline_parameters.go", "delete_pipeline_responses.go", + "delete_pipeline_version_parameters.go", + "delete_pipeline_version_responses.go", "get_pipeline_parameters.go", "get_pipeline_responses.go", + "get_pipeline_version_parameters.go", + "get_pipeline_version_responses.go", + "get_pipeline_version_template_parameters.go", + "get_pipeline_version_template_responses.go", "get_template_parameters.go", "get_template_responses.go", + "list_pipeline_versions_parameters.go", + "list_pipeline_versions_responses.go", "list_pipelines_parameters.go", "list_pipelines_responses.go", "pipeline_service_client.go", diff --git a/backend/api/go_http_client/pipeline_client/pipeline_service/create_pipeline_parameters.go b/backend/api/go_http_client/pipeline_client/pipeline_service/create_pipeline_parameters.go index 7590a0fe8ee..563bcab43b3 100644 --- a/backend/api/go_http_client/pipeline_client/pipeline_service/create_pipeline_parameters.go +++ b/backend/api/go_http_client/pipeline_client/pipeline_service/create_pipeline_parameters.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/pipeline_client/pipeline_service/create_pipeline_responses.go b/backend/api/go_http_client/pipeline_client/pipeline_service/create_pipeline_responses.go index 085cc307c5e..ebd8703d41f 100644 --- a/backend/api/go_http_client/pipeline_client/pipeline_service/create_pipeline_responses.go +++ b/backend/api/go_http_client/pipeline_client/pipeline_service/create_pipeline_responses.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/pipeline_client/pipeline_service/create_pipeline_version_parameters.go b/backend/api/go_http_client/pipeline_client/pipeline_service/create_pipeline_version_parameters.go new file mode 100644 index 00000000000..6c1eaec63a5 --- /dev/null +++ b/backend/api/go_http_client/pipeline_client/pipeline_service/create_pipeline_version_parameters.go @@ -0,0 +1,154 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by go-swagger; DO NOT EDIT. + +package pipeline_service + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "net/http" + "time" + + "github.com/go-openapi/errors" + "github.com/go-openapi/runtime" + cr "github.com/go-openapi/runtime/client" + + strfmt "github.com/go-openapi/strfmt" + + pipeline_model "github.com/kubeflow/pipelines/backend/api/go_http_client/pipeline_model" +) + +// NewCreatePipelineVersionParams creates a new CreatePipelineVersionParams object +// with the default values initialized. +func NewCreatePipelineVersionParams() *CreatePipelineVersionParams { + var () + return &CreatePipelineVersionParams{ + + timeout: cr.DefaultTimeout, + } +} + +// NewCreatePipelineVersionParamsWithTimeout creates a new CreatePipelineVersionParams object +// with the default values initialized, and the ability to set a timeout on a request +func NewCreatePipelineVersionParamsWithTimeout(timeout time.Duration) *CreatePipelineVersionParams { + var () + return &CreatePipelineVersionParams{ + + timeout: timeout, + } +} + +// NewCreatePipelineVersionParamsWithContext creates a new CreatePipelineVersionParams object +// with the default values initialized, and the ability to set a context for a request +func NewCreatePipelineVersionParamsWithContext(ctx context.Context) *CreatePipelineVersionParams { + var () + return &CreatePipelineVersionParams{ + + Context: ctx, + } +} + +// NewCreatePipelineVersionParamsWithHTTPClient creates a new CreatePipelineVersionParams object +// with the default values initialized, and the ability to set a custom HTTPClient for a request +func NewCreatePipelineVersionParamsWithHTTPClient(client *http.Client) *CreatePipelineVersionParams { + var () + return &CreatePipelineVersionParams{ + HTTPClient: client, + } +} + +/*CreatePipelineVersionParams contains all the parameters to send to the API endpoint +for the create pipeline version operation typically these are written to a http.Request +*/ +type CreatePipelineVersionParams struct { + + /*Body + ResourceReference inside PipelineVersion specifies the pipeline that this + version belongs to. 
+ + */ + Body *pipeline_model.APIPipelineVersion + + timeout time.Duration + Context context.Context + HTTPClient *http.Client +} + +// WithTimeout adds the timeout to the create pipeline version params +func (o *CreatePipelineVersionParams) WithTimeout(timeout time.Duration) *CreatePipelineVersionParams { + o.SetTimeout(timeout) + return o +} + +// SetTimeout adds the timeout to the create pipeline version params +func (o *CreatePipelineVersionParams) SetTimeout(timeout time.Duration) { + o.timeout = timeout +} + +// WithContext adds the context to the create pipeline version params +func (o *CreatePipelineVersionParams) WithContext(ctx context.Context) *CreatePipelineVersionParams { + o.SetContext(ctx) + return o +} + +// SetContext adds the context to the create pipeline version params +func (o *CreatePipelineVersionParams) SetContext(ctx context.Context) { + o.Context = ctx +} + +// WithHTTPClient adds the HTTPClient to the create pipeline version params +func (o *CreatePipelineVersionParams) WithHTTPClient(client *http.Client) *CreatePipelineVersionParams { + o.SetHTTPClient(client) + return o +} + +// SetHTTPClient adds the HTTPClient to the create pipeline version params +func (o *CreatePipelineVersionParams) SetHTTPClient(client *http.Client) { + o.HTTPClient = client +} + +// WithBody adds the body to the create pipeline version params +func (o *CreatePipelineVersionParams) WithBody(body *pipeline_model.APIPipelineVersion) *CreatePipelineVersionParams { + o.SetBody(body) + return o +} + +// SetBody adds the body to the create pipeline version params +func (o *CreatePipelineVersionParams) SetBody(body *pipeline_model.APIPipelineVersion) { + o.Body = body +} + +// WriteToRequest writes these params to a swagger request +func (o *CreatePipelineVersionParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { + + if err := r.SetTimeout(o.timeout); err != nil { + return err + } + var res []error + + if o.Body != nil { + if err := r.SetBodyParam(o.Body); err != nil { + return err + } + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} diff --git a/backend/api/go_http_client/pipeline_client/pipeline_service/create_pipeline_version_responses.go b/backend/api/go_http_client/pipeline_client/pipeline_service/create_pipeline_version_responses.go new file mode 100644 index 00000000000..02f89050c01 --- /dev/null +++ b/backend/api/go_http_client/pipeline_client/pipeline_service/create_pipeline_version_responses.go @@ -0,0 +1,126 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by go-swagger; DO NOT EDIT. + +package pipeline_service + +// This file was generated by the swagger tool. 
+// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "fmt" + "io" + + "github.com/go-openapi/runtime" + + strfmt "github.com/go-openapi/strfmt" + + pipeline_model "github.com/kubeflow/pipelines/backend/api/go_http_client/pipeline_model" +) + +// CreatePipelineVersionReader is a Reader for the CreatePipelineVersion structure. +type CreatePipelineVersionReader struct { + formats strfmt.Registry +} + +// ReadResponse reads a server response into the received o. +func (o *CreatePipelineVersionReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { + switch response.Code() { + + case 200: + result := NewCreatePipelineVersionOK() + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + return result, nil + + default: + result := NewCreatePipelineVersionDefault(response.Code()) + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + if response.Code()/100 == 2 { + return result, nil + } + return nil, result + } +} + +// NewCreatePipelineVersionOK creates a CreatePipelineVersionOK with default headers values +func NewCreatePipelineVersionOK() *CreatePipelineVersionOK { + return &CreatePipelineVersionOK{} +} + +/*CreatePipelineVersionOK handles this case with default header values. + +A successful response. +*/ +type CreatePipelineVersionOK struct { + Payload *pipeline_model.APIPipelineVersion +} + +func (o *CreatePipelineVersionOK) Error() string { + return fmt.Sprintf("[POST /apis/v1beta1/pipeline_versions][%d] createPipelineVersionOK %+v", 200, o.Payload) +} + +func (o *CreatePipelineVersionOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + o.Payload = new(pipeline_model.APIPipelineVersion) + + // response payload + if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} + +// NewCreatePipelineVersionDefault creates a CreatePipelineVersionDefault with default headers values +func NewCreatePipelineVersionDefault(code int) *CreatePipelineVersionDefault { + return &CreatePipelineVersionDefault{ + _statusCode: code, + } +} + +/*CreatePipelineVersionDefault handles this case with default header values. 
+ +CreatePipelineVersionDefault create pipeline version default +*/ +type CreatePipelineVersionDefault struct { + _statusCode int + + Payload *pipeline_model.APIStatus +} + +// Code gets the status code for the create pipeline version default response +func (o *CreatePipelineVersionDefault) Code() int { + return o._statusCode +} + +func (o *CreatePipelineVersionDefault) Error() string { + return fmt.Sprintf("[POST /apis/v1beta1/pipeline_versions][%d] CreatePipelineVersion default %+v", o._statusCode, o.Payload) +} + +func (o *CreatePipelineVersionDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + o.Payload = new(pipeline_model.APIStatus) + + // response payload + if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} diff --git a/backend/api/go_http_client/pipeline_client/pipeline_service/delete_pipeline_parameters.go b/backend/api/go_http_client/pipeline_client/pipeline_service/delete_pipeline_parameters.go index af2ae13f0c6..1c919efca4d 100644 --- a/backend/api/go_http_client/pipeline_client/pipeline_service/delete_pipeline_parameters.go +++ b/backend/api/go_http_client/pipeline_client/pipeline_service/delete_pipeline_parameters.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/pipeline_client/pipeline_service/delete_pipeline_responses.go b/backend/api/go_http_client/pipeline_client/pipeline_service/delete_pipeline_responses.go index 94e36230eca..8a6b1e67d19 100644 --- a/backend/api/go_http_client/pipeline_client/pipeline_service/delete_pipeline_responses.go +++ b/backend/api/go_http_client/pipeline_client/pipeline_service/delete_pipeline_responses.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/pipeline_client/pipeline_service/delete_pipeline_version_parameters.go b/backend/api/go_http_client/pipeline_client/pipeline_service/delete_pipeline_version_parameters.go new file mode 100644 index 00000000000..6fbb9b4a235 --- /dev/null +++ b/backend/api/go_http_client/pipeline_client/pipeline_service/delete_pipeline_version_parameters.go @@ -0,0 +1,147 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by go-swagger; DO NOT EDIT. + +package pipeline_service + +// This file was generated by the swagger tool. 
+// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "net/http" + "time" + + "github.com/go-openapi/errors" + "github.com/go-openapi/runtime" + cr "github.com/go-openapi/runtime/client" + + strfmt "github.com/go-openapi/strfmt" +) + +// NewDeletePipelineVersionParams creates a new DeletePipelineVersionParams object +// with the default values initialized. +func NewDeletePipelineVersionParams() *DeletePipelineVersionParams { + var () + return &DeletePipelineVersionParams{ + + timeout: cr.DefaultTimeout, + } +} + +// NewDeletePipelineVersionParamsWithTimeout creates a new DeletePipelineVersionParams object +// with the default values initialized, and the ability to set a timeout on a request +func NewDeletePipelineVersionParamsWithTimeout(timeout time.Duration) *DeletePipelineVersionParams { + var () + return &DeletePipelineVersionParams{ + + timeout: timeout, + } +} + +// NewDeletePipelineVersionParamsWithContext creates a new DeletePipelineVersionParams object +// with the default values initialized, and the ability to set a context for a request +func NewDeletePipelineVersionParamsWithContext(ctx context.Context) *DeletePipelineVersionParams { + var () + return &DeletePipelineVersionParams{ + + Context: ctx, + } +} + +// NewDeletePipelineVersionParamsWithHTTPClient creates a new DeletePipelineVersionParams object +// with the default values initialized, and the ability to set a custom HTTPClient for a request +func NewDeletePipelineVersionParamsWithHTTPClient(client *http.Client) *DeletePipelineVersionParams { + var () + return &DeletePipelineVersionParams{ + HTTPClient: client, + } +} + +/*DeletePipelineVersionParams contains all the parameters to send to the API endpoint +for the delete pipeline version operation typically these are written to a http.Request +*/ +type DeletePipelineVersionParams struct { + + /*VersionID*/ + VersionID string + + timeout time.Duration + Context context.Context + HTTPClient *http.Client +} + +// WithTimeout adds the timeout to the delete pipeline version params +func (o *DeletePipelineVersionParams) WithTimeout(timeout time.Duration) *DeletePipelineVersionParams { + o.SetTimeout(timeout) + return o +} + +// SetTimeout adds the timeout to the delete pipeline version params +func (o *DeletePipelineVersionParams) SetTimeout(timeout time.Duration) { + o.timeout = timeout +} + +// WithContext adds the context to the delete pipeline version params +func (o *DeletePipelineVersionParams) WithContext(ctx context.Context) *DeletePipelineVersionParams { + o.SetContext(ctx) + return o +} + +// SetContext adds the context to the delete pipeline version params +func (o *DeletePipelineVersionParams) SetContext(ctx context.Context) { + o.Context = ctx +} + +// WithHTTPClient adds the HTTPClient to the delete pipeline version params +func (o *DeletePipelineVersionParams) WithHTTPClient(client *http.Client) *DeletePipelineVersionParams { + o.SetHTTPClient(client) + return o +} + +// SetHTTPClient adds the HTTPClient to the delete pipeline version params +func (o *DeletePipelineVersionParams) SetHTTPClient(client *http.Client) { + o.HTTPClient = client +} + +// WithVersionID adds the versionID to the delete pipeline version params +func (o *DeletePipelineVersionParams) WithVersionID(versionID string) *DeletePipelineVersionParams { + o.SetVersionID(versionID) + return o +} + +// SetVersionID adds the versionId to the delete pipeline version params +func (o *DeletePipelineVersionParams) SetVersionID(versionID string) { 
+ o.VersionID = versionID +} + +// WriteToRequest writes these params to a swagger request +func (o *DeletePipelineVersionParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { + + if err := r.SetTimeout(o.timeout); err != nil { + return err + } + var res []error + + // path param version_id + if err := r.SetPathParam("version_id", o.VersionID); err != nil { + return err + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} diff --git a/backend/api/go_http_client/pipeline_client/pipeline_service/delete_pipeline_version_responses.go b/backend/api/go_http_client/pipeline_client/pipeline_service/delete_pipeline_version_responses.go new file mode 100644 index 00000000000..a82d5534178 --- /dev/null +++ b/backend/api/go_http_client/pipeline_client/pipeline_service/delete_pipeline_version_responses.go @@ -0,0 +1,124 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by go-swagger; DO NOT EDIT. + +package pipeline_service + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "fmt" + "io" + + "github.com/go-openapi/runtime" + + strfmt "github.com/go-openapi/strfmt" + + pipeline_model "github.com/kubeflow/pipelines/backend/api/go_http_client/pipeline_model" +) + +// DeletePipelineVersionReader is a Reader for the DeletePipelineVersion structure. +type DeletePipelineVersionReader struct { + formats strfmt.Registry +} + +// ReadResponse reads a server response into the received o. +func (o *DeletePipelineVersionReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { + switch response.Code() { + + case 200: + result := NewDeletePipelineVersionOK() + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + return result, nil + + default: + result := NewDeletePipelineVersionDefault(response.Code()) + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + if response.Code()/100 == 2 { + return result, nil + } + return nil, result + } +} + +// NewDeletePipelineVersionOK creates a DeletePipelineVersionOK with default headers values +func NewDeletePipelineVersionOK() *DeletePipelineVersionOK { + return &DeletePipelineVersionOK{} +} + +/*DeletePipelineVersionOK handles this case with default header values. + +A successful response. 
+*/ +type DeletePipelineVersionOK struct { + Payload interface{} +} + +func (o *DeletePipelineVersionOK) Error() string { + return fmt.Sprintf("[DELETE /apis/v1beta1/pipeline_versions/{version_id}][%d] deletePipelineVersionOK %+v", 200, o.Payload) +} + +func (o *DeletePipelineVersionOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + // response payload + if err := consumer.Consume(response.Body(), &o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} + +// NewDeletePipelineVersionDefault creates a DeletePipelineVersionDefault with default headers values +func NewDeletePipelineVersionDefault(code int) *DeletePipelineVersionDefault { + return &DeletePipelineVersionDefault{ + _statusCode: code, + } +} + +/*DeletePipelineVersionDefault handles this case with default header values. + +DeletePipelineVersionDefault delete pipeline version default +*/ +type DeletePipelineVersionDefault struct { + _statusCode int + + Payload *pipeline_model.APIStatus +} + +// Code gets the status code for the delete pipeline version default response +func (o *DeletePipelineVersionDefault) Code() int { + return o._statusCode +} + +func (o *DeletePipelineVersionDefault) Error() string { + return fmt.Sprintf("[DELETE /apis/v1beta1/pipeline_versions/{version_id}][%d] DeletePipelineVersion default %+v", o._statusCode, o.Payload) +} + +func (o *DeletePipelineVersionDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + o.Payload = new(pipeline_model.APIStatus) + + // response payload + if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} diff --git a/backend/api/go_http_client/pipeline_client/pipeline_service/get_pipeline_parameters.go b/backend/api/go_http_client/pipeline_client/pipeline_service/get_pipeline_parameters.go index 5739265b175..b58421251b5 100644 --- a/backend/api/go_http_client/pipeline_client/pipeline_service/get_pipeline_parameters.go +++ b/backend/api/go_http_client/pipeline_client/pipeline_service/get_pipeline_parameters.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/pipeline_client/pipeline_service/get_pipeline_responses.go b/backend/api/go_http_client/pipeline_client/pipeline_service/get_pipeline_responses.go index e30269545f4..02f8a34b064 100644 --- a/backend/api/go_http_client/pipeline_client/pipeline_service/get_pipeline_responses.go +++ b/backend/api/go_http_client/pipeline_client/pipeline_service/get_pipeline_responses.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
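All of the new pipeline-version params types follow the same generated builder pattern: a New*Params constructor plus chainable With* setters. A minimal hypothetical sketch (the version ID is made up, and the final service-client call is implied by the generated pipeline_service package rather than shown in these hunks):

package main

import (
	"fmt"
	"time"

	pipeline_service "github.com/kubeflow/pipelines/backend/api/go_http_client/pipeline_client/pipeline_service"
)

func main() {
	// Builder-style construction, as generated for every params type above.
	params := pipeline_service.NewDeletePipelineVersionParams().
		WithVersionID("e3b0c442-98fc-4c14-9afb-f4c8996fb924"). // hypothetical UUID
		WithTimeout(30 * time.Second)

	fmt.Println("would delete pipeline version", params.VersionID)
	// params would then be passed to the generated pipeline service client's
	// DeletePipelineVersion method together with auth info.
}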
diff --git a/backend/api/go_http_client/pipeline_client/pipeline_service/get_pipeline_version_parameters.go b/backend/api/go_http_client/pipeline_client/pipeline_service/get_pipeline_version_parameters.go new file mode 100644 index 00000000000..3ac3c316fa2 --- /dev/null +++ b/backend/api/go_http_client/pipeline_client/pipeline_service/get_pipeline_version_parameters.go @@ -0,0 +1,147 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by go-swagger; DO NOT EDIT. + +package pipeline_service + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "net/http" + "time" + + "github.com/go-openapi/errors" + "github.com/go-openapi/runtime" + cr "github.com/go-openapi/runtime/client" + + strfmt "github.com/go-openapi/strfmt" +) + +// NewGetPipelineVersionParams creates a new GetPipelineVersionParams object +// with the default values initialized. +func NewGetPipelineVersionParams() *GetPipelineVersionParams { + var () + return &GetPipelineVersionParams{ + + timeout: cr.DefaultTimeout, + } +} + +// NewGetPipelineVersionParamsWithTimeout creates a new GetPipelineVersionParams object +// with the default values initialized, and the ability to set a timeout on a request +func NewGetPipelineVersionParamsWithTimeout(timeout time.Duration) *GetPipelineVersionParams { + var () + return &GetPipelineVersionParams{ + + timeout: timeout, + } +} + +// NewGetPipelineVersionParamsWithContext creates a new GetPipelineVersionParams object +// with the default values initialized, and the ability to set a context for a request +func NewGetPipelineVersionParamsWithContext(ctx context.Context) *GetPipelineVersionParams { + var () + return &GetPipelineVersionParams{ + + Context: ctx, + } +} + +// NewGetPipelineVersionParamsWithHTTPClient creates a new GetPipelineVersionParams object +// with the default values initialized, and the ability to set a custom HTTPClient for a request +func NewGetPipelineVersionParamsWithHTTPClient(client *http.Client) *GetPipelineVersionParams { + var () + return &GetPipelineVersionParams{ + HTTPClient: client, + } +} + +/*GetPipelineVersionParams contains all the parameters to send to the API endpoint +for the get pipeline version operation typically these are written to a http.Request +*/ +type GetPipelineVersionParams struct { + + /*VersionID*/ + VersionID string + + timeout time.Duration + Context context.Context + HTTPClient *http.Client +} + +// WithTimeout adds the timeout to the get pipeline version params +func (o *GetPipelineVersionParams) WithTimeout(timeout time.Duration) *GetPipelineVersionParams { + o.SetTimeout(timeout) + return o +} + +// SetTimeout adds the timeout to the get pipeline version params +func (o *GetPipelineVersionParams) SetTimeout(timeout time.Duration) { + o.timeout = timeout +} + +// WithContext adds the context to the get pipeline version params +func (o *GetPipelineVersionParams) 
WithContext(ctx context.Context) *GetPipelineVersionParams { + o.SetContext(ctx) + return o +} + +// SetContext adds the context to the get pipeline version params +func (o *GetPipelineVersionParams) SetContext(ctx context.Context) { + o.Context = ctx +} + +// WithHTTPClient adds the HTTPClient to the get pipeline version params +func (o *GetPipelineVersionParams) WithHTTPClient(client *http.Client) *GetPipelineVersionParams { + o.SetHTTPClient(client) + return o +} + +// SetHTTPClient adds the HTTPClient to the get pipeline version params +func (o *GetPipelineVersionParams) SetHTTPClient(client *http.Client) { + o.HTTPClient = client +} + +// WithVersionID adds the versionID to the get pipeline version params +func (o *GetPipelineVersionParams) WithVersionID(versionID string) *GetPipelineVersionParams { + o.SetVersionID(versionID) + return o +} + +// SetVersionID adds the versionId to the get pipeline version params +func (o *GetPipelineVersionParams) SetVersionID(versionID string) { + o.VersionID = versionID +} + +// WriteToRequest writes these params to a swagger request +func (o *GetPipelineVersionParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { + + if err := r.SetTimeout(o.timeout); err != nil { + return err + } + var res []error + + // path param version_id + if err := r.SetPathParam("version_id", o.VersionID); err != nil { + return err + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} diff --git a/backend/api/go_http_client/pipeline_client/pipeline_service/get_pipeline_version_responses.go b/backend/api/go_http_client/pipeline_client/pipeline_service/get_pipeline_version_responses.go new file mode 100644 index 00000000000..9eb1384b939 --- /dev/null +++ b/backend/api/go_http_client/pipeline_client/pipeline_service/get_pipeline_version_responses.go @@ -0,0 +1,126 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by go-swagger; DO NOT EDIT. + +package pipeline_service + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "fmt" + "io" + + "github.com/go-openapi/runtime" + + strfmt "github.com/go-openapi/strfmt" + + pipeline_model "github.com/kubeflow/pipelines/backend/api/go_http_client/pipeline_model" +) + +// GetPipelineVersionReader is a Reader for the GetPipelineVersion structure. +type GetPipelineVersionReader struct { + formats strfmt.Registry +} + +// ReadResponse reads a server response into the received o. 
+func (o *GetPipelineVersionReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { + switch response.Code() { + + case 200: + result := NewGetPipelineVersionOK() + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + return result, nil + + default: + result := NewGetPipelineVersionDefault(response.Code()) + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + if response.Code()/100 == 2 { + return result, nil + } + return nil, result + } +} + +// NewGetPipelineVersionOK creates a GetPipelineVersionOK with default headers values +func NewGetPipelineVersionOK() *GetPipelineVersionOK { + return &GetPipelineVersionOK{} +} + +/*GetPipelineVersionOK handles this case with default header values. + +A successful response. +*/ +type GetPipelineVersionOK struct { + Payload *pipeline_model.APIPipelineVersion +} + +func (o *GetPipelineVersionOK) Error() string { + return fmt.Sprintf("[GET /apis/v1beta1/pipeline_versions/{version_id}][%d] getPipelineVersionOK %+v", 200, o.Payload) +} + +func (o *GetPipelineVersionOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + o.Payload = new(pipeline_model.APIPipelineVersion) + + // response payload + if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} + +// NewGetPipelineVersionDefault creates a GetPipelineVersionDefault with default headers values +func NewGetPipelineVersionDefault(code int) *GetPipelineVersionDefault { + return &GetPipelineVersionDefault{ + _statusCode: code, + } +} + +/*GetPipelineVersionDefault handles this case with default header values. + +GetPipelineVersionDefault get pipeline version default +*/ +type GetPipelineVersionDefault struct { + _statusCode int + + Payload *pipeline_model.APIStatus +} + +// Code gets the status code for the get pipeline version default response +func (o *GetPipelineVersionDefault) Code() int { + return o._statusCode +} + +func (o *GetPipelineVersionDefault) Error() string { + return fmt.Sprintf("[GET /apis/v1beta1/pipeline_versions/{version_id}][%d] GetPipelineVersion default %+v", o._statusCode, o.Payload) +} + +func (o *GetPipelineVersionDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + o.Payload = new(pipeline_model.APIStatus) + + // response payload + if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} diff --git a/backend/api/go_http_client/pipeline_client/pipeline_service/get_pipeline_version_template_parameters.go b/backend/api/go_http_client/pipeline_client/pipeline_service/get_pipeline_version_template_parameters.go new file mode 100644 index 00000000000..818718fd829 --- /dev/null +++ b/backend/api/go_http_client/pipeline_client/pipeline_service/get_pipeline_version_template_parameters.go @@ -0,0 +1,147 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by go-swagger; DO NOT EDIT. + +package pipeline_service + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "net/http" + "time" + + "github.com/go-openapi/errors" + "github.com/go-openapi/runtime" + cr "github.com/go-openapi/runtime/client" + + strfmt "github.com/go-openapi/strfmt" +) + +// NewGetPipelineVersionTemplateParams creates a new GetPipelineVersionTemplateParams object +// with the default values initialized. +func NewGetPipelineVersionTemplateParams() *GetPipelineVersionTemplateParams { + var () + return &GetPipelineVersionTemplateParams{ + + timeout: cr.DefaultTimeout, + } +} + +// NewGetPipelineVersionTemplateParamsWithTimeout creates a new GetPipelineVersionTemplateParams object +// with the default values initialized, and the ability to set a timeout on a request +func NewGetPipelineVersionTemplateParamsWithTimeout(timeout time.Duration) *GetPipelineVersionTemplateParams { + var () + return &GetPipelineVersionTemplateParams{ + + timeout: timeout, + } +} + +// NewGetPipelineVersionTemplateParamsWithContext creates a new GetPipelineVersionTemplateParams object +// with the default values initialized, and the ability to set a context for a request +func NewGetPipelineVersionTemplateParamsWithContext(ctx context.Context) *GetPipelineVersionTemplateParams { + var () + return &GetPipelineVersionTemplateParams{ + + Context: ctx, + } +} + +// NewGetPipelineVersionTemplateParamsWithHTTPClient creates a new GetPipelineVersionTemplateParams object +// with the default values initialized, and the ability to set a custom HTTPClient for a request +func NewGetPipelineVersionTemplateParamsWithHTTPClient(client *http.Client) *GetPipelineVersionTemplateParams { + var () + return &GetPipelineVersionTemplateParams{ + HTTPClient: client, + } +} + +/*GetPipelineVersionTemplateParams contains all the parameters to send to the API endpoint +for the get pipeline version template operation typically these are written to a http.Request +*/ +type GetPipelineVersionTemplateParams struct { + + /*VersionID*/ + VersionID string + + timeout time.Duration + Context context.Context + HTTPClient *http.Client +} + +// WithTimeout adds the timeout to the get pipeline version template params +func (o *GetPipelineVersionTemplateParams) WithTimeout(timeout time.Duration) *GetPipelineVersionTemplateParams { + o.SetTimeout(timeout) + return o +} + +// SetTimeout adds the timeout to the get pipeline version template params +func (o *GetPipelineVersionTemplateParams) SetTimeout(timeout time.Duration) { + o.timeout = timeout +} + +// WithContext adds the context to the get pipeline version template params +func (o *GetPipelineVersionTemplateParams) WithContext(ctx context.Context) *GetPipelineVersionTemplateParams { + o.SetContext(ctx) + return o +} + +// SetContext adds the context to the get pipeline version template params +func (o *GetPipelineVersionTemplateParams) SetContext(ctx context.Context) { + o.Context = ctx +} + +// WithHTTPClient adds the HTTPClient to the get pipeline version template params +func (o *GetPipelineVersionTemplateParams) WithHTTPClient(client *http.Client) *GetPipelineVersionTemplateParams { + o.SetHTTPClient(client) + return o +} + +// SetHTTPClient adds the HTTPClient to the get pipeline version template params +func (o *GetPipelineVersionTemplateParams) 
SetHTTPClient(client *http.Client) { + o.HTTPClient = client +} + +// WithVersionID adds the versionID to the get pipeline version template params +func (o *GetPipelineVersionTemplateParams) WithVersionID(versionID string) *GetPipelineVersionTemplateParams { + o.SetVersionID(versionID) + return o +} + +// SetVersionID adds the versionId to the get pipeline version template params +func (o *GetPipelineVersionTemplateParams) SetVersionID(versionID string) { + o.VersionID = versionID +} + +// WriteToRequest writes these params to a swagger request +func (o *GetPipelineVersionTemplateParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { + + if err := r.SetTimeout(o.timeout); err != nil { + return err + } + var res []error + + // path param version_id + if err := r.SetPathParam("version_id", o.VersionID); err != nil { + return err + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} diff --git a/backend/api/go_http_client/pipeline_client/pipeline_service/get_pipeline_version_template_responses.go b/backend/api/go_http_client/pipeline_client/pipeline_service/get_pipeline_version_template_responses.go new file mode 100644 index 00000000000..38004f4cd8b --- /dev/null +++ b/backend/api/go_http_client/pipeline_client/pipeline_service/get_pipeline_version_template_responses.go @@ -0,0 +1,126 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by go-swagger; DO NOT EDIT. + +package pipeline_service + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "fmt" + "io" + + "github.com/go-openapi/runtime" + + strfmt "github.com/go-openapi/strfmt" + + pipeline_model "github.com/kubeflow/pipelines/backend/api/go_http_client/pipeline_model" +) + +// GetPipelineVersionTemplateReader is a Reader for the GetPipelineVersionTemplate structure. +type GetPipelineVersionTemplateReader struct { + formats strfmt.Registry +} + +// ReadResponse reads a server response into the received o. +func (o *GetPipelineVersionTemplateReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { + switch response.Code() { + + case 200: + result := NewGetPipelineVersionTemplateOK() + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + return result, nil + + default: + result := NewGetPipelineVersionTemplateDefault(response.Code()) + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + if response.Code()/100 == 2 { + return result, nil + } + return nil, result + } +} + +// NewGetPipelineVersionTemplateOK creates a GetPipelineVersionTemplateOK with default headers values +func NewGetPipelineVersionTemplateOK() *GetPipelineVersionTemplateOK { + return &GetPipelineVersionTemplateOK{} +} + +/*GetPipelineVersionTemplateOK handles this case with default header values. 
+ +A successful response. +*/ +type GetPipelineVersionTemplateOK struct { + Payload *pipeline_model.APIGetTemplateResponse +} + +func (o *GetPipelineVersionTemplateOK) Error() string { + return fmt.Sprintf("[GET /apis/v1beta1/pipeline_versions/{version_id}/templates][%d] getPipelineVersionTemplateOK %+v", 200, o.Payload) +} + +func (o *GetPipelineVersionTemplateOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + o.Payload = new(pipeline_model.APIGetTemplateResponse) + + // response payload + if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} + +// NewGetPipelineVersionTemplateDefault creates a GetPipelineVersionTemplateDefault with default headers values +func NewGetPipelineVersionTemplateDefault(code int) *GetPipelineVersionTemplateDefault { + return &GetPipelineVersionTemplateDefault{ + _statusCode: code, + } +} + +/*GetPipelineVersionTemplateDefault handles this case with default header values. + +GetPipelineVersionTemplateDefault get pipeline version template default +*/ +type GetPipelineVersionTemplateDefault struct { + _statusCode int + + Payload *pipeline_model.APIStatus +} + +// Code gets the status code for the get pipeline version template default response +func (o *GetPipelineVersionTemplateDefault) Code() int { + return o._statusCode +} + +func (o *GetPipelineVersionTemplateDefault) Error() string { + return fmt.Sprintf("[GET /apis/v1beta1/pipeline_versions/{version_id}/templates][%d] GetPipelineVersionTemplate default %+v", o._statusCode, o.Payload) +} + +func (o *GetPipelineVersionTemplateDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + o.Payload = new(pipeline_model.APIStatus) + + // response payload + if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} diff --git a/backend/api/go_http_client/pipeline_client/pipeline_service/get_template_parameters.go b/backend/api/go_http_client/pipeline_client/pipeline_service/get_template_parameters.go index 3bed728968d..aa9ca0f3493 100644 --- a/backend/api/go_http_client/pipeline_client/pipeline_service/get_template_parameters.go +++ b/backend/api/go_http_client/pipeline_client/pipeline_service/get_template_parameters.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/pipeline_client/pipeline_service/get_template_responses.go b/backend/api/go_http_client/pipeline_client/pipeline_service/get_template_responses.go index 8553d5bb637..70d35f755ee 100644 --- a/backend/api/go_http_client/pipeline_client/pipeline_service/get_template_responses.go +++ b/backend/api/go_http_client/pipeline_client/pipeline_service/get_template_responses.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
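Reviewer note: GetPipelineVersionTemplate mirrors the existing GetTemplate flow, keyed off a version ID instead of a pipeline ID. A sketch under the same assumptions as the previous one (placeholder host and version ID; the `Template` field of APIGetTemplateResponse is the existing YAML-bearing field, unchanged by this diff):

```go
package main

import (
	"fmt"
	"log"

	httptransport "github.com/go-openapi/runtime/client"
	"github.com/go-openapi/strfmt"

	ps "github.com/kubeflow/pipelines/backend/api/go_http_client/pipeline_client/pipeline_service"
)

func main() {
	transport := httptransport.New("localhost:8888", "/", []string{"http"})
	client := ps.New(transport, strfmt.Default)

	// Issues GET /apis/v1beta1/pipeline_versions/{version_id}/templates.
	params := ps.NewGetPipelineVersionTemplateParams().WithVersionID("version-id-placeholder")
	resp, err := client.GetPipelineVersionTemplate(params, nil)
	if err != nil {
		log.Fatalf("get pipeline version template: %v", err)
	}
	// resp.Payload is an *pipeline_model.APIGetTemplateResponse; its
	// Template field carries the workflow YAML for this version.
	fmt.Println(resp.Payload.Template)
}
```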
diff --git a/backend/api/go_http_client/pipeline_client/pipeline_service/list_pipeline_versions_parameters.go b/backend/api/go_http_client/pipeline_client/pipeline_service/list_pipeline_versions_parameters.go new file mode 100644 index 00000000000..840d9ad0131 --- /dev/null +++ b/backend/api/go_http_client/pipeline_client/pipeline_service/list_pipeline_versions_parameters.go @@ -0,0 +1,330 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by go-swagger; DO NOT EDIT. + +package pipeline_service + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "net/http" + "time" + + "github.com/go-openapi/errors" + "github.com/go-openapi/runtime" + cr "github.com/go-openapi/runtime/client" + "github.com/go-openapi/swag" + + strfmt "github.com/go-openapi/strfmt" +) + +// NewListPipelineVersionsParams creates a new ListPipelineVersionsParams object +// with the default values initialized. +func NewListPipelineVersionsParams() *ListPipelineVersionsParams { + var ( + resourceKeyTypeDefault = string("UNKNOWN_RESOURCE_TYPE") + ) + return &ListPipelineVersionsParams{ + ResourceKeyType: &resourceKeyTypeDefault, + + timeout: cr.DefaultTimeout, + } +} + +// NewListPipelineVersionsParamsWithTimeout creates a new ListPipelineVersionsParams object +// with the default values initialized, and the ability to set a timeout on a request +func NewListPipelineVersionsParamsWithTimeout(timeout time.Duration) *ListPipelineVersionsParams { + var ( + resourceKeyTypeDefault = string("UNKNOWN_RESOURCE_TYPE") + ) + return &ListPipelineVersionsParams{ + ResourceKeyType: &resourceKeyTypeDefault, + + timeout: timeout, + } +} + +// NewListPipelineVersionsParamsWithContext creates a new ListPipelineVersionsParams object +// with the default values initialized, and the ability to set a context for a request +func NewListPipelineVersionsParamsWithContext(ctx context.Context) *ListPipelineVersionsParams { + var ( + resourceKeyTypeDefault = string("UNKNOWN_RESOURCE_TYPE") + ) + return &ListPipelineVersionsParams{ + ResourceKeyType: &resourceKeyTypeDefault, + + Context: ctx, + } +} + +// NewListPipelineVersionsParamsWithHTTPClient creates a new ListPipelineVersionsParams object +// with the default values initialized, and the ability to set a custom HTTPClient for a request +func NewListPipelineVersionsParamsWithHTTPClient(client *http.Client) *ListPipelineVersionsParams { + var ( + resourceKeyTypeDefault = string("UNKNOWN_RESOURCE_TYPE") + ) + return &ListPipelineVersionsParams{ + ResourceKeyType: &resourceKeyTypeDefault, + HTTPClient: client, + } +} + +/*ListPipelineVersionsParams contains all the parameters to send to the API endpoint +for the list pipeline versions operation typically these are written to a http.Request +*/ +type ListPipelineVersionsParams struct { + + /*Filter + A base-64 encoded, JSON-serialized Filter protocol buffer (see + filter.proto). 
+
+	*/
+	Filter *string
+	/*PageSize*/
+	PageSize *int32
+	/*PageToken*/
+	PageToken *string
+	/*ResourceKeyID
+	  The ID of the resource that is referred to.
+
+	*/
+	ResourceKeyID *string
+	/*ResourceKeyType
+	  The type of the resource that is referred to.
+
+	*/
+	ResourceKeyType *string
+	/*SortBy
+	  Can be in the form of "field_name", "field_name asc" or "field_name desc".
+	  Ascending by default.
+
+	*/
+	SortBy *string
+
+	timeout    time.Duration
+	Context    context.Context
+	HTTPClient *http.Client
+}
+
+// WithTimeout adds the timeout to the list pipeline versions params
+func (o *ListPipelineVersionsParams) WithTimeout(timeout time.Duration) *ListPipelineVersionsParams {
+	o.SetTimeout(timeout)
+	return o
+}
+
+// SetTimeout adds the timeout to the list pipeline versions params
+func (o *ListPipelineVersionsParams) SetTimeout(timeout time.Duration) {
+	o.timeout = timeout
+}
+
+// WithContext adds the context to the list pipeline versions params
+func (o *ListPipelineVersionsParams) WithContext(ctx context.Context) *ListPipelineVersionsParams {
+	o.SetContext(ctx)
+	return o
+}
+
+// SetContext adds the context to the list pipeline versions params
+func (o *ListPipelineVersionsParams) SetContext(ctx context.Context) {
+	o.Context = ctx
+}
+
+// WithHTTPClient adds the HTTPClient to the list pipeline versions params
+func (o *ListPipelineVersionsParams) WithHTTPClient(client *http.Client) *ListPipelineVersionsParams {
+	o.SetHTTPClient(client)
+	return o
+}
+
+// SetHTTPClient adds the HTTPClient to the list pipeline versions params
+func (o *ListPipelineVersionsParams) SetHTTPClient(client *http.Client) {
+	o.HTTPClient = client
+}
+
+// WithFilter adds the filter to the list pipeline versions params
+func (o *ListPipelineVersionsParams) WithFilter(filter *string) *ListPipelineVersionsParams {
+	o.SetFilter(filter)
+	return o
+}
+
+// SetFilter adds the filter to the list pipeline versions params
+func (o *ListPipelineVersionsParams) SetFilter(filter *string) {
+	o.Filter = filter
+}
+
+// WithPageSize adds the pageSize to the list pipeline versions params
+func (o *ListPipelineVersionsParams) WithPageSize(pageSize *int32) *ListPipelineVersionsParams {
+	o.SetPageSize(pageSize)
+	return o
+}
+
+// SetPageSize adds the pageSize to the list pipeline versions params
+func (o *ListPipelineVersionsParams) SetPageSize(pageSize *int32) {
+	o.PageSize = pageSize
+}
+
+// WithPageToken adds the pageToken to the list pipeline versions params
+func (o *ListPipelineVersionsParams) WithPageToken(pageToken *string) *ListPipelineVersionsParams {
+	o.SetPageToken(pageToken)
+	return o
+}
+
+// SetPageToken adds the pageToken to the list pipeline versions params
+func (o *ListPipelineVersionsParams) SetPageToken(pageToken *string) {
+	o.PageToken = pageToken
+}
+
+// WithResourceKeyID adds the resourceKeyID to the list pipeline versions params
+func (o *ListPipelineVersionsParams) WithResourceKeyID(resourceKeyID *string) *ListPipelineVersionsParams {
+	o.SetResourceKeyID(resourceKeyID)
+	return o
+}
+
+// SetResourceKeyID adds the resourceKeyId to the list pipeline versions params
+func (o *ListPipelineVersionsParams) SetResourceKeyID(resourceKeyID *string) {
+	o.ResourceKeyID = resourceKeyID
+}
+
+// WithResourceKeyType adds the resourceKeyType to the list pipeline versions params
+func (o *ListPipelineVersionsParams) WithResourceKeyType(resourceKeyType *string) *ListPipelineVersionsParams {
+	o.SetResourceKeyType(resourceKeyType)
+	return o
+}
+
+// SetResourceKeyType adds the resourceKeyType to the list pipeline 
versions params +func (o *ListPipelineVersionsParams) SetResourceKeyType(resourceKeyType *string) { + o.ResourceKeyType = resourceKeyType +} + +// WithSortBy adds the sortBy to the list pipeline versions params +func (o *ListPipelineVersionsParams) WithSortBy(sortBy *string) *ListPipelineVersionsParams { + o.SetSortBy(sortBy) + return o +} + +// SetSortBy adds the sortBy to the list pipeline versions params +func (o *ListPipelineVersionsParams) SetSortBy(sortBy *string) { + o.SortBy = sortBy +} + +// WriteToRequest writes these params to a swagger request +func (o *ListPipelineVersionsParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { + + if err := r.SetTimeout(o.timeout); err != nil { + return err + } + var res []error + + if o.Filter != nil { + + // query param filter + var qrFilter string + if o.Filter != nil { + qrFilter = *o.Filter + } + qFilter := qrFilter + if qFilter != "" { + if err := r.SetQueryParam("filter", qFilter); err != nil { + return err + } + } + + } + + if o.PageSize != nil { + + // query param page_size + var qrPageSize int32 + if o.PageSize != nil { + qrPageSize = *o.PageSize + } + qPageSize := swag.FormatInt32(qrPageSize) + if qPageSize != "" { + if err := r.SetQueryParam("page_size", qPageSize); err != nil { + return err + } + } + + } + + if o.PageToken != nil { + + // query param page_token + var qrPageToken string + if o.PageToken != nil { + qrPageToken = *o.PageToken + } + qPageToken := qrPageToken + if qPageToken != "" { + if err := r.SetQueryParam("page_token", qPageToken); err != nil { + return err + } + } + + } + + if o.ResourceKeyID != nil { + + // query param resource_key.id + var qrResourceKeyID string + if o.ResourceKeyID != nil { + qrResourceKeyID = *o.ResourceKeyID + } + qResourceKeyID := qrResourceKeyID + if qResourceKeyID != "" { + if err := r.SetQueryParam("resource_key.id", qResourceKeyID); err != nil { + return err + } + } + + } + + if o.ResourceKeyType != nil { + + // query param resource_key.type + var qrResourceKeyType string + if o.ResourceKeyType != nil { + qrResourceKeyType = *o.ResourceKeyType + } + qResourceKeyType := qrResourceKeyType + if qResourceKeyType != "" { + if err := r.SetQueryParam("resource_key.type", qResourceKeyType); err != nil { + return err + } + } + + } + + if o.SortBy != nil { + + // query param sort_by + var qrSortBy string + if o.SortBy != nil { + qrSortBy = *o.SortBy + } + qSortBy := qrSortBy + if qSortBy != "" { + if err := r.SetQueryParam("sort_by", qSortBy); err != nil { + return err + } + } + + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} diff --git a/backend/api/go_http_client/pipeline_client/pipeline_service/list_pipeline_versions_responses.go b/backend/api/go_http_client/pipeline_client/pipeline_service/list_pipeline_versions_responses.go new file mode 100644 index 00000000000..a3229bb7399 --- /dev/null +++ b/backend/api/go_http_client/pipeline_client/pipeline_service/list_pipeline_versions_responses.go @@ -0,0 +1,126 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by go-swagger; DO NOT EDIT. + +package pipeline_service + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "fmt" + "io" + + "github.com/go-openapi/runtime" + + strfmt "github.com/go-openapi/strfmt" + + pipeline_model "github.com/kubeflow/pipelines/backend/api/go_http_client/pipeline_model" +) + +// ListPipelineVersionsReader is a Reader for the ListPipelineVersions structure. +type ListPipelineVersionsReader struct { + formats strfmt.Registry +} + +// ReadResponse reads a server response into the received o. +func (o *ListPipelineVersionsReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { + switch response.Code() { + + case 200: + result := NewListPipelineVersionsOK() + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + return result, nil + + default: + result := NewListPipelineVersionsDefault(response.Code()) + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + if response.Code()/100 == 2 { + return result, nil + } + return nil, result + } +} + +// NewListPipelineVersionsOK creates a ListPipelineVersionsOK with default headers values +func NewListPipelineVersionsOK() *ListPipelineVersionsOK { + return &ListPipelineVersionsOK{} +} + +/*ListPipelineVersionsOK handles this case with default header values. + +A successful response. +*/ +type ListPipelineVersionsOK struct { + Payload *pipeline_model.APIListPipelineVersionsResponse +} + +func (o *ListPipelineVersionsOK) Error() string { + return fmt.Sprintf("[GET /apis/v1beta1/pipeline_versions][%d] listPipelineVersionsOK %+v", 200, o.Payload) +} + +func (o *ListPipelineVersionsOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + o.Payload = new(pipeline_model.APIListPipelineVersionsResponse) + + // response payload + if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} + +// NewListPipelineVersionsDefault creates a ListPipelineVersionsDefault with default headers values +func NewListPipelineVersionsDefault(code int) *ListPipelineVersionsDefault { + return &ListPipelineVersionsDefault{ + _statusCode: code, + } +} + +/*ListPipelineVersionsDefault handles this case with default header values. 
+ +ListPipelineVersionsDefault list pipeline versions default +*/ +type ListPipelineVersionsDefault struct { + _statusCode int + + Payload *pipeline_model.APIStatus +} + +// Code gets the status code for the list pipeline versions default response +func (o *ListPipelineVersionsDefault) Code() int { + return o._statusCode +} + +func (o *ListPipelineVersionsDefault) Error() string { + return fmt.Sprintf("[GET /apis/v1beta1/pipeline_versions][%d] ListPipelineVersions default %+v", o._statusCode, o.Payload) +} + +func (o *ListPipelineVersionsDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + o.Payload = new(pipeline_model.APIStatus) + + // response payload + if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} diff --git a/backend/api/go_http_client/pipeline_client/pipeline_service/list_pipelines_parameters.go b/backend/api/go_http_client/pipeline_client/pipeline_service/list_pipelines_parameters.go index 6375cecd190..d8ad913f541 100644 --- a/backend/api/go_http_client/pipeline_client/pipeline_service/list_pipelines_parameters.go +++ b/backend/api/go_http_client/pipeline_client/pipeline_service/list_pipelines_parameters.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -77,8 +77,9 @@ for the list pipelines operation typically these are written to a http.Request type ListPipelinesParams struct { /*Filter - A base-64 encoded, JSON-serialized Filter protocol buffer (see - filter.proto). + A url-encoded, JSON-serialized Filter protocol buffer (see + [filter.proto](https://github.com/kubeflow/pipelines/ + blob/master/backend/api/filter.proto)). */ Filter *string diff --git a/backend/api/go_http_client/pipeline_client/pipeline_service/list_pipelines_responses.go b/backend/api/go_http_client/pipeline_client/pipeline_service/list_pipelines_responses.go index a398468641f..bd0d7cf3d8c 100644 --- a/backend/api/go_http_client/pipeline_client/pipeline_service/list_pipelines_responses.go +++ b/backend/api/go_http_client/pipeline_client/pipeline_service/list_pipelines_responses.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/pipeline_client/pipeline_service/pipeline_service_client.go b/backend/api/go_http_client/pipeline_client/pipeline_service/pipeline_service_client.go index d6d589b812f..3f041cfd4bd 100644 --- a/backend/api/go_http_client/pipeline_client/pipeline_service/pipeline_service_client.go +++ b/backend/api/go_http_client/pipeline_client/pipeline_service/pipeline_service_client.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
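Reviewer note: ListPipelineVersions is driven by the resource_key.id and resource_key.type query parameters defined in the params file above. A sketch that lists the versions of one pipeline, newest first; the pipeline ID is a placeholder, the "PIPELINE" value matches the apiResourceType enum added later in this diff, and the pointer helpers come from the go-openapi/swag package the generated params file already imports:

```go
package main

import (
	"fmt"
	"log"

	httptransport "github.com/go-openapi/runtime/client"
	"github.com/go-openapi/strfmt"
	"github.com/go-openapi/swag"

	ps "github.com/kubeflow/pipelines/backend/api/go_http_client/pipeline_client/pipeline_service"
)

func main() {
	transport := httptransport.New("localhost:8888", "/", []string{"http"})
	client := ps.New(transport, strfmt.Default)

	// Scope the listing to one pipeline and sort by creation time,
	// descending, per the sort_by format documented above.
	params := ps.NewListPipelineVersionsParams().
		WithResourceKeyType(swag.String("PIPELINE")).
		WithResourceKeyID(swag.String("pipeline-id-placeholder")).
		WithPageSize(swag.Int32(10)).
		WithSortBy(swag.String("created_at desc"))

	resp, err := client.ListPipelineVersions(params, nil)
	if err != nil {
		log.Fatalf("list pipeline versions: %v", err)
	}
	for _, v := range resp.Payload.Versions {
		fmt.Printf("%s\t%s\n", v.ID, v.Name)
	}
}
```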
@@ -39,7 +39,7 @@ type Client struct { } /* -CreatePipeline create pipeline API +CreatePipeline adds a pipeline */ func (a *Client) CreatePipeline(params *CreatePipelineParams, authInfo runtime.ClientAuthInfoWriter) (*CreatePipelineOK, error) { // TODO: Validate the params before sending @@ -68,7 +68,36 @@ func (a *Client) CreatePipeline(params *CreatePipelineParams, authInfo runtime.C } /* -DeletePipeline delete pipeline API +CreatePipelineVersion create pipeline version API +*/ +func (a *Client) CreatePipelineVersion(params *CreatePipelineVersionParams, authInfo runtime.ClientAuthInfoWriter) (*CreatePipelineVersionOK, error) { + // TODO: Validate the params before sending + if params == nil { + params = NewCreatePipelineVersionParams() + } + + result, err := a.transport.Submit(&runtime.ClientOperation{ + ID: "CreatePipelineVersion", + Method: "POST", + PathPattern: "/apis/v1beta1/pipeline_versions", + ProducesMediaTypes: []string{"application/json"}, + ConsumesMediaTypes: []string{"application/json"}, + Schemes: []string{"http", "https"}, + Params: params, + Reader: &CreatePipelineVersionReader{formats: a.formats}, + AuthInfo: authInfo, + Context: params.Context, + Client: params.HTTPClient, + }) + if err != nil { + return nil, err + } + return result.(*CreatePipelineVersionOK), nil + +} + +/* +DeletePipeline deletes a pipeline */ func (a *Client) DeletePipeline(params *DeletePipelineParams, authInfo runtime.ClientAuthInfoWriter) (*DeletePipelineOK, error) { // TODO: Validate the params before sending @@ -97,7 +126,36 @@ func (a *Client) DeletePipeline(params *DeletePipelineParams, authInfo runtime.C } /* -GetPipeline get pipeline API +DeletePipelineVersion delete pipeline version API +*/ +func (a *Client) DeletePipelineVersion(params *DeletePipelineVersionParams, authInfo runtime.ClientAuthInfoWriter) (*DeletePipelineVersionOK, error) { + // TODO: Validate the params before sending + if params == nil { + params = NewDeletePipelineVersionParams() + } + + result, err := a.transport.Submit(&runtime.ClientOperation{ + ID: "DeletePipelineVersion", + Method: "DELETE", + PathPattern: "/apis/v1beta1/pipeline_versions/{version_id}", + ProducesMediaTypes: []string{"application/json"}, + ConsumesMediaTypes: []string{"application/json"}, + Schemes: []string{"http", "https"}, + Params: params, + Reader: &DeletePipelineVersionReader{formats: a.formats}, + AuthInfo: authInfo, + Context: params.Context, + Client: params.HTTPClient, + }) + if err != nil { + return nil, err + } + return result.(*DeletePipelineVersionOK), nil + +} + +/* +GetPipeline finds a specific pipeline by ID */ func (a *Client) GetPipeline(params *GetPipelineParams, authInfo runtime.ClientAuthInfoWriter) (*GetPipelineOK, error) { // TODO: Validate the params before sending @@ -126,7 +184,65 @@ func (a *Client) GetPipeline(params *GetPipelineParams, authInfo runtime.ClientA } /* -GetTemplate get template API +GetPipelineVersion get pipeline version API +*/ +func (a *Client) GetPipelineVersion(params *GetPipelineVersionParams, authInfo runtime.ClientAuthInfoWriter) (*GetPipelineVersionOK, error) { + // TODO: Validate the params before sending + if params == nil { + params = NewGetPipelineVersionParams() + } + + result, err := a.transport.Submit(&runtime.ClientOperation{ + ID: "GetPipelineVersion", + Method: "GET", + PathPattern: "/apis/v1beta1/pipeline_versions/{version_id}", + ProducesMediaTypes: []string{"application/json"}, + ConsumesMediaTypes: []string{"application/json"}, + Schemes: []string{"http", "https"}, + Params: params, + 
Reader:             &GetPipelineVersionReader{formats: a.formats},
+		AuthInfo:           authInfo,
+		Context:            params.Context,
+		Client:             params.HTTPClient,
+	})
+	if err != nil {
+		return nil, err
+	}
+	return result.(*GetPipelineVersionOK), nil
+
+}
+
+/*
+GetPipelineVersionTemplate get pipeline version template API
+*/
+func (a *Client) GetPipelineVersionTemplate(params *GetPipelineVersionTemplateParams, authInfo runtime.ClientAuthInfoWriter) (*GetPipelineVersionTemplateOK, error) {
+	// TODO: Validate the params before sending
+	if params == nil {
+		params = NewGetPipelineVersionTemplateParams()
+	}
+
+	result, err := a.transport.Submit(&runtime.ClientOperation{
+		ID:                 "GetPipelineVersionTemplate",
+		Method:             "GET",
+		PathPattern:        "/apis/v1beta1/pipeline_versions/{version_id}/templates",
+		ProducesMediaTypes: []string{"application/json"},
+		ConsumesMediaTypes: []string{"application/json"},
+		Schemes:            []string{"http", "https"},
+		Params:             params,
+		Reader:             &GetPipelineVersionTemplateReader{formats: a.formats},
+		AuthInfo:           authInfo,
+		Context:            params.Context,
+		Client:             params.HTTPClient,
+	})
+	if err != nil {
+		return nil, err
+	}
+	return result.(*GetPipelineVersionTemplateOK), nil
+
+}
+
+/*
+GetTemplate returns a single YAML template that contains the description, parameters and metadata associated with the pipeline provided
 */
 func (a *Client) GetTemplate(params *GetTemplateParams, authInfo runtime.ClientAuthInfoWriter) (*GetTemplateOK, error) {
 	// TODO: Validate the params before sending
@@ -155,7 +271,36 @@ func (a *Client) GetTemplate(params *GetTemplateParams, authInfo runtime.ClientA
 }
 
 /*
-ListPipelines list pipelines API
+ListPipelineVersions list pipeline versions API
+*/
+func (a *Client) ListPipelineVersions(params *ListPipelineVersionsParams, authInfo runtime.ClientAuthInfoWriter) (*ListPipelineVersionsOK, error) {
+	// TODO: Validate the params before sending
+	if params == nil {
+		params = NewListPipelineVersionsParams()
+	}
+
+	result, err := a.transport.Submit(&runtime.ClientOperation{
+		ID:                 "ListPipelineVersions",
+		Method:             "GET",
+		PathPattern:        "/apis/v1beta1/pipeline_versions",
+		ProducesMediaTypes: []string{"application/json"},
+		ConsumesMediaTypes: []string{"application/json"},
+		Schemes:            []string{"http", "https"},
+		Params:             params,
+		Reader:             &ListPipelineVersionsReader{formats: a.formats},
+		AuthInfo:           authInfo,
+		Context:            params.Context,
+		Client:             params.HTTPClient,
+	})
+	if err != nil {
+		return nil, err
+	}
+	return result.(*ListPipelineVersionsOK), nil
+
+}
+
+/*
+ListPipelines finds all pipelines
 */
 func (a *Client) ListPipelines(params *ListPipelinesParams, authInfo runtime.ClientAuthInfoWriter) (*ListPipelinesOK, error) {
 	// TODO: Validate the params before sending
diff --git a/backend/api/go_http_client/pipeline_model/BUILD.bazel b/backend/api/go_http_client/pipeline_model/BUILD.bazel
index a8aaa0c7000..a35b4bfea07 100644
--- a/backend/api/go_http_client/pipeline_model/BUILD.bazel
+++ b/backend/api/go_http_client/pipeline_model/BUILD.bazel
@@ -4,9 +4,15 @@ go_library(
     name = "go_default_library",
     srcs = [
         "api_get_template_response.go",
+        "api_list_pipeline_versions_response.go",
         "api_list_pipelines_response.go",
         "api_parameter.go",
         "api_pipeline.go",
+        "api_pipeline_version.go",
+        "api_relationship.go",
+        "api_resource_key.go",
+        "api_resource_reference.go",
+        "api_resource_type.go",
         "api_status.go",
         "api_url.go",
         "protobuf_any.go",
diff --git a/backend/api/go_http_client/pipeline_model/api_get_template_response.go 
b/backend/api/go_http_client/pipeline_model/api_get_template_response.go index ea191b157f5..b1ddc6420f2 100644 --- a/backend/api/go_http_client/pipeline_model/api_get_template_response.go +++ b/backend/api/go_http_client/pipeline_model/api_get_template_response.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/pipeline_model/api_list_pipeline_versions_response.go b/backend/api/go_http_client/pipeline_model/api_list_pipeline_versions_response.go new file mode 100644 index 00000000000..a741068ead5 --- /dev/null +++ b/backend/api/go_http_client/pipeline_model/api_list_pipeline_versions_response.go @@ -0,0 +1,100 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by go-swagger; DO NOT EDIT. + +package pipeline_model + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "strconv" + + strfmt "github.com/go-openapi/strfmt" + + "github.com/go-openapi/errors" + "github.com/go-openapi/swag" +) + +// APIListPipelineVersionsResponse api list pipeline versions response +// swagger:model apiListPipelineVersionsResponse +type APIListPipelineVersionsResponse struct { + + // next page token + NextPageToken string `json:"next_page_token,omitempty"` + + // total size + TotalSize int32 `json:"total_size,omitempty"` + + // versions + Versions []*APIPipelineVersion `json:"versions"` +} + +// Validate validates this api list pipeline versions response +func (m *APIListPipelineVersionsResponse) Validate(formats strfmt.Registry) error { + var res []error + + if err := m.validateVersions(formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *APIListPipelineVersionsResponse) validateVersions(formats strfmt.Registry) error { + + if swag.IsZero(m.Versions) { // not required + return nil + } + + for i := 0; i < len(m.Versions); i++ { + if swag.IsZero(m.Versions[i]) { // not required + continue + } + + if m.Versions[i] != nil { + if err := m.Versions[i].Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("versions" + "." 
+ strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// MarshalBinary interface implementation +func (m *APIListPipelineVersionsResponse) MarshalBinary() ([]byte, error) { + if m == nil { + return nil, nil + } + return swag.WriteJSON(m) +} + +// UnmarshalBinary interface implementation +func (m *APIListPipelineVersionsResponse) UnmarshalBinary(b []byte) error { + var res APIListPipelineVersionsResponse + if err := swag.ReadJSON(b, &res); err != nil { + return err + } + *m = res + return nil +} diff --git a/backend/api/go_http_client/pipeline_model/api_list_pipelines_response.go b/backend/api/go_http_client/pipeline_model/api_list_pipelines_response.go index 71060362794..a0b430061ee 100644 --- a/backend/api/go_http_client/pipeline_model/api_list_pipelines_response.go +++ b/backend/api/go_http_client/pipeline_model/api_list_pipelines_response.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/pipeline_model/api_parameter.go b/backend/api/go_http_client/pipeline_model/api_parameter.go index a6ca46f3efe..91c6f20c8c0 100644 --- a/backend/api/go_http_client/pipeline_model/api_parameter.go +++ b/backend/api/go_http_client/pipeline_model/api_parameter.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/pipeline_model/api_pipeline.go b/backend/api/go_http_client/pipeline_model/api_pipeline.go index d8a8f8f5849..eeb81019792 100644 --- a/backend/api/go_http_client/pipeline_model/api_pipeline.go +++ b/backend/api/go_http_client/pipeline_model/api_pipeline.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -37,6 +37,12 @@ type APIPipeline struct { // Format: date-time CreatedAt strfmt.DateTime `json:"created_at,omitempty"` + // Output only. The default version of the pipeline. As of now, the latest + // version is used as default. (In the future, if desired by customers, we + // can allow them to set default version.) + // Read Only: true + DefaultVersion *APIPipelineVersion `json:"default_version,omitempty"` + // Optional input field. Describing the purpose of the job. Description string `json:"description,omitempty"` @@ -53,10 +59,16 @@ type APIPipeline struct { Name string `json:"name,omitempty"` // Output. The input parameters for this pipeline. + // TODO(jingzhang36): replace this parameters field with the parameters field + // inside PipelineVersion when all usage of the former has been changed to use + // the latter. Parameters []*APIParameter `json:"parameters"` // The URL to the source of the pipeline. This is required when creating the // pipeine through CreatePipeline API. + // TODO(jingzhang36): replace this url field with the code_source_urls field + // inside PipelineVersion when all usage of the former has been changed to use + // the latter. 
URL *APIURL `json:"url,omitempty"`
 }
 
@@ -68,6 +80,10 @@ func (m *APIPipeline) Validate(formats strfmt.Registry) error {
 		res = append(res, err)
 	}
 
+	if err := m.validateDefaultVersion(formats); err != nil {
+		res = append(res, err)
+	}
+
 	if err := m.validateParameters(formats); err != nil {
 		res = append(res, err)
 	}
@@ -95,6 +111,24 @@ func (m *APIPipeline) validateCreatedAt(formats strfmt.Registry) error {
 	return nil
 }
 
+func (m *APIPipeline) validateDefaultVersion(formats strfmt.Registry) error {
+
+	if swag.IsZero(m.DefaultVersion) { // not required
+		return nil
+	}
+
+	if m.DefaultVersion != nil {
+		if err := m.DefaultVersion.Validate(formats); err != nil {
+			if ve, ok := err.(*errors.Validation); ok {
+				return ve.ValidateName("default_version")
+			}
+			return err
+		}
+	}
+
+	return nil
+}
+
 func (m *APIPipeline) validateParameters(formats strfmt.Registry) error {
 
 	if swag.IsZero(m.Parameters) { // not required
diff --git a/backend/api/go_http_client/pipeline_model/api_pipeline_version.go b/backend/api/go_http_client/pipeline_model/api_pipeline_version.go
new file mode 100644
index 00000000000..639cd85266a
--- /dev/null
+++ b/backend/api/go_http_client/pipeline_model/api_pipeline_version.go
@@ -0,0 +1,185 @@
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Code generated by go-swagger; DO NOT EDIT.
+
+package pipeline_model
+
+// This file was generated by the swagger tool.
+// Editing this file might prove futile when you re-run the swagger generate command
+
+import (
+	"strconv"
+
+	strfmt "github.com/go-openapi/strfmt"
+
+	"github.com/go-openapi/errors"
+	"github.com/go-openapi/swag"
+	"github.com/go-openapi/validate"
+)
+
+// APIPipelineVersion api pipeline version
+// swagger:model apiPipelineVersion
+type APIPipelineVersion struct {
+
+	// Input. Optional. Pipeline version code source.
+	CodeSourceURL string `json:"code_source_url,omitempty"`
+
+	// Output. The time this pipeline version is created.
+	// Format: date-time
+	CreatedAt strfmt.DateTime `json:"created_at,omitempty"`
+
+	// Output. Unique version ID. Generated by API server.
+	ID string `json:"id,omitempty"`
+
+	// Optional input field. Version name provided by user.
+	Name string `json:"name,omitempty"`
+
+	// Input. Required. Pipeline version package URL.
+	// When calling the CreatePipelineVersion API method, a package file
+	// location needs to be provided.
+	PackageURL *APIURL `json:"package_url,omitempty"`
+
+	// Output. The input parameters for this pipeline.
+	Parameters []*APIParameter `json:"parameters"`
+
+	// Input. Required. E.g., specify which pipeline this pipeline version belongs
+	// to.
+ ResourceReferences []*APIResourceReference `json:"resource_references"` +} + +// Validate validates this api pipeline version +func (m *APIPipelineVersion) Validate(formats strfmt.Registry) error { + var res []error + + if err := m.validateCreatedAt(formats); err != nil { + res = append(res, err) + } + + if err := m.validatePackageURL(formats); err != nil { + res = append(res, err) + } + + if err := m.validateParameters(formats); err != nil { + res = append(res, err) + } + + if err := m.validateResourceReferences(formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} + +func (m *APIPipelineVersion) validateCreatedAt(formats strfmt.Registry) error { + + if swag.IsZero(m.CreatedAt) { // not required + return nil + } + + if err := validate.FormatOf("created_at", "body", "date-time", m.CreatedAt.String(), formats); err != nil { + return err + } + + return nil +} + +func (m *APIPipelineVersion) validatePackageURL(formats strfmt.Registry) error { + + if swag.IsZero(m.PackageURL) { // not required + return nil + } + + if m.PackageURL != nil { + if err := m.PackageURL.Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("package_url") + } + return err + } + } + + return nil +} + +func (m *APIPipelineVersion) validateParameters(formats strfmt.Registry) error { + + if swag.IsZero(m.Parameters) { // not required + return nil + } + + for i := 0; i < len(m.Parameters); i++ { + if swag.IsZero(m.Parameters[i]) { // not required + continue + } + + if m.Parameters[i] != nil { + if err := m.Parameters[i].Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("parameters" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +func (m *APIPipelineVersion) validateResourceReferences(formats strfmt.Registry) error { + + if swag.IsZero(m.ResourceReferences) { // not required + return nil + } + + for i := 0; i < len(m.ResourceReferences); i++ { + if swag.IsZero(m.ResourceReferences[i]) { // not required + continue + } + + if m.ResourceReferences[i] != nil { + if err := m.ResourceReferences[i].Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("resource_references" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// MarshalBinary interface implementation +func (m *APIPipelineVersion) MarshalBinary() ([]byte, error) { + if m == nil { + return nil, nil + } + return swag.WriteJSON(m) +} + +// UnmarshalBinary interface implementation +func (m *APIPipelineVersion) UnmarshalBinary(b []byte) error { + var res APIPipelineVersion + if err := swag.ReadJSON(b, &res); err != nil { + return err + } + *m = res + return nil +} diff --git a/backend/api/go_http_client/pipeline_model/api_relationship.go b/backend/api/go_http_client/pipeline_model/api_relationship.go new file mode 100644 index 00000000000..097ba38957e --- /dev/null +++ b/backend/api/go_http_client/pipeline_model/api_relationship.go @@ -0,0 +1,80 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Code generated by go-swagger; DO NOT EDIT.
+
+package pipeline_model
+
+// This file was generated by the swagger tool.
+// Editing this file might prove futile when you re-run the swagger generate command
+
+import (
+	"encoding/json"
+
+	strfmt "github.com/go-openapi/strfmt"
+
+	"github.com/go-openapi/errors"
+	"github.com/go-openapi/validate"
+)
+
+// APIRelationship api relationship
+// swagger:model apiRelationship
+type APIRelationship string
+
+const (
+
+	// APIRelationshipUNKNOWNRELATIONSHIP captures enum value "UNKNOWN_RELATIONSHIP"
+	APIRelationshipUNKNOWNRELATIONSHIP APIRelationship = "UNKNOWN_RELATIONSHIP"
+
+	// APIRelationshipOWNER captures enum value "OWNER"
+	APIRelationshipOWNER APIRelationship = "OWNER"
+
+	// APIRelationshipCREATOR captures enum value "CREATOR"
+	APIRelationshipCREATOR APIRelationship = "CREATOR"
+)
+
+// for schema
+var apiRelationshipEnum []interface{}
+
+func init() {
+	var res []APIRelationship
+	if err := json.Unmarshal([]byte(`["UNKNOWN_RELATIONSHIP","OWNER","CREATOR"]`), &res); err != nil {
+		panic(err)
+	}
+	for _, v := range res {
+		apiRelationshipEnum = append(apiRelationshipEnum, v)
+	}
+}
+
+func (m APIRelationship) validateAPIRelationshipEnum(path, location string, value APIRelationship) error {
+	if err := validate.Enum(path, location, value, apiRelationshipEnum); err != nil {
+		return err
+	}
+	return nil
+}
+
+// Validate validates this api relationship
+func (m APIRelationship) Validate(formats strfmt.Registry) error {
+	var res []error
+
+	// value enum
+	if err := m.validateAPIRelationshipEnum("", "body", m); err != nil {
+		return err
+	}
+
+	if len(res) > 0 {
+		return errors.CompositeValidationError(res...)
+	}
+	return nil
+}
diff --git a/backend/api/go_http_client/pipeline_model/api_resource_key.go b/backend/api/go_http_client/pipeline_model/api_resource_key.go
new file mode 100644
index 00000000000..87aa61e318c
--- /dev/null
+++ b/backend/api/go_http_client/pipeline_model/api_resource_key.go
@@ -0,0 +1,86 @@
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Code generated by go-swagger; DO NOT EDIT.
+
+package pipeline_model
+
+// This file was generated by the swagger tool.
+// Editing this file might prove futile when you re-run the swagger generate command
+
+import (
+	strfmt "github.com/go-openapi/strfmt"
+
+	"github.com/go-openapi/errors"
+	"github.com/go-openapi/swag"
+)
+
+// APIResourceKey api resource key
+// swagger:model apiResourceKey
+type APIResourceKey struct {
+
+	// The ID of the resource that is referred to.
+	ID string `json:"id,omitempty"`
+
+	// The type of the resource that is referred to.
+	Type APIResourceType `json:"type,omitempty"`
+}
+
+// Validate validates this api resource key
+func (m *APIResourceKey) Validate(formats strfmt.Registry) error {
+	var res []error
+
+	if err := m.validateType(formats); err != nil {
+		res = append(res, err)
+	}
+
+	if len(res) > 0 {
+		return errors.CompositeValidationError(res...)
+	}
+	return nil
+}
+
+func (m *APIResourceKey) validateType(formats strfmt.Registry) error {
+
+	if swag.IsZero(m.Type) { // not required
+		return nil
+	}
+
+	if err := m.Type.Validate(formats); err != nil {
+		if ve, ok := err.(*errors.Validation); ok {
+			return ve.ValidateName("type")
+		}
+		return err
+	}
+
+	return nil
+}
+
+// MarshalBinary interface implementation
+func (m *APIResourceKey) MarshalBinary() ([]byte, error) {
+	if m == nil {
+		return nil, nil
+	}
+	return swag.WriteJSON(m)
+}
+
+// UnmarshalBinary interface implementation
+func (m *APIResourceKey) UnmarshalBinary(b []byte) error {
+	var res APIResourceKey
+	if err := swag.ReadJSON(b, &res); err != nil {
+		return err
+	}
+	*m = res
+	return nil
+}
diff --git a/backend/api/go_http_client/pipeline_model/api_resource_reference.go b/backend/api/go_http_client/pipeline_model/api_resource_reference.go
new file mode 100644
index 00000000000..b0933b5419a
--- /dev/null
+++ b/backend/api/go_http_client/pipeline_model/api_resource_reference.go
@@ -0,0 +1,111 @@
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Code generated by go-swagger; DO NOT EDIT.
+
+package pipeline_model
+
+// This file was generated by the swagger tool.
+// Editing this file might prove futile when you re-run the swagger generate command
+
+import (
+	strfmt "github.com/go-openapi/strfmt"
+
+	"github.com/go-openapi/errors"
+	"github.com/go-openapi/swag"
+)
+
+// APIResourceReference api resource reference
+// swagger:model apiResourceReference
+type APIResourceReference struct {
+
+	// key
+	Key *APIResourceKey `json:"key,omitempty"`
+
+	// The name of the resource that is referred to.
+	Name string `json:"name,omitempty"`
+
+	// Required field. The relationship from the referred resource to the object.
+	Relationship APIRelationship `json:"relationship,omitempty"`
+}
+
+// Validate validates this api resource reference
+func (m *APIResourceReference) Validate(formats strfmt.Registry) error {
+	var res []error
+
+	if err := m.validateKey(formats); err != nil {
+		res = append(res, err)
+	}
+
+	if err := m.validateRelationship(formats); err != nil {
+		res = append(res, err)
+	}
+
+	if len(res) > 0 {
+		return errors.CompositeValidationError(res...)
+ } + return nil +} + +func (m *APIResourceReference) validateKey(formats strfmt.Registry) error { + + if swag.IsZero(m.Key) { // not required + return nil + } + + if m.Key != nil { + if err := m.Key.Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("key") + } + return err + } + } + + return nil +} + +func (m *APIResourceReference) validateRelationship(formats strfmt.Registry) error { + + if swag.IsZero(m.Relationship) { // not required + return nil + } + + if err := m.Relationship.Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("relationship") + } + return err + } + + return nil +} + +// MarshalBinary interface implementation +func (m *APIResourceReference) MarshalBinary() ([]byte, error) { + if m == nil { + return nil, nil + } + return swag.WriteJSON(m) +} + +// UnmarshalBinary interface implementation +func (m *APIResourceReference) UnmarshalBinary(b []byte) error { + var res APIResourceReference + if err := swag.ReadJSON(b, &res); err != nil { + return err + } + *m = res + return nil +} diff --git a/backend/api/go_http_client/pipeline_model/api_resource_type.go b/backend/api/go_http_client/pipeline_model/api_resource_type.go new file mode 100644 index 00000000000..3c6c602c9d0 --- /dev/null +++ b/backend/api/go_http_client/pipeline_model/api_resource_type.go @@ -0,0 +1,89 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by go-swagger; DO NOT EDIT. + +package pipeline_model + +// This file was generated by the swagger tool. 
+// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "encoding/json" + + strfmt "github.com/go-openapi/strfmt" + + "github.com/go-openapi/errors" + "github.com/go-openapi/validate" +) + +// APIResourceType api resource type +// swagger:model apiResourceType +type APIResourceType string + +const ( + + // APIResourceTypeUNKNOWNRESOURCETYPE captures enum value "UNKNOWN_RESOURCE_TYPE" + APIResourceTypeUNKNOWNRESOURCETYPE APIResourceType = "UNKNOWN_RESOURCE_TYPE" + + // APIResourceTypeEXPERIMENT captures enum value "EXPERIMENT" + APIResourceTypeEXPERIMENT APIResourceType = "EXPERIMENT" + + // APIResourceTypeJOB captures enum value "JOB" + APIResourceTypeJOB APIResourceType = "JOB" + + // APIResourceTypePIPELINE captures enum value "PIPELINE" + APIResourceTypePIPELINE APIResourceType = "PIPELINE" + + // APIResourceTypePIPELINEVERSION captures enum value "PIPELINE_VERSION" + APIResourceTypePIPELINEVERSION APIResourceType = "PIPELINE_VERSION" + + // APIResourceTypeNAMESPACE captures enum value "NAMESPACE" + APIResourceTypeNAMESPACE APIResourceType = "NAMESPACE" +) + +// for schema +var apiResourceTypeEnum []interface{} + +func init() { + var res []APIResourceType + if err := json.Unmarshal([]byte(`["UNKNOWN_RESOURCE_TYPE","EXPERIMENT","JOB","PIPELINE","PIPELINE_VERSION","NAMESPACE"]`), &res); err != nil { + panic(err) + } + for _, v := range res { + apiResourceTypeEnum = append(apiResourceTypeEnum, v) + } +} + +func (m APIResourceType) validateAPIResourceTypeEnum(path, location string, value APIResourceType) error { + if err := validate.Enum(path, location, value, apiResourceTypeEnum); err != nil { + return err + } + return nil +} + +// Validate validates this api resource type +func (m APIResourceType) Validate(formats strfmt.Registry) error { + var res []error + + // value enum + if err := m.validateAPIResourceTypeEnum("", "body", m); err != nil { + return err + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} diff --git a/backend/api/go_http_client/pipeline_model/api_status.go b/backend/api/go_http_client/pipeline_model/api_status.go index a4e3d713153..73494f571ce 100644 --- a/backend/api/go_http_client/pipeline_model/api_status.go +++ b/backend/api/go_http_client/pipeline_model/api_status.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/pipeline_model/api_url.go b/backend/api/go_http_client/pipeline_model/api_url.go index 4a68267d640..f2b59f4cf56 100644 --- a/backend/api/go_http_client/pipeline_model/api_url.go +++ b/backend/api/go_http_client/pipeline_model/api_url.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/pipeline_model/protobuf_any.go b/backend/api/go_http_client/pipeline_model/protobuf_any.go index e51a8992f8f..d017c1c10f7 100644 --- a/backend/api/go_http_client/pipeline_model/protobuf_any.go +++ b/backend/api/go_http_client/pipeline_model/protobuf_any.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
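Reviewer aside (not part of the generated diff): a minimal sketch of how the new pipeline_model types compose — an APIResourceKey nested in an APIResourceReference, validated against the enums registered in the init() blocks above. The IDs are hypothetical placeholders.

// Sketch only, not generated code. IDs below are hypothetical.
package main

import (
	"fmt"

	"github.com/go-openapi/strfmt"

	pipeline_model "github.com/kubeflow/pipelines/backend/api/go_http_client/pipeline_model"
)

func main() {
	// A reference saying: "this object belongs to pipeline <id>".
	ref := &pipeline_model.APIResourceReference{
		Key: &pipeline_model.APIResourceKey{
			ID:   "pipeline-id-123", // hypothetical
			Type: pipeline_model.APIResourceTypePIPELINE,
		},
		Relationship: pipeline_model.APIRelationshipOWNER,
	}

	// Validate walks the nested key and checks both enum fields against
	// the value lists unmarshalled in the generated init() functions.
	if err := ref.Validate(strfmt.Default); err != nil {
		fmt.Println("invalid reference:", err)
		return
	}
	fmt.Println("reference is valid")
}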
diff --git a/backend/api/go_http_client/pipeline_upload_client/pipeline_upload_client.go b/backend/api/go_http_client/pipeline_upload_client/pipeline_upload_client.go index 691835975e4..cd658dd9e39 100644 --- a/backend/api/go_http_client/pipeline_upload_client/pipeline_upload_client.go +++ b/backend/api/go_http_client/pipeline_upload_client/pipeline_upload_client.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/pipeline_upload_client/pipeline_upload_service/BUILD.bazel b/backend/api/go_http_client/pipeline_upload_client/pipeline_upload_service/BUILD.bazel index be540f87e57..67db66690db 100644 --- a/backend/api/go_http_client/pipeline_upload_client/pipeline_upload_service/BUILD.bazel +++ b/backend/api/go_http_client/pipeline_upload_client/pipeline_upload_service/BUILD.bazel @@ -6,6 +6,8 @@ go_library( "pipeline_upload_service_client.go", "upload_pipeline_parameters.go", "upload_pipeline_responses.go", + "upload_pipeline_version_parameters.go", + "upload_pipeline_version_responses.go", ], importpath = "github.com/kubeflow/pipelines/backend/api/go_http_client/pipeline_upload_client/pipeline_upload_service", visibility = ["//visibility:public"], diff --git a/backend/api/go_http_client/pipeline_upload_client/pipeline_upload_service/pipeline_upload_service_client.go b/backend/api/go_http_client/pipeline_upload_client/pipeline_upload_service/pipeline_upload_service_client.go index 3d94b9018a6..35712772518 100644 --- a/backend/api/go_http_client/pipeline_upload_client/pipeline_upload_service/pipeline_upload_service_client.go +++ b/backend/api/go_http_client/pipeline_upload_client/pipeline_upload_service/pipeline_upload_service_client.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -67,6 +67,35 @@ func (a *Client) UploadPipeline(params *UploadPipelineParams, authInfo runtime.C } +/* +UploadPipelineVersion upload pipeline version API +*/ +func (a *Client) UploadPipelineVersion(params *UploadPipelineVersionParams, authInfo runtime.ClientAuthInfoWriter) (*UploadPipelineVersionOK, error) { + // TODO: Validate the params before sending + if params == nil { + params = NewUploadPipelineVersionParams() + } + + result, err := a.transport.Submit(&runtime.ClientOperation{ + ID: "UploadPipelineVersion", + Method: "POST", + PathPattern: "/apis/v1beta1/pipelines/upload_version", + ProducesMediaTypes: []string{"application/json"}, + ConsumesMediaTypes: []string{"multipart/form-data"}, + Schemes: []string{"http", "https"}, + Params: params, + Reader: &UploadPipelineVersionReader{formats: a.formats}, + AuthInfo: authInfo, + Context: params.Context, + Client: params.HTTPClient, + }) + if err != nil { + return nil, err + } + return result.(*UploadPipelineVersionOK), nil + +} + // SetTransport changes the transport on the client func (a *Client) SetTransport(transport runtime.ClientTransport) { a.transport = transport diff --git a/backend/api/go_http_client/pipeline_upload_client/pipeline_upload_service/upload_pipeline_parameters.go b/backend/api/go_http_client/pipeline_upload_client/pipeline_upload_service/upload_pipeline_parameters.go index a23c5ff4764..18f565df8a7 100644 --- a/backend/api/go_http_client/pipeline_upload_client/pipeline_upload_service/upload_pipeline_parameters.go +++ b/backend/api/go_http_client/pipeline_upload_client/pipeline_upload_service/upload_pipeline_parameters.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -75,6 +75,8 @@ for the upload pipeline operation typically these are written to a http.Request */ type UploadPipelineParams struct { + /*Description*/ + Description *string /*Name*/ Name *string /*Uploadfile @@ -121,6 +123,17 @@ func (o *UploadPipelineParams) SetHTTPClient(client *http.Client) { o.HTTPClient = client } +// WithDescription adds the description to the upload pipeline params +func (o *UploadPipelineParams) WithDescription(description *string) *UploadPipelineParams { + o.SetDescription(description) + return o +} + +// SetDescription adds the description to the upload pipeline params +func (o *UploadPipelineParams) SetDescription(description *string) { + o.Description = description +} + // WithName adds the name to the upload pipeline params func (o *UploadPipelineParams) WithName(name *string) *UploadPipelineParams { o.SetName(name) @@ -151,6 +164,22 @@ func (o *UploadPipelineParams) WriteToRequest(r runtime.ClientRequest, reg strfm } var res []error + if o.Description != nil { + + // query param description + var qrDescription string + if o.Description != nil { + qrDescription = *o.Description + } + qDescription := qrDescription + if qDescription != "" { + if err := r.SetQueryParam("description", qDescription); err != nil { + return err + } + } + + } + if o.Name != nil { // query param name diff --git a/backend/api/go_http_client/pipeline_upload_client/pipeline_upload_service/upload_pipeline_responses.go b/backend/api/go_http_client/pipeline_upload_client/pipeline_upload_service/upload_pipeline_responses.go index 0460bbeb4bf..1b09d1a9ebb 100644 --- a/backend/api/go_http_client/pipeline_upload_client/pipeline_upload_service/upload_pipeline_responses.go +++ b/backend/api/go_http_client/pipeline_upload_client/pipeline_upload_service/upload_pipeline_responses.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/pipeline_upload_client/pipeline_upload_service/upload_pipeline_version_parameters.go b/backend/api/go_http_client/pipeline_upload_client/pipeline_upload_service/upload_pipeline_version_parameters.go new file mode 100644 index 00000000000..5c5ecae486b --- /dev/null +++ b/backend/api/go_http_client/pipeline_upload_client/pipeline_upload_service/upload_pipeline_version_parameters.go @@ -0,0 +1,208 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by go-swagger; DO NOT EDIT. + +package pipeline_upload_service + +// This file was generated by the swagger tool. 
+// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "context" + "net/http" + "time" + + "github.com/go-openapi/errors" + "github.com/go-openapi/runtime" + cr "github.com/go-openapi/runtime/client" + + strfmt "github.com/go-openapi/strfmt" +) + +// NewUploadPipelineVersionParams creates a new UploadPipelineVersionParams object +// with the default values initialized. +func NewUploadPipelineVersionParams() *UploadPipelineVersionParams { + var () + return &UploadPipelineVersionParams{ + + timeout: cr.DefaultTimeout, + } +} + +// NewUploadPipelineVersionParamsWithTimeout creates a new UploadPipelineVersionParams object +// with the default values initialized, and the ability to set a timeout on a request +func NewUploadPipelineVersionParamsWithTimeout(timeout time.Duration) *UploadPipelineVersionParams { + var () + return &UploadPipelineVersionParams{ + + timeout: timeout, + } +} + +// NewUploadPipelineVersionParamsWithContext creates a new UploadPipelineVersionParams object +// with the default values initialized, and the ability to set a context for a request +func NewUploadPipelineVersionParamsWithContext(ctx context.Context) *UploadPipelineVersionParams { + var () + return &UploadPipelineVersionParams{ + + Context: ctx, + } +} + +// NewUploadPipelineVersionParamsWithHTTPClient creates a new UploadPipelineVersionParams object +// with the default values initialized, and the ability to set a custom HTTPClient for a request +func NewUploadPipelineVersionParamsWithHTTPClient(client *http.Client) *UploadPipelineVersionParams { + var () + return &UploadPipelineVersionParams{ + HTTPClient: client, + } +} + +/*UploadPipelineVersionParams contains all the parameters to send to the API endpoint +for the upload pipeline version operation typically these are written to a http.Request +*/ +type UploadPipelineVersionParams struct { + + /*Name*/ + Name *string + /*Pipelineid*/ + Pipelineid *string + /*Uploadfile + The pipeline to upload. Maximum size of 32MB is supported. 
+ + */ + Uploadfile runtime.NamedReadCloser + + timeout time.Duration + Context context.Context + HTTPClient *http.Client +} + +// WithTimeout adds the timeout to the upload pipeline version params +func (o *UploadPipelineVersionParams) WithTimeout(timeout time.Duration) *UploadPipelineVersionParams { + o.SetTimeout(timeout) + return o +} + +// SetTimeout adds the timeout to the upload pipeline version params +func (o *UploadPipelineVersionParams) SetTimeout(timeout time.Duration) { + o.timeout = timeout +} + +// WithContext adds the context to the upload pipeline version params +func (o *UploadPipelineVersionParams) WithContext(ctx context.Context) *UploadPipelineVersionParams { + o.SetContext(ctx) + return o +} + +// SetContext adds the context to the upload pipeline version params +func (o *UploadPipelineVersionParams) SetContext(ctx context.Context) { + o.Context = ctx +} + +// WithHTTPClient adds the HTTPClient to the upload pipeline version params +func (o *UploadPipelineVersionParams) WithHTTPClient(client *http.Client) *UploadPipelineVersionParams { + o.SetHTTPClient(client) + return o +} + +// SetHTTPClient adds the HTTPClient to the upload pipeline version params +func (o *UploadPipelineVersionParams) SetHTTPClient(client *http.Client) { + o.HTTPClient = client +} + +// WithName adds the name to the upload pipeline version params +func (o *UploadPipelineVersionParams) WithName(name *string) *UploadPipelineVersionParams { + o.SetName(name) + return o +} + +// SetName adds the name to the upload pipeline version params +func (o *UploadPipelineVersionParams) SetName(name *string) { + o.Name = name +} + +// WithPipelineid adds the pipelineid to the upload pipeline version params +func (o *UploadPipelineVersionParams) WithPipelineid(pipelineid *string) *UploadPipelineVersionParams { + o.SetPipelineid(pipelineid) + return o +} + +// SetPipelineid adds the pipelineid to the upload pipeline version params +func (o *UploadPipelineVersionParams) SetPipelineid(pipelineid *string) { + o.Pipelineid = pipelineid +} + +// WithUploadfile adds the uploadfile to the upload pipeline version params +func (o *UploadPipelineVersionParams) WithUploadfile(uploadfile runtime.NamedReadCloser) *UploadPipelineVersionParams { + o.SetUploadfile(uploadfile) + return o +} + +// SetUploadfile adds the uploadfile to the upload pipeline version params +func (o *UploadPipelineVersionParams) SetUploadfile(uploadfile runtime.NamedReadCloser) { + o.Uploadfile = uploadfile +} + +// WriteToRequest writes these params to a swagger request +func (o *UploadPipelineVersionParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { + + if err := r.SetTimeout(o.timeout); err != nil { + return err + } + var res []error + + if o.Name != nil { + + // query param name + var qrName string + if o.Name != nil { + qrName = *o.Name + } + qName := qrName + if qName != "" { + if err := r.SetQueryParam("name", qName); err != nil { + return err + } + } + + } + + if o.Pipelineid != nil { + + // query param pipelineid + var qrPipelineid string + if o.Pipelineid != nil { + qrPipelineid = *o.Pipelineid + } + qPipelineid := qrPipelineid + if qPipelineid != "" { + if err := r.SetQueryParam("pipelineid", qPipelineid); err != nil { + return err + } + } + + } + + // form file param uploadfile + if err := r.SetFileParam("uploadfile", o.Uploadfile); err != nil { + return err + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) 
+ } + return nil +} diff --git a/backend/api/go_http_client/pipeline_upload_client/pipeline_upload_service/upload_pipeline_version_responses.go b/backend/api/go_http_client/pipeline_upload_client/pipeline_upload_service/upload_pipeline_version_responses.go new file mode 100644 index 00000000000..a54d072b194 --- /dev/null +++ b/backend/api/go_http_client/pipeline_upload_client/pipeline_upload_service/upload_pipeline_version_responses.go @@ -0,0 +1,126 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by go-swagger; DO NOT EDIT. + +package pipeline_upload_service + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "fmt" + "io" + + "github.com/go-openapi/runtime" + + strfmt "github.com/go-openapi/strfmt" + + pipeline_upload_model "github.com/kubeflow/pipelines/backend/api/go_http_client/pipeline_upload_model" +) + +// UploadPipelineVersionReader is a Reader for the UploadPipelineVersion structure. +type UploadPipelineVersionReader struct { + formats strfmt.Registry +} + +// ReadResponse reads a server response into the received o. +func (o *UploadPipelineVersionReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { + switch response.Code() { + + case 200: + result := NewUploadPipelineVersionOK() + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + return result, nil + + default: + result := NewUploadPipelineVersionDefault(response.Code()) + if err := result.readResponse(response, consumer, o.formats); err != nil { + return nil, err + } + if response.Code()/100 == 2 { + return result, nil + } + return nil, result + } +} + +// NewUploadPipelineVersionOK creates a UploadPipelineVersionOK with default headers values +func NewUploadPipelineVersionOK() *UploadPipelineVersionOK { + return &UploadPipelineVersionOK{} +} + +/*UploadPipelineVersionOK handles this case with default header values. 
+ +UploadPipelineVersionOK upload pipeline version o k +*/ +type UploadPipelineVersionOK struct { + Payload *pipeline_upload_model.APIPipelineVersion +} + +func (o *UploadPipelineVersionOK) Error() string { + return fmt.Sprintf("[POST /apis/v1beta1/pipelines/upload_version][%d] uploadPipelineVersionOK %+v", 200, o.Payload) +} + +func (o *UploadPipelineVersionOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + o.Payload = new(pipeline_upload_model.APIPipelineVersion) + + // response payload + if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} + +// NewUploadPipelineVersionDefault creates a UploadPipelineVersionDefault with default headers values +func NewUploadPipelineVersionDefault(code int) *UploadPipelineVersionDefault { + return &UploadPipelineVersionDefault{ + _statusCode: code, + } +} + +/*UploadPipelineVersionDefault handles this case with default header values. + +UploadPipelineVersionDefault upload pipeline version default +*/ +type UploadPipelineVersionDefault struct { + _statusCode int + + Payload *pipeline_upload_model.APIStatus +} + +// Code gets the status code for the upload pipeline version default response +func (o *UploadPipelineVersionDefault) Code() int { + return o._statusCode +} + +func (o *UploadPipelineVersionDefault) Error() string { + return fmt.Sprintf("[POST /apis/v1beta1/pipelines/upload_version][%d] UploadPipelineVersion default %+v", o._statusCode, o.Payload) +} + +func (o *UploadPipelineVersionDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { + + o.Payload = new(pipeline_upload_model.APIStatus) + + // response payload + if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { + return err + } + + return nil +} diff --git a/backend/api/go_http_client/pipeline_upload_model/BUILD.bazel b/backend/api/go_http_client/pipeline_upload_model/BUILD.bazel index 9ec28c79c7a..47139760513 100644 --- a/backend/api/go_http_client/pipeline_upload_model/BUILD.bazel +++ b/backend/api/go_http_client/pipeline_upload_model/BUILD.bazel @@ -5,6 +5,11 @@ go_library( srcs = [ "api_parameter.go", "api_pipeline.go", + "api_pipeline_version.go", + "api_relationship.go", + "api_resource_key.go", + "api_resource_reference.go", + "api_resource_type.go", "api_status.go", "api_url.go", "protobuf_any.go", diff --git a/backend/api/go_http_client/pipeline_upload_model/api_parameter.go b/backend/api/go_http_client/pipeline_upload_model/api_parameter.go index c05b0225bb2..bc5826a121e 100644 --- a/backend/api/go_http_client/pipeline_upload_model/api_parameter.go +++ b/backend/api/go_http_client/pipeline_upload_model/api_parameter.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/pipeline_upload_model/api_pipeline.go b/backend/api/go_http_client/pipeline_upload_model/api_pipeline.go index b0b53d0c9bb..e4759131ae8 100644 --- a/backend/api/go_http_client/pipeline_upload_model/api_pipeline.go +++ b/backend/api/go_http_client/pipeline_upload_model/api_pipeline.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
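Reviewer aside: with the params and responses above in place, calling the new upload_version endpoint looks roughly like the sketch below. The host, base path, pipeline ID, and file name are assumptions, and no auth is attached (pass a real runtime.ClientAuthInfoWriter where your deployment requires one).

// Sketch only: exercising the new UploadPipelineVersion client method.
package main

import (
	"fmt"
	"os"

	"github.com/go-openapi/runtime"
	httptransport "github.com/go-openapi/runtime/client"
	"github.com/go-openapi/strfmt"

	"github.com/kubeflow/pipelines/backend/api/go_http_client/pipeline_upload_client/pipeline_upload_service"
)

func main() {
	f, err := os.Open("pipeline.tar.gz") // hypothetical package; <= 32MB per the API doc
	if err != nil {
		panic(err)
	}
	defer f.Close()

	transport := httptransport.New("localhost:8888", "/", []string{"http"}) // assumed endpoint
	svc := pipeline_upload_service.New(transport, strfmt.Default)

	name := "v2"                // version name (query param "name")
	pipelineID := "pipeline-id" // parent pipeline (query param "pipelineid"), hypothetical
	params := pipeline_upload_service.NewUploadPipelineVersionParams().
		WithName(&name).
		WithPipelineid(&pipelineID).
		WithUploadfile(runtime.NamedReader(f.Name(), f))

	ok, err := svc.UploadPipelineVersion(params, nil) // nil: no auth in this sketch
	if err != nil {
		panic(err) // non-2xx responses surface as *UploadPipelineVersionDefault
	}
	fmt.Println("created version:", ok.Payload.ID)
}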
diff --git a/backend/api/go_http_client/pipeline_upload_model/api_pipeline_version.go b/backend/api/go_http_client/pipeline_upload_model/api_pipeline_version.go new file mode 100644 index 00000000000..506a65b383c --- /dev/null +++ b/backend/api/go_http_client/pipeline_upload_model/api_pipeline_version.go @@ -0,0 +1,185 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by go-swagger; DO NOT EDIT. + +package pipeline_upload_model + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "strconv" + + strfmt "github.com/go-openapi/strfmt" + + "github.com/go-openapi/errors" + "github.com/go-openapi/swag" + "github.com/go-openapi/validate" +) + +// APIPipelineVersion api pipeline version +// swagger:model apiPipelineVersion +type APIPipelineVersion struct { + + // Input. Optional. Pipeline version code source. + CodeSourceURL string `json:"code_source_url,omitempty"` + + // Output. The time this pipeline version is created. + // Format: date-time + CreatedAt strfmt.DateTime `json:"created_at,omitempty"` + + // Output. Unique version ID. Generated by API server. + ID string `json:"id,omitempty"` + + // Optional input field. Version name provided by user. + Name string `json:"name,omitempty"` + + // Input. Required. Pipeline version package url. + // Whe calling CreatePipelineVersion API method, need to provide one package + // file location. + PackageURL *APIURL `json:"package_url,omitempty"` + + // Output. The input parameters for this pipeline. + Parameters []*APIParameter `json:"parameters"` + + // Input. Required. E.g., specify which pipeline this pipeline version belongs + // to. + ResourceReferences []*APIResourceReference `json:"resource_references"` +} + +// Validate validates this api pipeline version +func (m *APIPipelineVersion) Validate(formats strfmt.Registry) error { + var res []error + + if err := m.validateCreatedAt(formats); err != nil { + res = append(res, err) + } + + if err := m.validatePackageURL(formats); err != nil { + res = append(res, err) + } + + if err := m.validateParameters(formats); err != nil { + res = append(res, err) + } + + if err := m.validateResourceReferences(formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) 
+ } + return nil +} + +func (m *APIPipelineVersion) validateCreatedAt(formats strfmt.Registry) error { + + if swag.IsZero(m.CreatedAt) { // not required + return nil + } + + if err := validate.FormatOf("created_at", "body", "date-time", m.CreatedAt.String(), formats); err != nil { + return err + } + + return nil +} + +func (m *APIPipelineVersion) validatePackageURL(formats strfmt.Registry) error { + + if swag.IsZero(m.PackageURL) { // not required + return nil + } + + if m.PackageURL != nil { + if err := m.PackageURL.Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("package_url") + } + return err + } + } + + return nil +} + +func (m *APIPipelineVersion) validateParameters(formats strfmt.Registry) error { + + if swag.IsZero(m.Parameters) { // not required + return nil + } + + for i := 0; i < len(m.Parameters); i++ { + if swag.IsZero(m.Parameters[i]) { // not required + continue + } + + if m.Parameters[i] != nil { + if err := m.Parameters[i].Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("parameters" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +func (m *APIPipelineVersion) validateResourceReferences(formats strfmt.Registry) error { + + if swag.IsZero(m.ResourceReferences) { // not required + return nil + } + + for i := 0; i < len(m.ResourceReferences); i++ { + if swag.IsZero(m.ResourceReferences[i]) { // not required + continue + } + + if m.ResourceReferences[i] != nil { + if err := m.ResourceReferences[i].Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("resource_references" + "." + strconv.Itoa(i)) + } + return err + } + } + + } + + return nil +} + +// MarshalBinary interface implementation +func (m *APIPipelineVersion) MarshalBinary() ([]byte, error) { + if m == nil { + return nil, nil + } + return swag.WriteJSON(m) +} + +// UnmarshalBinary interface implementation +func (m *APIPipelineVersion) UnmarshalBinary(b []byte) error { + var res APIPipelineVersion + if err := swag.ReadJSON(b, &res); err != nil { + return err + } + *m = res + return nil +} diff --git a/backend/api/go_http_client/pipeline_upload_model/api_relationship.go b/backend/api/go_http_client/pipeline_upload_model/api_relationship.go new file mode 100644 index 00000000000..402a3471a4c --- /dev/null +++ b/backend/api/go_http_client/pipeline_upload_model/api_relationship.go @@ -0,0 +1,80 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by go-swagger; DO NOT EDIT. + +package pipeline_upload_model + +// This file was generated by the swagger tool. 
+// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "encoding/json" + + strfmt "github.com/go-openapi/strfmt" + + "github.com/go-openapi/errors" + "github.com/go-openapi/validate" +) + +// APIRelationship api relationship +// swagger:model apiRelationship +type APIRelationship string + +const ( + + // APIRelationshipUNKNOWNRELATIONSHIP captures enum value "UNKNOWN_RELATIONSHIP" + APIRelationshipUNKNOWNRELATIONSHIP APIRelationship = "UNKNOWN_RELATIONSHIP" + + // APIRelationshipOWNER captures enum value "OWNER" + APIRelationshipOWNER APIRelationship = "OWNER" + + // APIRelationshipCREATOR captures enum value "CREATOR" + APIRelationshipCREATOR APIRelationship = "CREATOR" +) + +// for schema +var apiRelationshipEnum []interface{} + +func init() { + var res []APIRelationship + if err := json.Unmarshal([]byte(`["UNKNOWN_RELATIONSHIP","OWNER","CREATOR"]`), &res); err != nil { + panic(err) + } + for _, v := range res { + apiRelationshipEnum = append(apiRelationshipEnum, v) + } +} + +func (m APIRelationship) validateAPIRelationshipEnum(path, location string, value APIRelationship) error { + if err := validate.Enum(path, location, value, apiRelationshipEnum); err != nil { + return err + } + return nil +} + +// Validate validates this api relationship +func (m APIRelationship) Validate(formats strfmt.Registry) error { + var res []error + + // value enum + if err := m.validateAPIRelationshipEnum("", "body", m); err != nil { + return err + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} diff --git a/backend/api/go_http_client/pipeline_upload_model/api_resource_key.go b/backend/api/go_http_client/pipeline_upload_model/api_resource_key.go new file mode 100644 index 00000000000..6e57354dfbe --- /dev/null +++ b/backend/api/go_http_client/pipeline_upload_model/api_resource_key.go @@ -0,0 +1,86 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by go-swagger; DO NOT EDIT. + +package pipeline_upload_model + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + strfmt "github.com/go-openapi/strfmt" + + "github.com/go-openapi/errors" + "github.com/go-openapi/swag" +) + +// APIResourceKey api resource key +// swagger:model apiResourceKey +type APIResourceKey struct { + + // The ID of the resource that referred to. + ID string `json:"id,omitempty"` + + // The type of the resource that referred to. + Type APIResourceType `json:"type,omitempty"` +} + +// Validate validates this api resource key +func (m *APIResourceKey) Validate(formats strfmt.Registry) error { + var res []error + + if err := m.validateType(formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) 
+ } + return nil +} + +func (m *APIResourceKey) validateType(formats strfmt.Registry) error { + + if swag.IsZero(m.Type) { // not required + return nil + } + + if err := m.Type.Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("type") + } + return err + } + + return nil +} + +// MarshalBinary interface implementation +func (m *APIResourceKey) MarshalBinary() ([]byte, error) { + if m == nil { + return nil, nil + } + return swag.WriteJSON(m) +} + +// UnmarshalBinary interface implementation +func (m *APIResourceKey) UnmarshalBinary(b []byte) error { + var res APIResourceKey + if err := swag.ReadJSON(b, &res); err != nil { + return err + } + *m = res + return nil +} diff --git a/backend/api/go_http_client/pipeline_upload_model/api_resource_reference.go b/backend/api/go_http_client/pipeline_upload_model/api_resource_reference.go new file mode 100644 index 00000000000..6ea02e865dd --- /dev/null +++ b/backend/api/go_http_client/pipeline_upload_model/api_resource_reference.go @@ -0,0 +1,111 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by go-swagger; DO NOT EDIT. + +package pipeline_upload_model + +// This file was generated by the swagger tool. +// Editing this file might prove futile when you re-run the swagger generate command + +import ( + strfmt "github.com/go-openapi/strfmt" + + "github.com/go-openapi/errors" + "github.com/go-openapi/swag" +) + +// APIResourceReference api resource reference +// swagger:model apiResourceReference +type APIResourceReference struct { + + // key + Key *APIResourceKey `json:"key,omitempty"` + + // The name of the resource that referred to. + Name string `json:"name,omitempty"` + + // Required field. The relationship from referred resource to the object. + Relationship APIRelationship `json:"relationship,omitempty"` +} + +// Validate validates this api resource reference +func (m *APIResourceReference) Validate(formats strfmt.Registry) error { + var res []error + + if err := m.validateKey(formats); err != nil { + res = append(res, err) + } + + if err := m.validateRelationship(formats); err != nil { + res = append(res, err) + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) 
+ } + return nil +} + +func (m *APIResourceReference) validateKey(formats strfmt.Registry) error { + + if swag.IsZero(m.Key) { // not required + return nil + } + + if m.Key != nil { + if err := m.Key.Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("key") + } + return err + } + } + + return nil +} + +func (m *APIResourceReference) validateRelationship(formats strfmt.Registry) error { + + if swag.IsZero(m.Relationship) { // not required + return nil + } + + if err := m.Relationship.Validate(formats); err != nil { + if ve, ok := err.(*errors.Validation); ok { + return ve.ValidateName("relationship") + } + return err + } + + return nil +} + +// MarshalBinary interface implementation +func (m *APIResourceReference) MarshalBinary() ([]byte, error) { + if m == nil { + return nil, nil + } + return swag.WriteJSON(m) +} + +// UnmarshalBinary interface implementation +func (m *APIResourceReference) UnmarshalBinary(b []byte) error { + var res APIResourceReference + if err := swag.ReadJSON(b, &res); err != nil { + return err + } + *m = res + return nil +} diff --git a/backend/api/go_http_client/pipeline_upload_model/api_resource_type.go b/backend/api/go_http_client/pipeline_upload_model/api_resource_type.go new file mode 100644 index 00000000000..e068e6ae4ba --- /dev/null +++ b/backend/api/go_http_client/pipeline_upload_model/api_resource_type.go @@ -0,0 +1,89 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Code generated by go-swagger; DO NOT EDIT. + +package pipeline_upload_model + +// This file was generated by the swagger tool. 
+// Editing this file might prove futile when you re-run the swagger generate command + +import ( + "encoding/json" + + strfmt "github.com/go-openapi/strfmt" + + "github.com/go-openapi/errors" + "github.com/go-openapi/validate" +) + +// APIResourceType api resource type +// swagger:model apiResourceType +type APIResourceType string + +const ( + + // APIResourceTypeUNKNOWNRESOURCETYPE captures enum value "UNKNOWN_RESOURCE_TYPE" + APIResourceTypeUNKNOWNRESOURCETYPE APIResourceType = "UNKNOWN_RESOURCE_TYPE" + + // APIResourceTypeEXPERIMENT captures enum value "EXPERIMENT" + APIResourceTypeEXPERIMENT APIResourceType = "EXPERIMENT" + + // APIResourceTypeJOB captures enum value "JOB" + APIResourceTypeJOB APIResourceType = "JOB" + + // APIResourceTypePIPELINE captures enum value "PIPELINE" + APIResourceTypePIPELINE APIResourceType = "PIPELINE" + + // APIResourceTypePIPELINEVERSION captures enum value "PIPELINE_VERSION" + APIResourceTypePIPELINEVERSION APIResourceType = "PIPELINE_VERSION" + + // APIResourceTypeNAMESPACE captures enum value "NAMESPACE" + APIResourceTypeNAMESPACE APIResourceType = "NAMESPACE" +) + +// for schema +var apiResourceTypeEnum []interface{} + +func init() { + var res []APIResourceType + if err := json.Unmarshal([]byte(`["UNKNOWN_RESOURCE_TYPE","EXPERIMENT","JOB","PIPELINE","PIPELINE_VERSION","NAMESPACE"]`), &res); err != nil { + panic(err) + } + for _, v := range res { + apiResourceTypeEnum = append(apiResourceTypeEnum, v) + } +} + +func (m APIResourceType) validateAPIResourceTypeEnum(path, location string, value APIResourceType) error { + if err := validate.Enum(path, location, value, apiResourceTypeEnum); err != nil { + return err + } + return nil +} + +// Validate validates this api resource type +func (m APIResourceType) Validate(formats strfmt.Registry) error { + var res []error + + // value enum + if err := m.validateAPIResourceTypeEnum("", "body", m); err != nil { + return err + } + + if len(res) > 0 { + return errors.CompositeValidationError(res...) + } + return nil +} diff --git a/backend/api/go_http_client/pipeline_upload_model/api_status.go b/backend/api/go_http_client/pipeline_upload_model/api_status.go index 5734cbd3a0f..c45de143a49 100644 --- a/backend/api/go_http_client/pipeline_upload_model/api_status.go +++ b/backend/api/go_http_client/pipeline_upload_model/api_status.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/pipeline_upload_model/api_url.go b/backend/api/go_http_client/pipeline_upload_model/api_url.go index ecf10bf3a9b..439fb181c92 100644 --- a/backend/api/go_http_client/pipeline_upload_model/api_url.go +++ b/backend/api/go_http_client/pipeline_upload_model/api_url.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
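Reviewer aside: the enum wrappers above reject any value outside the JSON list unmarshalled in init(), so bad inputs fail fast at Validate time rather than at the server. A tiny sketch:

// Sketch only: generated enum types validate against the baked-in list.
package main

import (
	"fmt"

	"github.com/go-openapi/strfmt"

	"github.com/kubeflow/pipelines/backend/api/go_http_client/pipeline_upload_model"
)

func main() {
	good := pipeline_upload_model.APIResourceTypePIPELINEVERSION
	fmt.Println(good.Validate(strfmt.Default)) // <nil>

	bad := pipeline_upload_model.APIResourceType("CLUSTER") // not in the enum
	fmt.Println(bad.Validate(strfmt.Default))               // validation error from validate.Enum
}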
diff --git a/backend/api/go_http_client/pipeline_upload_model/protobuf_any.go b/backend/api/go_http_client/pipeline_upload_model/protobuf_any.go index fd3b2f46a45..1f9895af6ea 100644 --- a/backend/api/go_http_client/pipeline_upload_model/protobuf_any.go +++ b/backend/api/go_http_client/pipeline_upload_model/protobuf_any.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/run_client/run_client.go b/backend/api/go_http_client/run_client/run_client.go index 0a09e35c8ca..0d5e1c2c346 100644 --- a/backend/api/go_http_client/run_client/run_client.go +++ b/backend/api/go_http_client/run_client/run_client.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/run_client/run_service/archive_run_parameters.go b/backend/api/go_http_client/run_client/run_service/archive_run_parameters.go index 4baa5b68840..6199fd7e4f4 100644 --- a/backend/api/go_http_client/run_client/run_service/archive_run_parameters.go +++ b/backend/api/go_http_client/run_client/run_service/archive_run_parameters.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/run_client/run_service/archive_run_responses.go b/backend/api/go_http_client/run_client/run_service/archive_run_responses.go index 79c672bfcfb..cd3597a8fc4 100644 --- a/backend/api/go_http_client/run_client/run_service/archive_run_responses.go +++ b/backend/api/go_http_client/run_client/run_service/archive_run_responses.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/run_client/run_service/create_run_parameters.go b/backend/api/go_http_client/run_client/run_service/create_run_parameters.go index 7f671ab5188..ca5a5038e6a 100644 --- a/backend/api/go_http_client/run_client/run_service/create_run_parameters.go +++ b/backend/api/go_http_client/run_client/run_service/create_run_parameters.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/run_client/run_service/create_run_responses.go b/backend/api/go_http_client/run_client/run_service/create_run_responses.go index b33fccd3530..a3684864dc3 100644 --- a/backend/api/go_http_client/run_client/run_service/create_run_responses.go +++ b/backend/api/go_http_client/run_client/run_service/create_run_responses.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/backend/api/go_http_client/run_client/run_service/delete_run_parameters.go b/backend/api/go_http_client/run_client/run_service/delete_run_parameters.go index 3aaa0bb2b0a..a69d318fc13 100644 --- a/backend/api/go_http_client/run_client/run_service/delete_run_parameters.go +++ b/backend/api/go_http_client/run_client/run_service/delete_run_parameters.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/run_client/run_service/delete_run_responses.go b/backend/api/go_http_client/run_client/run_service/delete_run_responses.go index 332f1ce4aea..03e996d29cf 100644 --- a/backend/api/go_http_client/run_client/run_service/delete_run_responses.go +++ b/backend/api/go_http_client/run_client/run_service/delete_run_responses.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/run_client/run_service/get_run_parameters.go b/backend/api/go_http_client/run_client/run_service/get_run_parameters.go index eb0ef543bd1..d25095d8a20 100644 --- a/backend/api/go_http_client/run_client/run_service/get_run_parameters.go +++ b/backend/api/go_http_client/run_client/run_service/get_run_parameters.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/run_client/run_service/get_run_responses.go b/backend/api/go_http_client/run_client/run_service/get_run_responses.go index 2d322daecab..791016ee519 100644 --- a/backend/api/go_http_client/run_client/run_service/get_run_responses.go +++ b/backend/api/go_http_client/run_client/run_service/get_run_responses.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/run_client/run_service/list_runs_parameters.go b/backend/api/go_http_client/run_client/run_service/list_runs_parameters.go index a1ff4389785..6494854ae46 100644 --- a/backend/api/go_http_client/run_client/run_service/list_runs_parameters.go +++ b/backend/api/go_http_client/run_client/run_service/list_runs_parameters.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -89,8 +89,9 @@ for the list runs operation typically these are written to a http.Request type ListRunsParams struct { /*Filter - A base-64 encoded, JSON-serialized Filter protocol buffer (see - filter.proto). + A url-encoded, JSON-serialized Filter protocol buffer (see + [filter.proto](https://github.com/kubeflow/pipelines/ + blob/master/backend/api/filter.proto)). */ Filter *string @@ -110,7 +111,7 @@ type ListRunsParams struct { ResourceReferenceKeyType *string /*SortBy Can be format of "field_name", "field_name asc" or "field_name des" - Ascending by default. + (Example, "name asc" or "id des"). Ascending by default. 
*/ SortBy *string diff --git a/backend/api/go_http_client/run_client/run_service/list_runs_responses.go b/backend/api/go_http_client/run_client/run_service/list_runs_responses.go index bc30bec1306..cc0f2a9f9f2 100644 --- a/backend/api/go_http_client/run_client/run_service/list_runs_responses.go +++ b/backend/api/go_http_client/run_client/run_service/list_runs_responses.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/run_client/run_service/read_artifact_parameters.go b/backend/api/go_http_client/run_client/run_service/read_artifact_parameters.go index d9dcb62bb95..31d8549bccd 100644 --- a/backend/api/go_http_client/run_client/run_service/read_artifact_parameters.go +++ b/backend/api/go_http_client/run_client/run_service/read_artifact_parameters.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/run_client/run_service/read_artifact_responses.go b/backend/api/go_http_client/run_client/run_service/read_artifact_responses.go index 825ba3b9dc2..bebc4cb5220 100644 --- a/backend/api/go_http_client/run_client/run_service/read_artifact_responses.go +++ b/backend/api/go_http_client/run_client/run_service/read_artifact_responses.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/run_client/run_service/report_run_metrics_parameters.go b/backend/api/go_http_client/run_client/run_service/report_run_metrics_parameters.go index 42275073049..d08fb8f5c03 100644 --- a/backend/api/go_http_client/run_client/run_service/report_run_metrics_parameters.go +++ b/backend/api/go_http_client/run_client/run_service/report_run_metrics_parameters.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/run_client/run_service/report_run_metrics_responses.go b/backend/api/go_http_client/run_client/run_service/report_run_metrics_responses.go index 2acf6a87b8f..612d763fea5 100644 --- a/backend/api/go_http_client/run_client/run_service/report_run_metrics_responses.go +++ b/backend/api/go_http_client/run_client/run_service/report_run_metrics_responses.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
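Reviewer aside: the updated doc comments say Filter is a JSON-serialized Filter proto sent as a (url-encoded) query param and SortBy takes "field_name [asc|desc]". A hedged sketch of ListRuns using both; the predicate shape is an assumption based on filter.proto, and the host is hypothetical.

// Sketch only: listing runs with a filter and sort order.
package main

import (
	"fmt"

	httptransport "github.com/go-openapi/runtime/client"
	"github.com/go-openapi/strfmt"

	"github.com/kubeflow/pipelines/backend/api/go_http_client/run_client/run_service"
)

func main() {
	transport := httptransport.New("localhost:8888", "/", []string{"http"}) // assumed endpoint
	runs := run_service.New(transport, strfmt.Default)

	filter := `{"predicates":[{"key":"name","op":"EQUALS","string_value":"my-run"}]}` // assumed schema
	sortBy := "created_at desc"

	params := run_service.NewListRunsParams().
		WithFilter(&filter). // the transport query-encodes this for us
		WithSortBy(&sortBy)

	resp, err := runs.ListRuns(params, nil) // nil: no auth in this sketch
	if err != nil {
		panic(err)
	}
	fmt.Println("runs returned:", len(resp.Payload.Runs))
}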
diff --git a/backend/api/go_http_client/run_client/run_service/retry_run_parameters.go b/backend/api/go_http_client/run_client/run_service/retry_run_parameters.go index d37e070e7ce..ebe257f8b14 100644 --- a/backend/api/go_http_client/run_client/run_service/retry_run_parameters.go +++ b/backend/api/go_http_client/run_client/run_service/retry_run_parameters.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/run_client/run_service/retry_run_responses.go b/backend/api/go_http_client/run_client/run_service/retry_run_responses.go index 6327b72f953..d531f3170bb 100644 --- a/backend/api/go_http_client/run_client/run_service/retry_run_responses.go +++ b/backend/api/go_http_client/run_client/run_service/retry_run_responses.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/run_client/run_service/run_service_client.go b/backend/api/go_http_client/run_client/run_service/run_service_client.go index 743377492ee..215317ce101 100644 --- a/backend/api/go_http_client/run_client/run_service/run_service_client.go +++ b/backend/api/go_http_client/run_client/run_service/run_service_client.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -39,7 +39,7 @@ type Client struct { } /* -ArchiveRun archive run API +ArchiveRun archives a run */ func (a *Client) ArchiveRun(params *ArchiveRunParams, authInfo runtime.ClientAuthInfoWriter) (*ArchiveRunOK, error) { // TODO: Validate the params before sending @@ -68,7 +68,7 @@ func (a *Client) ArchiveRun(params *ArchiveRunParams, authInfo runtime.ClientAut } /* -CreateRun create run API +CreateRun creates a new run */ func (a *Client) CreateRun(params *CreateRunParams, authInfo runtime.ClientAuthInfoWriter) (*CreateRunOK, error) { // TODO: Validate the params before sending @@ -97,7 +97,7 @@ func (a *Client) CreateRun(params *CreateRunParams, authInfo runtime.ClientAuthI } /* -DeleteRun delete run API +DeleteRun deletes a run */ func (a *Client) DeleteRun(params *DeleteRunParams, authInfo runtime.ClientAuthInfoWriter) (*DeleteRunOK, error) { // TODO: Validate the params before sending @@ -126,7 +126,7 @@ func (a *Client) DeleteRun(params *DeleteRunParams, authInfo runtime.ClientAuthI } /* -GetRun get run API +GetRun finds a specific run by ID */ func (a *Client) GetRun(params *GetRunParams, authInfo runtime.ClientAuthInfoWriter) (*GetRunOK, error) { // TODO: Validate the params before sending @@ -155,7 +155,7 @@ func (a *Client) GetRun(params *GetRunParams, authInfo runtime.ClientAuthInfoWri } /* -ListRuns list runs API +ListRuns finds all runs */ func (a *Client) ListRuns(params *ListRunsParams, authInfo runtime.ClientAuthInfoWriter) (*ListRunsOK, error) { // TODO: Validate the params before sending @@ -184,7 +184,7 @@ func (a *Client) ListRuns(params *ListRunsParams, authInfo runtime.ClientAuthInf } /* -ReadArtifact read artifact API +ReadArtifact finds a run s artifact data */ func (a *Client) ReadArtifact(params *ReadArtifactParams, authInfo runtime.ClientAuthInfoWriter) (*ReadArtifactOK, error) { // 
TODO: Validate the params before sending @@ -242,7 +242,7 @@ func (a *Client) ReportRunMetrics(params *ReportRunMetricsParams, authInfo runti } /* -RetryRun retry run API +RetryRun res initiate a failed or terminated run */ func (a *Client) RetryRun(params *RetryRunParams, authInfo runtime.ClientAuthInfoWriter) (*RetryRunOK, error) { // TODO: Validate the params before sending @@ -271,7 +271,7 @@ func (a *Client) RetryRun(params *RetryRunParams, authInfo runtime.ClientAuthInf } /* -TerminateRun terminate run API +TerminateRun terminates an active run */ func (a *Client) TerminateRun(params *TerminateRunParams, authInfo runtime.ClientAuthInfoWriter) (*TerminateRunOK, error) { // TODO: Validate the params before sending @@ -300,7 +300,7 @@ func (a *Client) TerminateRun(params *TerminateRunParams, authInfo runtime.Clien } /* -UnarchiveRun unarchive run API +UnarchiveRun restores an archived run */ func (a *Client) UnarchiveRun(params *UnarchiveRunParams, authInfo runtime.ClientAuthInfoWriter) (*UnarchiveRunOK, error) { // TODO: Validate the params before sending diff --git a/backend/api/go_http_client/run_client/run_service/terminate_run_parameters.go b/backend/api/go_http_client/run_client/run_service/terminate_run_parameters.go index b7208353c7a..8ed4662e9c6 100644 --- a/backend/api/go_http_client/run_client/run_service/terminate_run_parameters.go +++ b/backend/api/go_http_client/run_client/run_service/terminate_run_parameters.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/run_client/run_service/terminate_run_responses.go b/backend/api/go_http_client/run_client/run_service/terminate_run_responses.go index acced2b1c55..854602e2b4e 100644 --- a/backend/api/go_http_client/run_client/run_service/terminate_run_responses.go +++ b/backend/api/go_http_client/run_client/run_service/terminate_run_responses.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/run_client/run_service/unarchive_run_parameters.go b/backend/api/go_http_client/run_client/run_service/unarchive_run_parameters.go index 65fcbec2ab0..f97c8bae979 100644 --- a/backend/api/go_http_client/run_client/run_service/unarchive_run_parameters.go +++ b/backend/api/go_http_client/run_client/run_service/unarchive_run_parameters.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/run_client/run_service/unarchive_run_responses.go b/backend/api/go_http_client/run_client/run_service/unarchive_run_responses.go index 809356ba0e5..38372e22f43 100644 --- a/backend/api/go_http_client/run_client/run_service/unarchive_run_responses.go +++ b/backend/api/go_http_client/run_client/run_service/unarchive_run_responses.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
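Reviewer aside: the summaries above now describe behavior (archive, terminate, re-initiate) instead of restating the method name. A sketch of the terminate call; the run ID is a placeholder, and WithRunID follows go-swagger's usual casing of the run_id path parameter (an assumption — that params file is not shown in this diff).

// Sketch only: terminating an active run via the generated client.
package main

import (
	"fmt"

	httptransport "github.com/go-openapi/runtime/client"
	"github.com/go-openapi/strfmt"

	"github.com/kubeflow/pipelines/backend/api/go_http_client/run_client/run_service"
)

func main() {
	transport := httptransport.New("localhost:8888", "/", []string{"http"}) // assumed endpoint
	runs := run_service.New(transport, strfmt.Default)

	params := run_service.NewTerminateRunParams().WithRunID("run-id-123") // hypothetical ID
	if _, err := runs.TerminateRun(params, nil); err != nil {
		panic(err) // non-2xx arrives as the generated default error type
	}
	fmt.Println("terminate requested")
}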
diff --git a/backend/api/go_http_client/run_model/api_list_runs_response.go b/backend/api/go_http_client/run_model/api_list_runs_response.go index d0d53e10f7d..7b929b8d8e0 100644 --- a/backend/api/go_http_client/run_model/api_list_runs_response.go +++ b/backend/api/go_http_client/run_model/api_list_runs_response.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/run_model/api_parameter.go b/backend/api/go_http_client/run_model/api_parameter.go index 77317872620..26a0bb1a2bf 100644 --- a/backend/api/go_http_client/run_model/api_parameter.go +++ b/backend/api/go_http_client/run_model/api_parameter.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/run_model/api_pipeline_runtime.go b/backend/api/go_http_client/run_model/api_pipeline_runtime.go index 22b3973e9b9..414257256d7 100644 --- a/backend/api/go_http_client/run_model/api_pipeline_runtime.go +++ b/backend/api/go_http_client/run_model/api_pipeline_runtime.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/run_model/api_pipeline_spec.go b/backend/api/go_http_client/run_model/api_pipeline_spec.go index 67b7e882f73..92f4baa160f 100644 --- a/backend/api/go_http_client/run_model/api_pipeline_spec.go +++ b/backend/api/go_http_client/run_model/api_pipeline_spec.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/run_model/api_read_artifact_response.go b/backend/api/go_http_client/run_model/api_read_artifact_response.go index cc71f3384fb..a4add05cd65 100644 --- a/backend/api/go_http_client/run_model/api_read_artifact_response.go +++ b/backend/api/go_http_client/run_model/api_read_artifact_response.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/run_model/api_relationship.go b/backend/api/go_http_client/run_model/api_relationship.go index 8d0759b51a8..22cc2ba403d 100644 --- a/backend/api/go_http_client/run_model/api_relationship.go +++ b/backend/api/go_http_client/run_model/api_relationship.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/backend/api/go_http_client/run_model/api_report_run_metrics_request.go b/backend/api/go_http_client/run_model/api_report_run_metrics_request.go index 7ea636661c5..b83cae4182e 100644 --- a/backend/api/go_http_client/run_model/api_report_run_metrics_request.go +++ b/backend/api/go_http_client/run_model/api_report_run_metrics_request.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/run_model/api_report_run_metrics_response.go b/backend/api/go_http_client/run_model/api_report_run_metrics_response.go index 8f715085e30..14e548b4597 100644 --- a/backend/api/go_http_client/run_model/api_report_run_metrics_response.go +++ b/backend/api/go_http_client/run_model/api_report_run_metrics_response.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/run_model/api_resource_key.go b/backend/api/go_http_client/run_model/api_resource_key.go index 701cb4342a3..a493bd2f3e4 100644 --- a/backend/api/go_http_client/run_model/api_resource_key.go +++ b/backend/api/go_http_client/run_model/api_resource_key.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/run_model/api_resource_reference.go b/backend/api/go_http_client/run_model/api_resource_reference.go index 745b80fa052..3539c15572e 100644 --- a/backend/api/go_http_client/run_model/api_resource_reference.go +++ b/backend/api/go_http_client/run_model/api_resource_reference.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/run_model/api_resource_type.go b/backend/api/go_http_client/run_model/api_resource_type.go index d0fc5c2d171..f0ca2f5d9ba 100644 --- a/backend/api/go_http_client/run_model/api_resource_type.go +++ b/backend/api/go_http_client/run_model/api_resource_type.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -42,6 +42,15 @@ const (
 
 	// APIResourceTypeJOB captures enum value "JOB"
 	APIResourceTypeJOB APIResourceType = "JOB"
+
+	// APIResourceTypePIPELINE captures enum value "PIPELINE"
+	APIResourceTypePIPELINE APIResourceType = "PIPELINE"
+
+	// APIResourceTypePIPELINEVERSION captures enum value "PIPELINE_VERSION"
+	APIResourceTypePIPELINEVERSION APIResourceType = "PIPELINE_VERSION"
+
+	// APIResourceTypeNAMESPACE captures enum value "NAMESPACE"
+	APIResourceTypeNAMESPACE APIResourceType = "NAMESPACE"
 )
 
 // for schema
@@ -49,7 +58,7 @@ var apiResourceTypeEnum []interface{}
 
 func init() {
 	var res []APIResourceType
-	if err := json.Unmarshal([]byte(`["UNKNOWN_RESOURCE_TYPE","EXPERIMENT","JOB"]`), &res); err != nil {
+	if err := json.Unmarshal([]byte(`["UNKNOWN_RESOURCE_TYPE","EXPERIMENT","JOB","PIPELINE","PIPELINE_VERSION","NAMESPACE"]`), &res); err != nil {
 		panic(err)
 	}
 	for _, v := range res {
diff --git a/backend/api/go_http_client/run_model/api_run.go b/backend/api/go_http_client/run_model/api_run.go
index 1489609ba0a..069cbdb845c 100644
--- a/backend/api/go_http_client/run_model/api_run.go
+++ b/backend/api/go_http_client/run_model/api_run.go
@@ -1,4 +1,4 @@
-// Copyright 2019 Google LLC
+// Copyright 2020 Google LLC
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -74,6 +74,9 @@ type APIRun struct {
 	// Format: date-time
 	ScheduledAt strfmt.DateTime `json:"scheduled_at,omitempty"`
 
+	// Optional input field. Specify which Kubernetes service account this run uses.
+	ServiceAccount string `json:"service_account,omitempty"`
+
 	// Output. The status of the run.
 	// One of [Pending, Running, Succeeded, Skipped, Failed, Error]
 	Status string `json:"status,omitempty"`
diff --git a/backend/api/go_http_client/run_model/api_run_detail.go b/backend/api/go_http_client/run_model/api_run_detail.go
index 9064cf124a6..ca044aa792d 100644
--- a/backend/api/go_http_client/run_model/api_run_detail.go
+++ b/backend/api/go_http_client/run_model/api_run_detail.go
@@ -1,4 +1,4 @@
-// Copyright 2019 Google LLC
+// Copyright 2020 Google LLC
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/backend/api/go_http_client/run_model/api_run_metric.go b/backend/api/go_http_client/run_model/api_run_metric.go
index fc3eddc2c9c..d8da7a0f55b 100644
--- a/backend/api/go_http_client/run_model/api_run_metric.go
+++ b/backend/api/go_http_client/run_model/api_run_metric.go
@@ -1,4 +1,4 @@
-// Copyright 2019 Google LLC
+// Copyright 2020 Google LLC
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -33,14 +33,15 @@ type APIRunMetric struct {
 	// The display format of metric.
 	Format RunMetricFormat `json:"format,omitempty"`
 
-	// Required. The user defined name of the metric. It must between 1 and 63 characters
-	// long and must conform to the following regular expression:
+	// Required. The user defined name of the metric. It must be between 1 and 63
+	// characters long and must conform to the following regular expression:
 	// `[a-z]([-a-z0-9]*[a-z0-9])?`.
 	Name string `json:"name,omitempty"`
 
-	// Required. The runtime node ID which reports the metric. The node ID can be found in
-	// the RunDetail.workflow.Status. Metric with same (node_id, name)
-	// are considerd as duplicate. Only the first reporting will be recorded. Max length is 128.
+	// Required. The runtime node ID which reports the metric. The node ID can be
+	// found in the RunDetail.workflow.Status. Metrics with the same (node_id, name)
+	// are considered as duplicates. Only the first reporting will be recorded. Max
+	// length is 128.
 	NodeID string `json:"node_id,omitempty"`
 
 	// The number value of the metric.
diff --git a/backend/api/go_http_client/run_model/api_status.go b/backend/api/go_http_client/run_model/api_status.go
index af87a395745..8e408f35144 100644
--- a/backend/api/go_http_client/run_model/api_status.go
+++ b/backend/api/go_http_client/run_model/api_status.go
@@ -1,4 +1,4 @@
-// Copyright 2019 Google LLC
+// Copyright 2020 Google LLC
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/backend/api/go_http_client/run_model/protobuf_any.go b/backend/api/go_http_client/run_model/protobuf_any.go
index bcaea0482ab..3c1f148cb56 100644
--- a/backend/api/go_http_client/run_model/protobuf_any.go
+++ b/backend/api/go_http_client/run_model/protobuf_any.go
@@ -1,4 +1,4 @@
-// Copyright 2019 Google LLC
+// Copyright 2020 Google LLC
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/backend/api/go_http_client/run_model/report_run_metrics_response_report_run_metric_result.go b/backend/api/go_http_client/run_model/report_run_metrics_response_report_run_metric_result.go
index 70b032d0f1c..50b0bc9534e 100644
--- a/backend/api/go_http_client/run_model/report_run_metrics_response_report_run_metric_result.go
+++ b/backend/api/go_http_client/run_model/report_run_metrics_response_report_run_metric_result.go
@@ -1,4 +1,4 @@
-// Copyright 2019 Google LLC
+// Copyright 2020 Google LLC
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/backend/api/go_http_client/run_model/report_run_metrics_response_report_run_metric_result_status.go b/backend/api/go_http_client/run_model/report_run_metrics_response_report_run_metric_result_status.go
index c5ce7edf290..a1ed4bba9ed 100644
--- a/backend/api/go_http_client/run_model/report_run_metrics_response_report_run_metric_result_status.go
+++ b/backend/api/go_http_client/run_model/report_run_metrics_response_report_run_metric_result_status.go
@@ -1,4 +1,4 @@
-// Copyright 2019 Google LLC
+// Copyright 2020 Google LLC
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
diff --git a/backend/api/go_http_client/run_model/run_metric_format.go b/backend/api/go_http_client/run_model/run_metric_format.go
index 6a4c90ed4ff..943b48584c5 100644
--- a/backend/api/go_http_client/run_model/run_metric_format.go
+++ b/backend/api/go_http_client/run_model/run_metric_format.go
@@ -1,4 +1,4 @@
-// Copyright 2019 Google LLC
+// Copyright 2020 Google LLC
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
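A hedged sketch of filling in the regenerated APIRunMetric model so that it satisfies the constraints documented above; every value is invented for illustration, and RunMetricFormatRAW is assumed to be one of the generated RunMetricFormat constants.

package main

import (
	"fmt"

	"github.com/kubeflow/pipelines/backend/api/go_http_client/run_model"
)

func main() {
	// Name must be 1-63 characters and match `[a-z]([-a-z0-9]*[a-z0-9])?`;
	// NodeID is capped at 128 characters. Only the first metric reported for
	// a given (run_id, node_id, name) is recorded; later duplicates are ignored.
	metric := &run_model.APIRunMetric{
		Name:        "accuracy-score",             // hypothetical metric name
		NodeID:      "train-step-1234",            // hypothetical workflow node ID
		NumberValue: 0.92,                         // the metric's numeric value
		Format:      run_model.RunMetricFormatRAW, // assumed generated enum constant
	}
	fmt.Printf("%+v\n", metric)
}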
diff --git a/backend/api/go_http_client/run_model/run_storage_state.go b/backend/api/go_http_client/run_model/run_storage_state.go index a3ec6ecf88c..7f5e6504e56 100644 --- a/backend/api/go_http_client/run_model/run_storage_state.go +++ b/backend/api/go_http_client/run_model/run_storage_state.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/visualization_client/visualization_client.go b/backend/api/go_http_client/visualization_client/visualization_client.go index 90034093495..56389cf8546 100644 --- a/backend/api/go_http_client/visualization_client/visualization_client.go +++ b/backend/api/go_http_client/visualization_client/visualization_client.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/visualization_client/visualization_service/create_visualization_parameters.go b/backend/api/go_http_client/visualization_client/visualization_service/create_visualization_parameters.go index 0f21a81aba5..26ee3aca1d0 100644 --- a/backend/api/go_http_client/visualization_client/visualization_service/create_visualization_parameters.go +++ b/backend/api/go_http_client/visualization_client/visualization_service/create_visualization_parameters.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -79,6 +79,8 @@ type CreateVisualizationParams struct { /*Body*/ Body *visualization_model.APIVisualization + /*Namespace*/ + Namespace string timeout time.Duration Context context.Context @@ -129,6 +131,17 @@ func (o *CreateVisualizationParams) SetBody(body *visualization_model.APIVisuali o.Body = body } +// WithNamespace adds the namespace to the create visualization params +func (o *CreateVisualizationParams) WithNamespace(namespace string) *CreateVisualizationParams { + o.SetNamespace(namespace) + return o +} + +// SetNamespace adds the namespace to the create visualization params +func (o *CreateVisualizationParams) SetNamespace(namespace string) { + o.Namespace = namespace +} + // WriteToRequest writes these params to a swagger request func (o *CreateVisualizationParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error { @@ -143,6 +156,11 @@ func (o *CreateVisualizationParams) WriteToRequest(r runtime.ClientRequest, reg } } + // path param namespace + if err := r.SetPathParam("namespace", o.Namespace); err != nil { + return err + } + if len(res) > 0 { return errors.CompositeValidationError(res...) 
} diff --git a/backend/api/go_http_client/visualization_client/visualization_service/create_visualization_responses.go b/backend/api/go_http_client/visualization_client/visualization_service/create_visualization_responses.go index 5d8bfdc1f61..065fc505e7b 100644 --- a/backend/api/go_http_client/visualization_client/visualization_service/create_visualization_responses.go +++ b/backend/api/go_http_client/visualization_client/visualization_service/create_visualization_responses.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -72,7 +72,7 @@ type CreateVisualizationOK struct { } func (o *CreateVisualizationOK) Error() string { - return fmt.Sprintf("[POST /apis/v1beta1/visualizations][%d] createVisualizationOK %+v", 200, o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/visualizations/{namespace}][%d] createVisualizationOK %+v", 200, o.Payload) } func (o *CreateVisualizationOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { @@ -110,7 +110,7 @@ func (o *CreateVisualizationDefault) Code() int { } func (o *CreateVisualizationDefault) Error() string { - return fmt.Sprintf("[POST /apis/v1beta1/visualizations][%d] CreateVisualization default %+v", o._statusCode, o.Payload) + return fmt.Sprintf("[POST /apis/v1beta1/visualizations/{namespace}][%d] CreateVisualization default %+v", o._statusCode, o.Payload) } func (o *CreateVisualizationDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { diff --git a/backend/api/go_http_client/visualization_client/visualization_service/visualization_service_client.go b/backend/api/go_http_client/visualization_client/visualization_service/visualization_service_client.go index 783f663162c..474c706bfab 100644 --- a/backend/api/go_http_client/visualization_client/visualization_service/visualization_service_client.go +++ b/backend/api/go_http_client/visualization_client/visualization_service/visualization_service_client.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -50,7 +50,7 @@ func (a *Client) CreateVisualization(params *CreateVisualizationParams, authInfo result, err := a.transport.Submit(&runtime.ClientOperation{ ID: "CreateVisualization", Method: "POST", - PathPattern: "/apis/v1beta1/visualizations", + PathPattern: "/apis/v1beta1/visualizations/{namespace}", ProducesMediaTypes: []string{"application/json"}, ConsumesMediaTypes: []string{"application/json"}, Schemes: []string{"http", "https"}, diff --git a/backend/api/go_http_client/visualization_model/api_status.go b/backend/api/go_http_client/visualization_model/api_status.go index 2cbe563625d..404ec4ea2fb 100644 --- a/backend/api/go_http_client/visualization_model/api_status.go +++ b/backend/api/go_http_client/visualization_model/api_status.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
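Since CreateVisualization now takes the namespace as a path parameter (POST /apis/v1beta1/visualizations/{namespace}), a sketch of setting it through the regenerated client; the APIVisualization field values and the APIVisualizationTypeROCCURVE constant are assumptions for illustration, not values taken from this change.

package main

import (
	"log"

	httptransport "github.com/go-openapi/runtime/client"
	"github.com/go-openapi/strfmt"

	"github.com/kubeflow/pipelines/backend/api/go_http_client/visualization_client/visualization_service"
	"github.com/kubeflow/pipelines/backend/api/go_http_client/visualization_model"
)

func main() {
	transport := httptransport.New("localhost:8888", "/", []string{"http"})
	client := visualization_service.New(transport, strfmt.Default)

	// WithNamespace fills the new {namespace} path parameter.
	params := visualization_service.NewCreateVisualizationParams().
		WithNamespace("kubeflow"). // placeholder namespace
		WithBody(&visualization_model.APIVisualization{
			Type:      visualization_model.APIVisualizationTypeROCCURVE, // assumed constant
			Source:    "gs://some-bucket/data.csv",                      // hypothetical artifact
			Arguments: `{"is_generated": "True"}`,                       // hypothetical arguments
		})
	if _, err := client.CreateVisualization(params, nil); err != nil {
		log.Fatalf("create visualization failed: %v", err)
	}
}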
diff --git a/backend/api/go_http_client/visualization_model/api_visualization.go b/backend/api/go_http_client/visualization_model/api_visualization.go index af96286f12b..178bd4f83cf 100644 --- a/backend/api/go_http_client/visualization_model/api_visualization.go +++ b/backend/api/go_http_client/visualization_model/api_visualization.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/visualization_model/api_visualization_type.go b/backend/api/go_http_client/visualization_model/api_visualization_type.go index 26a86a35cac..014391ce40f 100644 --- a/backend/api/go_http_client/visualization_model/api_visualization_type.go +++ b/backend/api/go_http_client/visualization_model/api_visualization_type.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/go_http_client/visualization_model/protobuf_any.go b/backend/api/go_http_client/visualization_model/protobuf_any.go index 34c022d0fdf..7d3ca0ab62e 100644 --- a/backend/api/go_http_client/visualization_model/protobuf_any.go +++ b/backend/api/go_http_client/visualization_model/protobuf_any.go @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/backend/api/job.proto b/backend/api/job.proto index 42829b82c7c..7910740a1fa 100644 --- a/backend/api/job.proto +++ b/backend/api/job.proto @@ -61,7 +61,9 @@ option (grpc.gateway.protoc_gen_swagger.options.openapiv2_swagger) = { } }; + service JobService { + //Create a new job. rpc CreateJob(CreateJobRequest) returns (Job) { option (google.api.http) = { post: "/apis/v1beta1/jobs" @@ -69,30 +71,35 @@ service JobService { }; } + //Find a specific job by ID. rpc GetJob(GetJobRequest) returns (Job) { option (google.api.http) = { get: "/apis/v1beta1/jobs/{id}" }; } + //Find all jobs. rpc ListJobs(ListJobsRequest) returns (ListJobsResponse) { option (google.api.http) = { get: "/apis/v1beta1/jobs" }; } + //Restarts a job that was previously stopped. All runs associated with the job will continue. rpc EnableJob(EnableJobRequest) returns (google.protobuf.Empty) { option (google.api.http) = { post: "/apis/v1beta1/jobs/{id}/enable" }; } + //Stops a job and all its associated runs. The job is not deleted. rpc DisableJob(DisableJobRequest) returns (google.protobuf.Empty) { option (google.api.http) = { post: "/apis/v1beta1/jobs/{id}/disable" }; } + //Delete a job. rpc DeleteJob(DeleteJobRequest) returns (google.protobuf.Empty) { option (google.api.http) = { delete: "/apis/v1beta1/jobs/{id}" @@ -114,7 +121,7 @@ message ListJobsRequest { string page_token = 1; int32 page_size = 2; - // Can be format of "field_name", "field_name asc" or "field_name des" + // Can be format of "field_name", "field_name asc" or "field_name des". // Ascending by default. string sort_by = 3; @@ -123,8 +130,9 @@ message ListJobsRequest { // resource_reference_key.type=EXPERIMENT&resource_reference_key.id=123 ResourceKey resource_reference_key = 4; - // A base-64 encoded, JSON-serialized Filter protocol buffer (see - // filter.proto). 
+  // A url-encoded, JSON-serialized Filter protocol buffer (see
+  // [filter.proto](https://github.com/kubeflow/pipelines/
+  // blob/master/backend/api/filter.proto)).
   string filter = 5;
 }
@@ -158,7 +166,8 @@ message CronSchedule {
   // The end time of the cron job
   google.protobuf.Timestamp end_time = 2;
 
-  // The cron string. For details how to compose a cron, visit ttps://en.wikipedia.org/wiki/Cron
+  // The cron string. For details how to compose a cron, visit
+  // https://en.wikipedia.org/wiki/Cron
   string cron = 3;
 }
@@ -197,9 +206,12 @@ message Job {
   // for the scheduled job.
   PipelineSpec pipeline_spec = 4;
 
-  // Optional input field. Specify which resource this run belongs to.
+  // Optional input field. Specify which resource this job belongs to.
   repeated ResourceReference resource_references = 5;
 
+  // Optional input field. Specify which Kubernetes service account this job uses.
+  string service_account = 18;
+
   // Required input field.
   // Specify how many runs can be executed concurrently. Range [1-10]
   int64 max_concurrency = 6;
@@ -234,4 +246,10 @@ message Job {
 
   // Input. Whether the job is enabled or not.
   bool enabled = 16;
+
+  // Optional input field. Whether the job should catch up if behind schedule.
+  // If true, the job will only schedule the latest interval if behind schedule.
+  // If false, the job will catch up on each past interval.
+  bool no_catchup = 17;
 }
+// Next field number of Job will be 19
\ No newline at end of file
diff --git a/backend/api/pipeline.proto b/backend/api/pipeline.proto
index fa775bed4f8..e00133202fb 100644
--- a/backend/api/pipeline.proto
+++ b/backend/api/pipeline.proto
@@ -61,6 +61,7 @@ option (grpc.gateway.protoc_gen_swagger.options.openapiv2_swagger) = {
 };
 
 service PipelineService {
+  //Add a pipeline.
   rpc CreatePipeline(CreatePipelineRequest) returns (Pipeline) {
     option (google.api.http) = {
       post: "/apis/v1beta1/pipelines"
@@ -68,29 +69,67 @@ service PipelineService {
     };
   }
 
+  //Find a specific pipeline by ID.
   rpc GetPipeline(GetPipelineRequest) returns (Pipeline) {
     option (google.api.http) = {
       get: "/apis/v1beta1/pipelines/{id}"
     };
   }
 
+  //Find all pipelines.
   rpc ListPipelines(ListPipelinesRequest) returns (ListPipelinesResponse) {
     option (google.api.http) = {
       get: "/apis/v1beta1/pipelines"
     };
   }
 
+  //Delete a pipeline.
   rpc DeletePipeline(DeletePipelineRequest) returns (google.protobuf.Empty) {
     option (google.api.http) = {
       delete: "/apis/v1beta1/pipelines/{id}"
     };
   }
 
+  //Returns a single YAML template that contains the description, parameters, and metadata associated with the pipeline provided.
rpc GetTemplate(GetTemplateRequest) returns (GetTemplateResponse) { option (google.api.http) = { get: "/apis/v1beta1/pipelines/{id}/templates" }; } + + rpc CreatePipelineVersion(CreatePipelineVersionRequest) + returns (PipelineVersion) { + option (google.api.http) = { + post: "/apis/v1beta1/pipeline_versions" + body: "version" + }; + } + + rpc GetPipelineVersion(GetPipelineVersionRequest) returns (PipelineVersion) { + option (google.api.http) = { + get: "/apis/v1beta1/pipeline_versions/{version_id}" + }; + } + + rpc ListPipelineVersions(ListPipelineVersionsRequest) + returns (ListPipelineVersionsResponse) { + option (google.api.http) = { + get: "/apis/v1beta1/pipeline_versions" + }; + } + + rpc DeletePipelineVersion(DeletePipelineVersionRequest) + returns (google.protobuf.Empty) { + option (google.api.http) = { + delete: "/apis/v1beta1/pipeline_versions/{version_id}" + }; + } + + rpc GetPipelineVersionTemplate(GetPipelineVersionTemplateRequest) returns (GetTemplateResponse) { + option (google.api.http) = { + get: "/apis/v1beta1/pipeline_versions/{version_id}/templates" + }; + } } message Url { @@ -115,8 +154,9 @@ message ListPipelinesRequest { // Ascending by default. string sort_by = 3; - // A base-64 encoded, JSON-serialized Filter protocol buffer (see - // filter.proto). + // A url-encoded, JSON-serialized Filter protocol buffer (see + // [filter.proto](https://github.com/kubeflow/pipelines/ + // blob/master/backend/api/filter.proto)). string filter = 4; } @@ -138,7 +178,7 @@ message GetTemplateResponse { string template = 1; } -message GetPipelineVersionTemplateRequest{ +message GetPipelineVersionTemplateRequest { string version_id = 1; } @@ -157,11 +197,23 @@ message ListPipelineVersionsRequest { ResourceKey resource_key = 1; int32 page_size = 2; string page_token = 3; + + // Can be format of "field_name", "field_name asc" or "field_name des" + // Ascending by default. + string sort_by = 4; + // A base-64 encoded, JSON-serialized Filter protocol buffer (see + // filter.proto). + string filter = 5; } message ListPipelineVersionsResponse { repeated PipelineVersion versions = 1; string next_page_token = 2; + int32 total_size = 3; +} + +message DeletePipelineVersionRequest { + string version_id = 1; } message Pipeline { @@ -199,8 +251,7 @@ message Pipeline { // Output only. The default version of the pipeline. As of now, the latest // version is used as default. (In the future, if desired by customers, we // can allow them to set default version.) - // TODO(jingzhang36): expose this in API pipeline definition with FE changes. - // PipelineVersion default_version = 8; + PipelineVersion default_version = 8; } message PipelineVersion { @@ -228,4 +279,3 @@ message PipelineVersion { // to. repeated ResourceReference resource_references = 7; } - diff --git a/backend/api/resource_reference.proto b/backend/api/resource_reference.proto index d866843d063..27b474f2d85 100644 --- a/backend/api/resource_reference.proto +++ b/backend/api/resource_reference.proto @@ -22,6 +22,8 @@ enum ResourceType { EXPERIMENT = 1; JOB = 2; PIPELINE = 3; + PIPELINE_VERSION = 4; + NAMESPACE = 5; } enum Relationship { diff --git a/backend/api/run.proto b/backend/api/run.proto index bb1728f0408..78698a3682e 100644 --- a/backend/api/run.proto +++ b/backend/api/run.proto @@ -59,7 +59,9 @@ option (grpc.gateway.protoc_gen_swagger.options.openapiv2_swagger) = { } }; + service RunService { + //Create a new run. 
rpc CreateRun(CreateRunRequest) returns (RunDetail) { option (google.api.http) = { post: "/apis/v1beta1/runs" @@ -67,30 +69,35 @@ service RunService { }; } + //Find a specific run by ID. rpc GetRun(GetRunRequest) returns (RunDetail) { option (google.api.http) = { get: "/apis/v1beta1/runs/{run_id}" }; } + //Find all runs. rpc ListRuns(ListRunsRequest) returns (ListRunsResponse) { option (google.api.http) = { get: "/apis/v1beta1/runs" }; } + //Archive a run. rpc ArchiveRun(ArchiveRunRequest) returns (google.protobuf.Empty) { option (google.api.http) = { post: "/apis/v1beta1/runs/{id}:archive" }; } + //Restore an archived run. rpc UnarchiveRun(UnarchiveRunRequest) returns (google.protobuf.Empty) { option (google.api.http) = { post: "/apis/v1beta1/runs/{id}:unarchive" }; } + //Delete a run. rpc DeleteRun(DeleteRunRequest) returns (google.protobuf.Empty) { option (google.api.http) = { delete: "/apis/v1beta1/runs/{id}" @@ -98,28 +105,32 @@ service RunService { } // ReportRunMetrics reports metrics of a run. Each metric is reported in its - // own transaction, so this API accepts partial failures. Metric can be uniquely - // identified by (run_id, node_id, name). Duplicate reporting will be + // own transaction, so this API accepts partial failures. Metric can be + // uniquely identified by (run_id, node_id, name). Duplicate reporting will be // ignored by the API. First reporting wins. - rpc ReportRunMetrics(ReportRunMetricsRequest) returns (ReportRunMetricsResponse) { + rpc ReportRunMetrics(ReportRunMetricsRequest) + returns (ReportRunMetricsResponse) { option (google.api.http) = { post: "/apis/v1beta1/runs/{run_id}:reportMetrics" body: "*" }; } + //Find a run's artifact data. rpc ReadArtifact(ReadArtifactRequest) returns (ReadArtifactResponse) { option (google.api.http) = { get: "/apis/v1beta1/runs/{run_id}/nodes/{node_id}/artifacts/{artifact_name}:read" }; } + //Terminate an active run. rpc TerminateRun(TerminateRunRequest) returns (google.protobuf.Empty) { option (google.api.http) = { post: "/apis/v1beta1/runs/{run_id}/terminate" }; } + //Re-initiate a failed or terminated run. rpc RetryRun(RetryRunRequest) returns (google.protobuf.Empty) { option (google.api.http) = { post: "/apis/v1beta1/runs/{run_id}/retry" @@ -139,7 +150,7 @@ message ListRunsRequest { string page_token = 1; int32 page_size = 2; // Can be format of "field_name", "field_name asc" or "field_name des" - // Ascending by default. + // (Example, "name asc" or "id des"). Ascending by default. string sort_by = 3; // What resource reference to filter on. @@ -147,8 +158,9 @@ message ListRunsRequest { // resource_reference_key.type=EXPERIMENT&resource_reference_key.id=123 ResourceKey resource_reference_key = 4; - // A base-64 encoded, JSON-serialized Filter protocol buffer (see - // filter.proto). + // A url-encoded, JSON-serialized Filter protocol buffer (see + // [filter.proto](https://github.com/kubeflow/pipelines/ + // blob/master/backend/api/filter.proto)). string filter = 5; } @@ -203,6 +215,9 @@ message Run { // Optional input field. Specify which resource this run belongs to. repeated ResourceReference resource_references = 5; + // Optional input field. Specify which Kubernetes service account this run uses. + string service_account = 14; + // Output. The time that the run created. google.protobuf.Timestamp created_at = 6; @@ -228,6 +243,7 @@ message Run { // API. repeated RunMetric metrics = 9; } +// Next field number of Run will be 15 message PipelineRuntime { // Output. 
The runtime JSON manifest of the pipeline, including the status
@@ -245,14 +261,15 @@ message RunDetail {
 }
 
 message RunMetric {
-  // Required. The user defined name of the metric. It must between 1 and 63 characters
-  // long and must conform to the following regular expression:
+  // Required. The user defined name of the metric. It must be between 1 and 63
+  // characters long and must conform to the following regular expression:
   // `[a-z]([-a-z0-9]*[a-z0-9])?`.
   string name = 1;
 
-  // Required. The runtime node ID which reports the metric. The node ID can be found in
-  // the RunDetail.workflow.Status. Metric with same (node_id, name)
-  // are considerd as duplicate. Only the first reporting will be recorded. Max length is 128.
+  // Required. The runtime node ID which reports the metric. The node ID can be
+  // found in the RunDetail.workflow.Status. Metrics with the same (node_id, name)
+  // are considered as duplicates. Only the first reporting will be recorded. Max
+  // length is 128.
   string node_id = 2;
 
   oneof value {
diff --git a/backend/api/swagger/auth.swagger.json b/backend/api/swagger/auth.swagger.json
new file mode 100755
index 00000000000..5b8e5beb0e6
--- /dev/null
+++ b/backend/api/swagger/auth.swagger.json
@@ -0,0 +1,140 @@
+{
+  "swagger": "2.0",
+  "info": {
+    "title": "backend/api/auth.proto",
+    "version": "version not set"
+  },
+  "schemes": [
+    "http",
+    "https"
+  ],
+  "consumes": [
+    "application/json"
+  ],
+  "produces": [
+    "application/json"
+  ],
+  "paths": {
+    "/apis/v1beta1/auth": {
+      "get": {
+        "operationId": "Authorize",
+        "responses": {
+          "200": {
+            "description": "A successful response.",
+            "schema": {
+              "properties": {}
+            }
+          },
+          "default": {
+            "description": "",
+            "schema": {
+              "$ref": "#/definitions/apiStatus"
+            }
+          }
+        },
+        "parameters": [
+          {
+            "name": "namespace",
+            "in": "query",
+            "required": false,
+            "type": "string"
+          },
+          {
+            "name": "resources",
+            "in": "query",
+            "required": false,
+            "type": "string",
+            "enum": [
+              "UNASSIGNED_RESOURCES",
+              "VIEWERS"
+            ],
+            "default": "UNASSIGNED_RESOURCES"
+          },
+          {
+            "name": "verb",
+            "in": "query",
+            "required": false,
+            "type": "string",
+            "enum": [
+              "UNASSIGNED_VERB",
+              "CREATE",
+              "GET",
+              "DELETE"
+            ],
+            "default": "UNASSIGNED_VERB"
+          }
+        ],
+        "tags": [
+          "AuthService"
+        ]
+      }
+    }
+  },
+  "definitions": {
+    "AuthorizeRequestResources": {
+      "type": "string",
+      "enum": [
+        "UNASSIGNED_RESOURCES",
+        "VIEWERS"
+      ],
+      "default": "UNASSIGNED_RESOURCES",
+      "description": "Type of resources in pipelines system."
+    },
+    "AuthorizeRequestVerb": {
+      "type": "string",
+      "enum": [
+        "UNASSIGNED_VERB",
+        "CREATE",
+        "GET",
+        "DELETE"
+      ],
+      "default": "UNASSIGNED_VERB",
+      "description": "Type of verbs that act on the resources."
+    },
+    "apiStatus": {
+      "type": "object",
+      "properties": {
+        "error": {
+          "type": "string"
+        },
+        "code": {
+          "type": "integer",
+          "format": "int32"
+        },
+        "details": {
+          "type": "array",
+          "items": {
+            "$ref": "#/definitions/protobufAny"
+          }
+        }
+      }
+    },
+    "protobufAny": {
+      "type": "object",
+      "properties": {
+        "type_url": {
+          "type": "string",
+          "description": "A URL/resource name that uniquely identifies the type of the serialized\nprotocol buffer message. The last segment of the URL's path must represent\nthe fully qualified name of the type (as in\n`path/google.protobuf.Duration`). The name should be in a canonical form\n(e.g., leading \".\" is not accepted).\n\nIn practice, teams usually precompile into the binary all types that they\nexpect it to use in the context of Any. 
However, for URLs which use the\nscheme `http`, `https`, or no scheme, one can optionally set up a type\nserver that maps type URLs to message definitions as follows:\n\n* If no scheme is provided, `https` is assumed.\n* An HTTP GET on the URL must yield a [google.protobuf.Type][]\n value in binary format, or produce an error.\n* Applications are allowed to cache lookup results based on the\n URL, or have them precompiled into a binary to avoid any\n lookup. Therefore, binary compatibility needs to be preserved\n on changes to types. (Use versioned type names to manage\n breaking changes.)\n\nNote: this functionality is not currently available in the official\nprotobuf release, and it is not used for type URLs beginning with\ntype.googleapis.com.\n\nSchemes other than `http`, `https` (or the empty scheme) might be\nused with implementation specific semantics." + }, + "value": { + "type": "string", + "format": "byte", + "description": "Must be a valid serialized protocol buffer of the above specified type." + } + }, + "description": "`Any` contains an arbitrary serialized protocol buffer message along with a\nURL that describes the type of the serialized message.\n\nProtobuf library provides support to pack/unpack Any values in the form\nof utility functions or additional generated methods of the Any type.\n\nExample 1: Pack and unpack a message in C++.\n\n Foo foo = ...;\n Any any;\n any.PackFrom(foo);\n ...\n if (any.UnpackTo(\u0026foo)) {\n ...\n }\n\nExample 2: Pack and unpack a message in Java.\n\n Foo foo = ...;\n Any any = Any.pack(foo);\n ...\n if (any.is(Foo.class)) {\n foo = any.unpack(Foo.class);\n }\n\n Example 3: Pack and unpack a message in Python.\n\n foo = Foo(...)\n any = Any()\n any.Pack(foo)\n ...\n if any.Is(Foo.DESCRIPTOR):\n any.Unpack(foo)\n ...\n\n Example 4: Pack and unpack a message in Go\n\n foo := \u0026pb.Foo{...}\n any, err := ptypes.MarshalAny(foo)\n ...\n foo := \u0026pb.Foo{}\n if err := ptypes.UnmarshalAny(any, foo); err != nil {\n ...\n }\n\nThe pack methods provided by protobuf library will by default use\n'type.googleapis.com/full.type.name' as the type URL and the unpack\nmethods only use the fully qualified type name after the last '/'\nin the type URL, for example \"foo.bar.com/x/y.z\" will yield type\nname \"y.z\".\n\n\nJSON\n====\nThe JSON representation of an `Any` value uses the regular\nrepresentation of the deserialized, embedded message, with an\nadditional field `@type` which contains the type URL. Example:\n\n package google.profile;\n message Person {\n string first_name = 1;\n string last_name = 2;\n }\n\n {\n \"@type\": \"type.googleapis.com/google.profile.Person\",\n \"firstName\": \u003cstring\u003e,\n \"lastName\": \u003cstring\u003e\n }\n\nIf the embedded message type is well-known and has a custom JSON\nrepresentation, that representation will be embedded adding a field\n`value` which holds the custom JSON in addition to the `@type`\nfield. 
Example (for message [google.protobuf.Duration][]):\n\n {\n \"@type\": \"type.googleapis.com/google.protobuf.Duration\",\n \"value\": \"1.212s\"\n }" + } + }, + "securityDefinitions": { + "Bearer": { + "type": "apiKey", + "name": "authorization", + "in": "header" + } + }, + "security": [ + { + "Bearer": [] + } + ] +} diff --git a/backend/api/swagger/experiment.swagger.json b/backend/api/swagger/experiment.swagger.json index 231960c43c7..8c5976150a1 100644 --- a/backend/api/swagger/experiment.swagger.json +++ b/backend/api/swagger/experiment.swagger.json @@ -17,6 +17,7 @@ "paths": { "/apis/v1beta1/experiments": { "get": { + "summary": "Find all experiments.", "operationId": "ListExperiment", "responses": { "200": { @@ -55,7 +56,30 @@ }, { "name": "filter", - "description": "A base-64 encoded, JSON-serialized Filter protocol buffer (see\nfilter.proto).", + "description": "A url-encoded, JSON-serialized Filter protocol buffer (see\n[filter.proto](https://github.com/kubeflow/pipelines/\nblob/master/backend/api/filter.proto)).", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "resource_reference_key.type", + "description": "The type of the resource that referred to.", + "in": "query", + "required": false, + "type": "string", + "enum": [ + "UNKNOWN_RESOURCE_TYPE", + "EXPERIMENT", + "JOB", + "PIPELINE", + "PIPELINE_VERSION", + "NAMESPACE" + ], + "default": "UNKNOWN_RESOURCE_TYPE" + }, + { + "name": "resource_reference_key.id", + "description": "The ID of the resource that referred to.", "in": "query", "required": false, "type": "string" @@ -66,6 +90,7 @@ ] }, "post": { + "summary": "Create a new experiment.", "operationId": "CreateExperiment", "responses": { "200": { @@ -84,7 +109,7 @@ "parameters": [ { "name": "body", - "description": "The experiment to be created", + "description": "The experiment to be created.", "in": "body", "required": true, "schema": { @@ -99,6 +124,7 @@ }, "/apis/v1beta1/experiments/{id}": { "get": { + "summary": "Find a specific experiment by ID.", "operationId": "GetExperiment", "responses": { "200": { @@ -117,7 +143,7 @@ "parameters": [ { "name": "id", - "description": "The ID of the experiment to be retrieved", + "description": "The ID of the experiment to be retrieved.", "in": "path", "required": true, "type": "string" @@ -128,6 +154,7 @@ ] }, "delete": { + "summary": "Delete an experiment.", "operationId": "DeleteExperiment", "responses": { "200": { @@ -156,9 +183,80 @@ "ExperimentService" ] } + }, + "/apis/v1beta1/experiments/{id}:archive": { + "post": { + "summary": "Archive an experiment.", + "operationId": "ArchiveExperiment", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "properties": {} + } + }, + "default": { + "description": "", + "schema": { + "$ref": "#/definitions/apiStatus" + } + } + }, + "parameters": [ + { + "name": "id", + "in": "path", + "required": true, + "type": "string" + } + ], + "tags": [ + "ExperimentService" + ] + } + }, + "/apis/v1beta1/experiments/{id}:unarchive": { + "post": { + "summary": "Restore an archived experiment.", + "operationId": "UnarchiveExperiment", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "properties": {} + } + }, + "default": { + "description": "", + "schema": { + "$ref": "#/definitions/apiStatus" + } + } + }, + "parameters": [ + { + "name": "id", + "in": "path", + "required": true, + "type": "string" + } + ], + "tags": [ + "ExperimentService" + ] + } } }, "definitions": { + "ExperimentStorageState": { + 
"type": "string", + "enum": [ + "STORAGESTATE_UNSPECIFIED", + "STORAGESTATE_AVAILABLE", + "STORAGESTATE_ARCHIVED" + ], + "default": "STORAGESTATE_UNSPECIFIED" + }, "apiExperiment": { "type": "object", "properties": { @@ -178,6 +276,16 @@ "type": "string", "format": "date-time", "description": "Output. The time that the experiment created." + }, + "resource_references": { + "type": "array", + "items": { + "$ref": "#/definitions/apiResourceReference" + }, + "description": "Optional input field. Specify which resource this run belongs to.\nFor Experiment, the only valid resource reference is a single Namespace." + }, + "storage_state": { + "$ref": "#/definitions/ExperimentStorageState" } } }, @@ -202,6 +310,56 @@ } } }, + "apiRelationship": { + "type": "string", + "enum": [ + "UNKNOWN_RELATIONSHIP", + "OWNER", + "CREATOR" + ], + "default": "UNKNOWN_RELATIONSHIP" + }, + "apiResourceKey": { + "type": "object", + "properties": { + "type": { + "$ref": "#/definitions/apiResourceType", + "description": "The type of the resource that referred to." + }, + "id": { + "type": "string", + "description": "The ID of the resource that referred to." + } + } + }, + "apiResourceReference": { + "type": "object", + "properties": { + "key": { + "$ref": "#/definitions/apiResourceKey" + }, + "name": { + "type": "string", + "description": "The name of the resource that referred to." + }, + "relationship": { + "$ref": "#/definitions/apiRelationship", + "description": "Required field. The relationship from referred resource to the object." + } + } + }, + "apiResourceType": { + "type": "string", + "enum": [ + "UNKNOWN_RESOURCE_TYPE", + "EXPERIMENT", + "JOB", + "PIPELINE", + "PIPELINE_VERSION", + "NAMESPACE" + ], + "default": "UNKNOWN_RESOURCE_TYPE" + }, "apiStatus": { "type": "object", "properties": { diff --git a/backend/api/swagger/job.swagger.json b/backend/api/swagger/job.swagger.json index 7e4ffb27192..e71c7ffd941 100644 --- a/backend/api/swagger/job.swagger.json +++ b/backend/api/swagger/job.swagger.json @@ -17,6 +17,7 @@ "paths": { "/apis/v1beta1/jobs": { "get": { + "summary": "Find all jobs.", "operationId": "ListJobs", "responses": { "200": { @@ -48,7 +49,7 @@ }, { "name": "sort_by", - "description": "Can be format of \"field_name\", \"field_name asc\" or \"field_name des\"\nAscending by default.", + "description": "Can be format of \"field_name\", \"field_name asc\" or \"field_name des\".\nAscending by default.", "in": "query", "required": false, "type": "string" @@ -62,7 +63,10 @@ "enum": [ "UNKNOWN_RESOURCE_TYPE", "EXPERIMENT", - "JOB" + "JOB", + "PIPELINE", + "PIPELINE_VERSION", + "NAMESPACE" ], "default": "UNKNOWN_RESOURCE_TYPE" }, @@ -75,7 +79,7 @@ }, { "name": "filter", - "description": "A base-64 encoded, JSON-serialized Filter protocol buffer (see\nfilter.proto).", + "description": "A url-encoded, JSON-serialized Filter protocol buffer (see\n[filter.proto](https://github.com/kubeflow/pipelines/\nblob/master/backend/api/filter.proto)).", "in": "query", "required": false, "type": "string" @@ -86,6 +90,7 @@ ] }, "post": { + "summary": "Create a new job.", "operationId": "CreateJob", "responses": { "200": { @@ -119,6 +124,7 @@ }, "/apis/v1beta1/jobs/{id}": { "get": { + "summary": "Find a specific job by ID.", "operationId": "GetJob", "responses": { "200": { @@ -148,6 +154,7 @@ ] }, "delete": { + "summary": "Delete a job.", "operationId": "DeleteJob", "responses": { "200": { @@ -179,6 +186,7 @@ }, "/apis/v1beta1/jobs/{id}/disable": { "post": { + "summary": "Stops a job and all its associated runs. 
The job is not deleted.",
         "operationId": "DisableJob",
         "responses": {
           "200": {
@@ -210,6 +218,7 @@
     },
     "/apis/v1beta1/jobs/{id}/enable": {
       "post": {
+        "summary": "Restarts a job that was previously stopped. All runs associated with the job will continue.",
         "operationId": "EnableJob",
         "responses": {
           "200": {
@@ -266,7 +275,7 @@
       },
       "cron": {
         "type": "string",
-        "title": "The cron string. For details how to compose a cron, visit ttps://en.wikipedia.org/wiki/Cron"
+        "title": "The cron string. For details how to compose a cron, visit\nhttps://en.wikipedia.org/wiki/Cron"
       }
     },
     "title": "CronSchedule allow scheduling the job with unix-like cron"
@@ -295,7 +304,11 @@
         "items": {
           "$ref": "#/definitions/apiResourceReference"
         },
-        "description": "Optional input field. Specify which resource this run belongs to."
+        "description": "Optional input field. Specify which resource this job belongs to."
+      },
+      "service_account": {
+        "type": "string",
+        "description": "Optional input field. Specify which Kubernetes service account this job uses."
       },
       "max_concurrency": {
         "type": "string",
@@ -331,6 +344,11 @@
         "type": "boolean",
         "format": "boolean",
         "description": "Input. Whether the job is enabled or not."
+      },
+      "no_catchup": {
+        "type": "boolean",
+        "format": "boolean",
+        "description": "Optional input field. Whether the job should catch up if behind schedule.\nIf true, the job will only schedule the latest interval if behind schedule.\nIf false, the job will catch up on each past interval."
       }
     }
   },
@@ -456,7 +474,10 @@
         "enum": [
           "UNKNOWN_RESOURCE_TYPE",
           "EXPERIMENT",
-          "JOB"
+          "JOB",
+          "PIPELINE",
+          "PIPELINE_VERSION",
+          "NAMESPACE"
         ],
         "default": "UNKNOWN_RESOURCE_TYPE"
       },
diff --git a/backend/api/swagger/kfp_api_single_file.swagger.json b/backend/api/swagger/kfp_api_single_file.swagger.json
index 88c96c352cf..86f1df96fbf 100644
--- a/backend/api/swagger/kfp_api_single_file.swagger.json
+++ b/backend/api/swagger/kfp_api_single_file.swagger.json
@@ -2,7 +2,7 @@
   "swagger": "2.0",
   "info": {
     "title": "Kubeflow Pipelines API",
-    "version": "0.1.20",
+    "version": "0.1.38",
     "description": "This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition."
   },
   "schemes": [
@@ -18,6 +18,7 @@
   "paths": {
     "/apis/v1beta1/runs": {
       "get": {
+        "summary": "Find all runs.",
         "operationId": "ListRuns",
         "responses": {
           "200": {
@@ -49,7 +50,7 @@
         },
         {
           "name": "sort_by",
-          "description": "Can be format of \"field_name\", \"field_name asc\" or \"field_name des\"\nAscending by default.",
+          "description": "Can be format of \"field_name\", \"field_name asc\" or \"field_name des\"\n(Example, \"name asc\" or \"id des\"). 
Ascending by default.", "in": "query", "required": false, "type": "string" @@ -63,7 +64,10 @@ "enum": [ "UNKNOWN_RESOURCE_TYPE", "EXPERIMENT", - "JOB" + "JOB", + "PIPELINE", + "PIPELINE_VERSION", + "NAMESPACE" ], "default": "UNKNOWN_RESOURCE_TYPE" }, @@ -76,7 +80,7 @@ }, { "name": "filter", - "description": "A base-64 encoded, JSON-serialized Filter protocol buffer (see\nfilter.proto).", + "description": "A url-encoded, JSON-serialized Filter protocol buffer (see\n[filter.proto](https://github.com/kubeflow/pipelines/\nblob/master/backend/api/filter.proto)).", "in": "query", "required": false, "type": "string" @@ -87,6 +91,7 @@ ] }, "post": { + "summary": "Create a new run.", "operationId": "CreateRun", "responses": { "200": { @@ -119,6 +124,7 @@ }, "/apis/v1beta1/runs/{id}": { "delete": { + "summary": "Delete a run.", "operationId": "DeleteRun", "responses": { "200": { @@ -149,6 +155,7 @@ }, "/apis/v1beta1/runs/{id}:archive": { "post": { + "summary": "Archive a run.", "operationId": "ArchiveRun", "responses": { "200": { @@ -179,6 +186,7 @@ }, "/apis/v1beta1/runs/{id}:unarchive": { "post": { + "summary": "Restore an archived run.", "operationId": "UnarchiveRun", "responses": { "200": { @@ -209,6 +217,7 @@ }, "/apis/v1beta1/runs/{run_id}": { "get": { + "summary": "Find a specific run by ID.", "operationId": "GetRun", "responses": { "200": { @@ -239,6 +248,7 @@ }, "/apis/v1beta1/runs/{run_id}/nodes/{node_id}/artifacts/{artifact_name}:read": { "get": { + "summary": "Find a run's artifact data.", "operationId": "ReadArtifact", "responses": { "200": { @@ -284,6 +294,7 @@ }, "/apis/v1beta1/runs/{run_id}/retry": { "post": { + "summary": "Re-initiate a failed or terminated run.", "operationId": "RetryRun", "responses": { "200": { @@ -314,6 +325,7 @@ }, "/apis/v1beta1/runs/{run_id}/terminate": { "post": { + "summary": "Terminate an active run.", "operationId": "TerminateRun", "responses": { "200": { @@ -344,7 +356,7 @@ }, "/apis/v1beta1/runs/{run_id}:reportMetrics": { "post": { - "summary": "ReportRunMetrics reports metrics of a run. Each metric is reported in its\nown transaction, so this API accepts partial failures. Metric can be uniquely\nidentified by (run_id, node_id, name). Duplicate reporting will be\nignored by the API. First reporting wins.", + "summary": "ReportRunMetrics reports metrics of a run. Each metric is reported in its\nown transaction, so this API accepts partial failures. Metric can be\nuniquely identified by (run_id, node_id, name). Duplicate reporting will be\nignored by the API. 
First reporting wins.", "operationId": "ReportRunMetrics", "responses": { "200": { @@ -384,6 +396,7 @@ }, "/apis/v1beta1/jobs": { "get": { + "summary": "Find all jobs.", "operationId": "ListJobs", "responses": { "200": { @@ -415,7 +428,7 @@ }, { "name": "sort_by", - "description": "Can be format of \"field_name\", \"field_name asc\" or \"field_name des\"\nAscending by default.", + "description": "Can be format of \"field_name\", \"field_name asc\" or \"field_name des\".\nAscending by default.", "in": "query", "required": false, "type": "string" @@ -429,7 +442,10 @@ "enum": [ "UNKNOWN_RESOURCE_TYPE", "EXPERIMENT", - "JOB" + "JOB", + "PIPELINE", + "PIPELINE_VERSION", + "NAMESPACE" ], "default": "UNKNOWN_RESOURCE_TYPE" }, @@ -442,7 +458,7 @@ }, { "name": "filter", - "description": "A base-64 encoded, JSON-serialized Filter protocol buffer (see\nfilter.proto).", + "description": "A url-encoded, JSON-serialized Filter protocol buffer (see\n[filter.proto](https://github.com/kubeflow/pipelines/\nblob/master/backend/api/filter.proto)).", "in": "query", "required": false, "type": "string" @@ -453,6 +469,7 @@ ] }, "post": { + "summary": "Create a new job.", "operationId": "CreateJob", "responses": { "200": { @@ -486,6 +503,7 @@ }, "/apis/v1beta1/jobs/{id}": { "get": { + "summary": "Find a specific job by ID.", "operationId": "GetJob", "responses": { "200": { @@ -515,6 +533,7 @@ ] }, "delete": { + "summary": "Delete a job.", "operationId": "DeleteJob", "responses": { "200": { @@ -546,6 +565,7 @@ }, "/apis/v1beta1/jobs/{id}/disable": { "post": { + "summary": "Stops a job and all its associated runs. The job is not deleted.", "operationId": "DisableJob", "responses": { "200": { @@ -577,6 +597,7 @@ }, "/apis/v1beta1/jobs/{id}/enable": { "post": { + "summary": "Restarts a job that was previously stopped. 
All runs associated with the job will continue.", "operationId": "EnableJob", "responses": { "200": { @@ -606,8 +627,202 @@ ] } }, + "/apis/v1beta1/pipeline_versions": { + "get": { + "operationId": "ListPipelineVersions", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/apiListPipelineVersionsResponse" + } + }, + "default": { + "description": "", + "schema": { + "$ref": "#/definitions/apiStatus" + } + } + }, + "parameters": [ + { + "name": "resource_key.type", + "description": "The type of the resource that referred to.", + "in": "query", + "required": false, + "type": "string", + "enum": [ + "UNKNOWN_RESOURCE_TYPE", + "EXPERIMENT", + "JOB", + "PIPELINE", + "PIPELINE_VERSION", + "NAMESPACE" + ], + "default": "UNKNOWN_RESOURCE_TYPE" + }, + { + "name": "resource_key.id", + "description": "The ID of the resource that referred to.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "page_size", + "in": "query", + "required": false, + "type": "integer", + "format": "int32" + }, + { + "name": "page_token", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "sort_by", + "description": "Can be format of \"field_name\", \"field_name asc\" or \"field_name des\"\nAscending by default.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "filter", + "description": "A base-64 encoded, JSON-serialized Filter protocol buffer (see\nfilter.proto).", + "in": "query", + "required": false, + "type": "string" + } + ], + "tags": [ + "PipelineService" + ] + }, + "post": { + "operationId": "CreatePipelineVersion", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/apiPipelineVersion" + } + }, + "default": { + "description": "", + "schema": { + "$ref": "#/definitions/apiStatus" + } + } + }, + "parameters": [ + { + "name": "body", + "description": "ResourceReference inside PipelineVersion specifies the pipeline that this\nversion belongs to.", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/apiPipelineVersion" + } + } + ], + "tags": [ + "PipelineService" + ] + } + }, + "/apis/v1beta1/pipeline_versions/{version_id}": { + "get": { + "operationId": "GetPipelineVersion", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/apiPipelineVersion" + } + }, + "default": { + "description": "", + "schema": { + "$ref": "#/definitions/apiStatus" + } + } + }, + "parameters": [ + { + "name": "version_id", + "in": "path", + "required": true, + "type": "string" + } + ], + "tags": [ + "PipelineService" + ] + }, + "delete": { + "operationId": "DeletePipelineVersion", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "properties": {} + } + }, + "default": { + "description": "", + "schema": { + "$ref": "#/definitions/apiStatus" + } + } + }, + "parameters": [ + { + "name": "version_id", + "in": "path", + "required": true, + "type": "string" + } + ], + "tags": [ + "PipelineService" + ] + } + }, + "/apis/v1beta1/pipeline_versions/{version_id}/templates": { + "get": { + "operationId": "GetPipelineVersionTemplate", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/apiGetTemplateResponse" + } + }, + "default": { + "description": "", + "schema": { + "$ref": "#/definitions/apiStatus" + } + } + }, + "parameters": [ + { + "name": "version_id", + "in": "path", + "required": 
true, + "type": "string" + } + ], + "tags": [ + "PipelineService" + ] + } + }, "/apis/v1beta1/pipelines": { "get": { + "summary": "Find all pipelines.", "operationId": "ListPipelines", "responses": { "200": { @@ -646,7 +861,7 @@ }, { "name": "filter", - "description": "A base-64 encoded, JSON-serialized Filter protocol buffer (see\nfilter.proto).", + "description": "A url-encoded, JSON-serialized Filter protocol buffer (see\n[filter.proto](https://github.com/kubeflow/pipelines/\nblob/master/backend/api/filter.proto)).", "in": "query", "required": false, "type": "string" @@ -657,6 +872,7 @@ ] }, "post": { + "summary": "Add a pipeline.", "operationId": "CreatePipeline", "responses": { "200": { @@ -689,6 +905,7 @@ }, "/apis/v1beta1/pipelines/{id}": { "get": { + "summary": "Find a specific pipeline by ID.", "operationId": "GetPipeline", "responses": { "200": { @@ -717,6 +934,7 @@ ] }, "delete": { + "summary": "Delete a pipeline.", "operationId": "DeletePipeline", "responses": { "200": { @@ -747,6 +965,7 @@ }, "/apis/v1beta1/pipelines/{id}/templates": { "get": { + "summary": "Returns a single YAML template that contains the description, parameters, and metadata associated with the pipeline provided.", "operationId": "GetTemplate", "responses": { "200": { @@ -777,6 +996,7 @@ }, "/apis/v1beta1/experiments": { "get": { + "summary": "Find all experiments.", "operationId": "ListExperiment", "responses": { "200": { @@ -815,7 +1035,30 @@ }, { "name": "filter", - "description": "A base-64 encoded, JSON-serialized Filter protocol buffer (see\nfilter.proto).", + "description": "A url-encoded, JSON-serialized Filter protocol buffer (see\n[filter.proto](https://github.com/kubeflow/pipelines/\nblob/master/backend/api/filter.proto)).", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "resource_reference_key.type", + "description": "The type of the resource that referred to.", + "in": "query", + "required": false, + "type": "string", + "enum": [ + "UNKNOWN_RESOURCE_TYPE", + "EXPERIMENT", + "JOB", + "PIPELINE", + "PIPELINE_VERSION", + "NAMESPACE" + ], + "default": "UNKNOWN_RESOURCE_TYPE" + }, + { + "name": "resource_reference_key.id", + "description": "The ID of the resource that referred to.", "in": "query", "required": false, "type": "string" @@ -826,6 +1069,7 @@ ] }, "post": { + "summary": "Create a new experiment.", "operationId": "CreateExperiment", "responses": { "200": { @@ -844,7 +1088,7 @@ "parameters": [ { "name": "body", - "description": "The experiment to be created", + "description": "The experiment to be created.", "in": "body", "required": true, "schema": { @@ -859,6 +1103,7 @@ }, "/apis/v1beta1/experiments/{id}": { "get": { + "summary": "Find a specific experiment by ID.", "operationId": "GetExperiment", "responses": { "200": { @@ -877,7 +1122,7 @@ "parameters": [ { "name": "id", - "description": "The ID of the experiment to be retrieved", + "description": "The ID of the experiment to be retrieved.", "in": "path", "required": true, "type": "string" @@ -888,6 +1133,7 @@ ] }, "delete": { + "summary": "Delete an experiment.", "operationId": "DeleteExperiment", "responses": { "200": { @@ -917,6 +1163,68 @@ ] } }, + "/apis/v1beta1/experiments/{id}:archive": { + "post": { + "summary": "Archive an experiment.", + "operationId": "ArchiveExperiment", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "properties": {} + } + }, + "default": { + "description": "", + "schema": { + "$ref": "#/definitions/apiStatus" + } + } + }, + "parameters": [ 
+ { + "name": "id", + "in": "path", + "required": true, + "type": "string" + } + ], + "tags": [ + "ExperimentService" + ] + } + }, + "/apis/v1beta1/experiments/{id}:unarchive": { + "post": { + "summary": "Restore an archived experiment.", + "operationId": "UnarchiveExperiment", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "properties": {} + } + }, + "default": { + "description": "", + "schema": { + "$ref": "#/definitions/apiStatus" + } + } + }, + "parameters": [ + { + "name": "id", + "in": "path", + "required": true, + "type": "string" + } + ], + "tags": [ + "ExperimentService" + ] + } + }, "/apis/v1beta1/pipelines/upload": { "post": { "operationId": "UploadPipeline", @@ -953,6 +1261,61 @@ "in": "query", "required": false, "type": "string" + }, + { + "name": "description", + "in": "query", + "required": false, + "type": "string" + } + ], + "tags": [ + "PipelineUploadService" + ] + } + }, + "/apis/v1beta1/pipelines/upload_version": { + "post": { + "operationId": "UploadPipelineVersion", + "consumes": [ + "multipart/form-data" + ], + "produces": [ + "application/json" + ], + "responses": { + "200": { + "description": "", + "schema": { + "$ref": "#/definitions/apiPipelineVersion" + } + }, + "default": { + "description": "", + "schema": { + "$ref": "#/definitions/apiStatus" + } + } + }, + "parameters": [ + { + "name": "uploadfile", + "in": "formData", + "required": true, + "type": "file", + "description": "The pipeline to upload. Maximum size of 32MB is supported." + }, + { + "name": "name", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "pipelineid", + "in": "query", + "required": false, + "type": "string" } ], "tags": [ @@ -1163,7 +1526,10 @@ "enum": [ "UNKNOWN_RESOURCE_TYPE", "EXPERIMENT", - "JOB" + "JOB", + "PIPELINE", + "PIPELINE_VERSION", + "NAMESPACE" ], "default": "UNKNOWN_RESOURCE_TYPE" }, @@ -1196,6 +1562,10 @@ }, "description": "Optional input field. Specify which resource this run belongs to." }, + "service_account": { + "type": "string", + "description": "Optional input field. Specify which Kubernetes service account this run uses." + }, "created_at": { "type": "string", "format": "date-time", @@ -1244,11 +1614,11 @@ "properties": { "name": { "type": "string", - "description": "Required. The user defined name of the metric. It must between 1 and 63 characters\nlong and must conform to the following regular expression:\n`[a-z]([-a-z0-9]*[a-z0-9])?`." + "description": "Required. The user defined name of the metric. It must between 1 and 63\ncharacters long and must conform to the following regular expression:\n`[a-z]([-a-z0-9]*[a-z0-9])?`." }, "node_id": { "type": "string", - "description": "Required. The runtime node ID which reports the metric. The node ID can be found in\nthe RunDetail.workflow.Status. Metric with same (node_id, name)\nare considerd as duplicate. Only the first reporting will be recorded. Max length is 128." + "description": "Required. The runtime node ID which reports the metric. The node ID can be\nfound in the RunDetail.workflow.Status. Metric with same (node_id, name)\nare considerd as duplicate. Only the first reporting will be recorded. Max\nlength is 128." }, "number_value": { "type": "number", @@ -1319,7 +1689,7 @@ }, "cron": { "type": "string", - "title": "The cron string. For details how to compose a cron, visit ttps://en.wikipedia.org/wiki/Cron" + "title": "The cron string. 
For details how to compose a cron, visit\nhttps://en.wikipedia.org/wiki/Cron"
        }
      },
      "title": "CronSchedule allows scheduling the job with unix-like cron"
    },
@@ -1348,7 +1718,11 @@
          "items": {
            "$ref": "#/definitions/apiResourceReference"
          },
-          "description": "Optional input field. Specify which resource this run belongs to."
+          "description": "Optional input field. Specify which resource this job belongs to."
+        },
+        "service_account": {
+          "type": "string",
+          "description": "Optional input field. Specify which Kubernetes service account this job uses."
        },
        "max_concurrency": {
          "type": "string",
@@ -1384,6 +1758,11 @@
          "type": "boolean",
          "format": "boolean",
          "description": "Input. Whether the job is enabled or not."
+        },
+        "no_catchup": {
+          "type": "boolean",
+          "format": "boolean",
+          "description": "Optional input field. Whether the job should catch up if behind schedule.\nIf true, the job will only schedule the latest interval if behind schedule.\nIf false, the job will catch up on each past interval."
        }
      }
    },
@@ -1447,6 +1826,24 @@
        }
      }
    },
+    "apiListPipelineVersionsResponse": {
+      "type": "object",
+      "properties": {
+        "versions": {
+          "type": "array",
+          "items": {
+            "$ref": "#/definitions/apiPipelineVersion"
+          }
+        },
+        "next_page_token": {
+          "type": "string"
+        },
+        "total_size": {
+          "type": "integer",
+          "format": "int32"
+        }
+      }
+    },
    "apiListPipelinesResponse": {
      "type": "object",
      "properties": {
@@ -1490,15 +1887,60 @@
          "items": {
            "$ref": "#/definitions/apiParameter"
          },
-          "description": "Output. The input parameters for this pipeline."
+          "description": "Output. The input parameters for this pipeline.\nTODO(jingzhang36): replace this parameters field with the parameters field\ninside PipelineVersion when all usage of the former has been changed to use\nthe latter."
        },
        "url": {
          "$ref": "#/definitions/apiUrl",
-          "description": "The URL to the source of the pipeline. This is required when creating the\npipeine through CreatePipeline API."
+          "description": "The URL to the source of the pipeline. This is required when creating the\npipeline through CreatePipeline API.\nTODO(jingzhang36): replace this url field with the code_source_urls field\ninside PipelineVersion when all usage of the former has been changed to use\nthe latter."
        },
        "error": {
          "type": "string",
          "description": "In case any error happens retrieving a pipeline field, only pipeline ID\nand the error message is returned. Client has the flexibility of choosing\nhow to handle error. This is especially useful during listing call."
+        },
+        "default_version": {
+          "$ref": "#/definitions/apiPipelineVersion",
+          "title": "Output only. The default version of the pipeline. As of now, the latest\nversion is used as default. (In the future, if desired by customers, we\ncan allow them to set default version.)",
+          "readOnly": true
+        }
+      }
+    },
+    "apiPipelineVersion": {
+      "type": "object",
+      "properties": {
+        "id": {
+          "type": "string",
+          "description": "Output. Unique version ID. Generated by API server."
+        },
+        "name": {
+          "type": "string",
+          "description": "Optional input field. Version name provided by user."
+        },
+        "created_at": {
+          "type": "string",
+          "format": "date-time",
+          "description": "Output. The time this pipeline version is created."
+        },
+        "parameters": {
+          "type": "array",
+          "items": {
+            "$ref": "#/definitions/apiParameter"
+          },
+          "description": "Output. The input parameters for this pipeline."
+        },
+        "code_source_url": {
+          "type": "string",
+          "description": "Input. Optional. Pipeline version code source."
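The `service_account` and `no_catchup` fields added above extend the existing `apiJob` message. As a hedged sketch of how a client might exercise them (the `/apis/v1beta1/jobs` route comes from the JobService side of this API; the host, pipeline ID, and service account name below are placeholders):

import requests

API_HOST = "http://localhost:8888"  # placeholder

job = {
    "name": "nightly-training",
    "pipeline_spec": {"pipeline_id": "<pipeline-id>"},
    # New optional field: run the job's pods under a specific service account.
    "service_account": "pipeline-runner",
    "trigger": {"cron_schedule": {"cron": "0 0 2 * * *"}},  # cron string as described above
    "max_concurrency": "1",
    # New optional field: skip missed intervals instead of backfilling them.
    "no_catchup": True,
    "enabled": True,
}

resp = requests.post(API_HOST + "/apis/v1beta1/jobs", json=job)
resp.raise_for_status()
print(resp.json()["id"])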
+ }, + "package_url": { + "$ref": "#/definitions/apiUrl", + "description": "Input. Required. Pipeline version package url.\nWhe calling CreatePipelineVersion API method, need to provide one package\nfile location." + }, + "resource_references": { + "type": "array", + "items": { + "$ref": "#/definitions/apiResourceReference" + }, + "description": "Input. Required. E.g., specify which pipeline this pipeline version belongs\nto." } } }, @@ -1510,6 +1952,15 @@ } } }, + "ExperimentStorageState": { + "type": "string", + "enum": [ + "STORAGESTATE_UNSPECIFIED", + "STORAGESTATE_AVAILABLE", + "STORAGESTATE_ARCHIVED" + ], + "default": "STORAGESTATE_UNSPECIFIED" + }, "apiExperiment": { "type": "object", "properties": { @@ -1529,6 +1980,16 @@ "type": "string", "format": "date-time", "description": "Output. The time that the experiment created." + }, + "resource_references": { + "type": "array", + "items": { + "$ref": "#/definitions/apiResourceReference" + }, + "description": "Optional input field. Specify which resource this run belongs to.\nFor Experiment, the only valid resource reference is a single Namespace." + }, + "storage_state": { + "$ref": "#/definitions/ExperimentStorageState" } } }, diff --git a/backend/api/swagger/pipeline.swagger.json b/backend/api/swagger/pipeline.swagger.json index 5f77efa2d1e..f83c5f06def 100644 --- a/backend/api/swagger/pipeline.swagger.json +++ b/backend/api/swagger/pipeline.swagger.json @@ -15,8 +15,202 @@ "application/json" ], "paths": { + "/apis/v1beta1/pipeline_versions": { + "get": { + "operationId": "ListPipelineVersions", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/apiListPipelineVersionsResponse" + } + }, + "default": { + "description": "", + "schema": { + "$ref": "#/definitions/apiStatus" + } + } + }, + "parameters": [ + { + "name": "resource_key.type", + "description": "The type of the resource that referred to.", + "in": "query", + "required": false, + "type": "string", + "enum": [ + "UNKNOWN_RESOURCE_TYPE", + "EXPERIMENT", + "JOB", + "PIPELINE", + "PIPELINE_VERSION", + "NAMESPACE" + ], + "default": "UNKNOWN_RESOURCE_TYPE" + }, + { + "name": "resource_key.id", + "description": "The ID of the resource that referred to.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "page_size", + "in": "query", + "required": false, + "type": "integer", + "format": "int32" + }, + { + "name": "page_token", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "sort_by", + "description": "Can be format of \"field_name\", \"field_name asc\" or \"field_name des\"\nAscending by default.", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "filter", + "description": "A base-64 encoded, JSON-serialized Filter protocol buffer (see\nfilter.proto).", + "in": "query", + "required": false, + "type": "string" + } + ], + "tags": [ + "PipelineService" + ] + }, + "post": { + "operationId": "CreatePipelineVersion", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/apiPipelineVersion" + } + }, + "default": { + "description": "", + "schema": { + "$ref": "#/definitions/apiStatus" + } + } + }, + "parameters": [ + { + "name": "body", + "description": "ResourceReference inside PipelineVersion specifies the pipeline that this\nversion belongs to.", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/apiPipelineVersion" + } + } + ], + "tags": [ + "PipelineService" + ] + } + }, 
+ "/apis/v1beta1/pipeline_versions/{version_id}": { + "get": { + "operationId": "GetPipelineVersion", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/apiPipelineVersion" + } + }, + "default": { + "description": "", + "schema": { + "$ref": "#/definitions/apiStatus" + } + } + }, + "parameters": [ + { + "name": "version_id", + "in": "path", + "required": true, + "type": "string" + } + ], + "tags": [ + "PipelineService" + ] + }, + "delete": { + "operationId": "DeletePipelineVersion", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "properties": {} + } + }, + "default": { + "description": "", + "schema": { + "$ref": "#/definitions/apiStatus" + } + } + }, + "parameters": [ + { + "name": "version_id", + "in": "path", + "required": true, + "type": "string" + } + ], + "tags": [ + "PipelineService" + ] + } + }, + "/apis/v1beta1/pipeline_versions/{version_id}/templates": { + "get": { + "operationId": "GetPipelineVersionTemplate", + "responses": { + "200": { + "description": "A successful response.", + "schema": { + "$ref": "#/definitions/apiGetTemplateResponse" + } + }, + "default": { + "description": "", + "schema": { + "$ref": "#/definitions/apiStatus" + } + } + }, + "parameters": [ + { + "name": "version_id", + "in": "path", + "required": true, + "type": "string" + } + ], + "tags": [ + "PipelineService" + ] + } + }, "/apis/v1beta1/pipelines": { "get": { + "summary": "Find all pipelines.", "operationId": "ListPipelines", "responses": { "200": { @@ -55,7 +249,7 @@ }, { "name": "filter", - "description": "A base-64 encoded, JSON-serialized Filter protocol buffer (see\nfilter.proto).", + "description": "A url-encoded, JSON-serialized Filter protocol buffer (see\n[filter.proto](https://github.com/kubeflow/pipelines/\nblob/master/backend/api/filter.proto)).", "in": "query", "required": false, "type": "string" @@ -66,6 +260,7 @@ ] }, "post": { + "summary": "Add a pipeline.", "operationId": "CreatePipeline", "responses": { "200": { @@ -98,6 +293,7 @@ }, "/apis/v1beta1/pipelines/{id}": { "get": { + "summary": "Find a specific pipeline by ID.", "operationId": "GetPipeline", "responses": { "200": { @@ -126,6 +322,7 @@ ] }, "delete": { + "summary": "Delete a pipeline.", "operationId": "DeletePipeline", "responses": { "200": { @@ -156,6 +353,7 @@ }, "/apis/v1beta1/pipelines/{id}/templates": { "get": { + "summary": "Returns a single YAML template that contains the description, parameters, and metadata associated with the pipeline provided.", "operationId": "GetTemplate", "responses": { "200": { @@ -194,6 +392,24 @@ } } }, + "apiListPipelineVersionsResponse": { + "type": "object", + "properties": { + "versions": { + "type": "array", + "items": { + "$ref": "#/definitions/apiPipelineVersion" + } + }, + "next_page_token": { + "type": "string" + }, + "total_size": { + "type": "integer", + "format": "int32" + } + } + }, "apiListPipelinesResponse": { "type": "object", "properties": { @@ -248,18 +464,113 @@ "items": { "$ref": "#/definitions/apiParameter" }, - "description": "Output. The input parameters for this pipeline." + "description": "Output. The input parameters for this pipeline.\nTODO(jingzhang36): replace this parameters field with the parameters field\ninside PipelineVersion when all usage of the former has been changed to use\nthe latter." }, "url": { "$ref": "#/definitions/apiUrl", - "description": "The URL to the source of the pipeline. 
This is required when creating the\npipeine through CreatePipeline API."
+          "description": "The URL to the source of the pipeline. This is required when creating the\npipeline through CreatePipeline API.\nTODO(jingzhang36): replace this url field with the code_source_urls field\ninside PipelineVersion when all usage of the former has been changed to use\nthe latter."
        },
        "error": {
          "type": "string",
          "description": "In case any error happens retrieving a pipeline field, only pipeline ID\nand the error message is returned. Client has the flexibility of choosing\nhow to handle error. This is especially useful during listing call."
+        },
+        "default_version": {
+          "$ref": "#/definitions/apiPipelineVersion",
+          "title": "Output only. The default version of the pipeline. As of now, the latest\nversion is used as default. (In the future, if desired by customers, we\ncan allow them to set default version.)",
+          "readOnly": true
+        }
+      }
+    },
+    "apiPipelineVersion": {
+      "type": "object",
+      "properties": {
+        "id": {
+          "type": "string",
+          "description": "Output. Unique version ID. Generated by API server."
+        },
+        "name": {
+          "type": "string",
+          "description": "Optional input field. Version name provided by user."
+        },
+        "created_at": {
+          "type": "string",
+          "format": "date-time",
+          "description": "Output. The time this pipeline version is created."
+        },
+        "parameters": {
+          "type": "array",
+          "items": {
+            "$ref": "#/definitions/apiParameter"
+          },
+          "description": "Output. The input parameters for this pipeline."
+        },
+        "code_source_url": {
+          "type": "string",
+          "description": "Input. Optional. Pipeline version code source."
+        },
+        "package_url": {
+          "$ref": "#/definitions/apiUrl",
+          "description": "Input. Required. Pipeline version package url.\nWhen calling CreatePipelineVersion API method, need to provide one package\nfile location."
+        },
+        "resource_references": {
+          "type": "array",
+          "items": {
+            "$ref": "#/definitions/apiResourceReference"
+          },
+          "description": "Input. Required. E.g., specify which pipeline this pipeline version belongs\nto."
+        }
+      }
+    },
+    "apiRelationship": {
+      "type": "string",
+      "enum": [
+        "UNKNOWN_RELATIONSHIP",
+        "OWNER",
+        "CREATOR"
+      ],
+      "default": "UNKNOWN_RELATIONSHIP"
+    },
+    "apiResourceKey": {
+      "type": "object",
+      "properties": {
+        "type": {
+          "$ref": "#/definitions/apiResourceType",
+          "description": "The type of the resource that referred to."
+        },
+        "id": {
+          "type": "string",
+          "description": "The ID of the resource that referred to."
+        }
+      }
+    },
+    "apiResourceReference": {
+      "type": "object",
+      "properties": {
+        "key": {
+          "$ref": "#/definitions/apiResourceKey"
+        },
+        "name": {
+          "type": "string",
+          "description": "The name of the resource that referred to."
+        },
+        "relationship": {
+          "$ref": "#/definitions/apiRelationship",
+          "description": "Required field. The relationship from referred resource to the object."
+ } + } + }, + "apiResourceType": { + "type": "string", + "enum": [ + "UNKNOWN_RESOURCE_TYPE", + "EXPERIMENT", + "JOB", + "PIPELINE", + "PIPELINE_VERSION", + "NAMESPACE" + ], + "default": "UNKNOWN_RESOURCE_TYPE" + }, "apiStatus": { "type": "object", "properties": { diff --git a/backend/api/swagger/pipeline.upload.swagger.json b/backend/api/swagger/pipeline.upload.swagger.json index 42d1e73d9c5..9162e55cbe7 100644 --- a/backend/api/swagger/pipeline.upload.swagger.json +++ b/backend/api/swagger/pipeline.upload.swagger.json @@ -45,6 +45,61 @@ "in": "query", "required": false, "type": "string" + }, + { + "name": "description", + "in": "query", + "required": false, + "type": "string" + } + ], + "tags": [ + "PipelineUploadService" + ] + } + }, + "/apis/v1beta1/pipelines/upload_version": { + "post": { + "operationId": "UploadPipelineVersion", + "consumes": [ + "multipart/form-data" + ], + "produces": [ + "application/json" + ], + "responses": { + "200": { + "description": "", + "schema": { + "$ref": "#/definitions/apiPipelineVersion" + } + }, + "default": { + "description": "", + "schema": { + "$ref": "#/definitions/apiStatus" + } + } + }, + "parameters": [ + { + "name": "uploadfile", + "in": "formData", + "required": true, + "type": "file", + "description": "The pipeline to upload. Maximum size of 32MB is supported." + }, + { + "name": "name", + "in": "query", + "required": false, + "type": "string" + }, + { + "name": "pipelineid", + "in": "query", + "required": false, + "type": "string" } ], "tags": [ @@ -93,6 +148,96 @@ } } }, + "apiPipelineVersion": { + "type": "object", + "properties": { + "id": { + "type": "string", + "description": "Output. Unique version ID. Generated by API server." + }, + "name": { + "type": "string", + "description": "Optional input field. Version name provided by user." + }, + "created_at": { + "type": "string", + "format": "date-time", + "description": "Output. The time this pipeline version is created." + }, + "parameters": { + "type": "array", + "items": { + "$ref": "#/definitions/apiParameter" + }, + "description": "Output. The input parameters for this pipeline." + }, + "code_source_url": { + "type": "string", + "description": "Input. Optional. Pipeline version code source." + }, + "package_url": { + "$ref": "#/definitions/apiUrl", + "description": "Input. Required. Pipeline version package url.\nWhe calling CreatePipelineVersion API method, need to provide one package\nfile location." + }, + "resource_references": { + "type": "array", + "items": { + "$ref": "#/definitions/apiResourceReference" + }, + "description": "Input. Required. E.g., specify which pipeline this pipeline version belongs\nto." + } + } + }, + "apiRelationship": { + "type": "string", + "enum": [ + "UNKNOWN_RELATIONSHIP", + "OWNER", + "CREATOR" + ], + "default": "UNKNOWN_RELATIONSHIP" + }, + "apiResourceKey": { + "type": "object", + "properties": { + "type": { + "$ref": "#/definitions/apiResourceType", + "description": "The type of the resource that referred to." + }, + "id": { + "type": "string", + "description": "The ID of the resource that referred to." + } + } + }, + "apiResourceReference": { + "type": "object", + "properties": { + "key": { + "$ref": "#/definitions/apiResourceKey" + }, + "name": { + "type": "string", + "description": "The name of the resource that referred to." + }, + "relationship": { + "$ref": "#/definitions/apiRelationship", + "description": "Required field. The relationship from referred resource to the object." 
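A minimal client sketch for the new `upload_version` route defined above; the host, file name, and IDs are placeholders, and the upload must respect the 32MB limit noted on `uploadfile`:

import requests

API_HOST = "http://localhost:8888"  # placeholder

# 'uploadfile' is the required multipart form field; 'name' and 'pipelineid'
# are the query parameters described above.
with open("my_pipeline.tar.gz", "rb") as f:
    resp = requests.post(
        API_HOST + "/apis/v1beta1/pipelines/upload_version",
        files={"uploadfile": ("my_pipeline.tar.gz", f)},
        params={"name": "v2", "pipelineid": "<parent-pipeline-id>"},
    )
resp.raise_for_status()
print(resp.json())  # an apiPipelineVersion object on success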
+ } + } + }, + "apiResourceType": { + "type": "string", + "enum": [ + "UNKNOWN_RESOURCE_TYPE", + "EXPERIMENT", + "JOB", + "PIPELINE", + "PIPELINE_VERSION", + "NAMESPACE" + ], + "default": "UNKNOWN_RESOURCE_TYPE" + }, "apiStatus": { "type": "object", "properties": { @@ -147,4 +292,4 @@ "Bearer": [] } ] -} \ No newline at end of file +} diff --git a/backend/api/swagger/run.swagger.json b/backend/api/swagger/run.swagger.json index d944bfa54e9..f05ed108ea3 100644 --- a/backend/api/swagger/run.swagger.json +++ b/backend/api/swagger/run.swagger.json @@ -17,6 +17,7 @@ "paths": { "/apis/v1beta1/runs": { "get": { + "summary": "Find all runs.", "operationId": "ListRuns", "responses": { "200": { @@ -48,7 +49,7 @@ }, { "name": "sort_by", - "description": "Can be format of \"field_name\", \"field_name asc\" or \"field_name des\"\nAscending by default.", + "description": "Can be format of \"field_name\", \"field_name asc\" or \"field_name des\"\n(Example, \"name asc\" or \"id des\"). Ascending by default.", "in": "query", "required": false, "type": "string" @@ -62,7 +63,10 @@ "enum": [ "UNKNOWN_RESOURCE_TYPE", "EXPERIMENT", - "JOB" + "JOB", + "PIPELINE", + "PIPELINE_VERSION", + "NAMESPACE" ], "default": "UNKNOWN_RESOURCE_TYPE" }, @@ -75,7 +79,7 @@ }, { "name": "filter", - "description": "A base-64 encoded, JSON-serialized Filter protocol buffer (see\nfilter.proto).", + "description": "A url-encoded, JSON-serialized Filter protocol buffer (see\n[filter.proto](https://github.com/kubeflow/pipelines/\nblob/master/backend/api/filter.proto)).", "in": "query", "required": false, "type": "string" @@ -86,6 +90,7 @@ ] }, "post": { + "summary": "Create a new run.", "operationId": "CreateRun", "responses": { "200": { @@ -118,6 +123,7 @@ }, "/apis/v1beta1/runs/{id}": { "delete": { + "summary": "Delete a run.", "operationId": "DeleteRun", "responses": { "200": { @@ -148,6 +154,7 @@ }, "/apis/v1beta1/runs/{id}:archive": { "post": { + "summary": "Archive a run.", "operationId": "ArchiveRun", "responses": { "200": { @@ -178,6 +185,7 @@ }, "/apis/v1beta1/runs/{id}:unarchive": { "post": { + "summary": "Restore an archived run.", "operationId": "UnarchiveRun", "responses": { "200": { @@ -208,6 +216,7 @@ }, "/apis/v1beta1/runs/{run_id}": { "get": { + "summary": "Find a specific run by ID.", "operationId": "GetRun", "responses": { "200": { @@ -238,6 +247,7 @@ }, "/apis/v1beta1/runs/{run_id}/nodes/{node_id}/artifacts/{artifact_name}:read": { "get": { + "summary": "Find a run's artifact data.", "operationId": "ReadArtifact", "responses": { "200": { @@ -283,6 +293,7 @@ }, "/apis/v1beta1/runs/{run_id}/retry": { "post": { + "summary": "Re-initiate a failed or terminated run.", "operationId": "RetryRun", "responses": { "200": { @@ -313,6 +324,7 @@ }, "/apis/v1beta1/runs/{run_id}/terminate": { "post": { + "summary": "Terminate an active run.", "operationId": "TerminateRun", "responses": { "200": { @@ -343,7 +355,7 @@ }, "/apis/v1beta1/runs/{run_id}:reportMetrics": { "post": { - "summary": "ReportRunMetrics reports metrics of a run. Each metric is reported in its\nown transaction, so this API accepts partial failures. Metric can be uniquely\nidentified by (run_id, node_id, name). Duplicate reporting will be\nignored by the API. First reporting wins.", + "summary": "ReportRunMetrics reports metrics of a run. Each metric is reported in its\nown transaction, so this API accepts partial failures. Metric can be\nuniquely identified by (run_id, node_id, name). Duplicate reporting will be\nignored by the API. 
First reporting wins.",
        "operationId": "ReportRunMetrics",
        "responses": {
          "200": {
@@ -584,7 +596,10 @@
            "enum": [
              "UNKNOWN_RESOURCE_TYPE",
              "EXPERIMENT",
-              "JOB"
+              "JOB",
+              "PIPELINE",
+              "PIPELINE_VERSION",
+              "NAMESPACE"
            ],
            "default": "UNKNOWN_RESOURCE_TYPE"
          },
@@ -617,6 +632,10 @@
          },
          "description": "Optional input field. Specify which resource this run belongs to."
        },
+        "service_account": {
+          "type": "string",
+          "description": "Optional input field. Specify which Kubernetes service account this run uses."
+        },
        "created_at": {
          "type": "string",
          "format": "date-time",
@@ -665,11 +684,11 @@
      "properties": {
        "name": {
          "type": "string",
-          "description": "Required. The user defined name of the metric. It must between 1 and 63 characters\nlong and must conform to the following regular expression:\n`[a-z]([-a-z0-9]*[a-z0-9])?`."
+          "description": "Required. The user defined name of the metric. It must be between 1 and 63\ncharacters long and must conform to the following regular expression:\n`[a-z]([-a-z0-9]*[a-z0-9])?`."
        },
        "node_id": {
          "type": "string",
-          "description": "Required. The runtime node ID which reports the metric. The node ID can be found in\nthe RunDetail.workflow.Status. Metric with same (node_id, name)\nare considerd as duplicate. Only the first reporting will be recorded. Max length is 128."
+          "description": "Required. The runtime node ID which reports the metric. The node ID can be\nfound in the RunDetail.workflow.Status. Metrics with the same (node_id, name)\nare considered duplicates. Only the first reporting will be recorded. Max\nlength is 128."
        },
        "number_value": {
          "type": "number",
diff --git a/backend/api/swagger/visualization.swagger.json b/backend/api/swagger/visualization.swagger.json
index 031539c3c79..fb8515b38a7 100755
--- a/backend/api/swagger/visualization.swagger.json
+++ b/backend/api/swagger/visualization.swagger.json
@@ -15,7 +15,7 @@
    "application/json"
  ],
  "paths": {
-    "/apis/v1beta1/visualizations": {
+    "/apis/v1beta1/visualizations/{namespace}": {
      "post": {
        "operationId": "CreateVisualization",
        "responses": {
@@ -33,6 +33,12 @@
          }
        },
        "parameters": [
+          {
+            "name": "namespace",
+            "in": "path",
+            "required": true,
+            "type": "string"
+          },
          {
            "name": "body",
            "in": "body",
diff --git a/backend/api/visualization.proto b/backend/api/visualization.proto
index 270e9890025..b65b21582fb 100644
--- a/backend/api/visualization.proto
+++ b/backend/api/visualization.proto
@@ -58,7 +58,7 @@ option (grpc.gateway.protoc_gen_swagger.options.openapiv2_swagger) = {
 service VisualizationService {
   rpc CreateVisualization(CreateVisualizationRequest) returns (Visualization) {
     option (google.api.http) = {
-      post: "/apis/v1beta1/visualizations"
+      post: "/apis/v1beta1/visualizations/{namespace}"
       body: "visualization"
     };
   }
@@ -69,6 +69,7 @@ service VisualizationService {
 // for determining output path.
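// With the route change above, the target namespace now comes from the URL
// path, e.g. POST /apis/v1beta1/visualizations/kubeflow (where "kubeflow" is
// just an example namespace).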
message CreateVisualizationRequest { Visualization visualization = 1; + string namespace = 2; } message Visualization { diff --git a/backend/build_api_server.sh b/backend/build_api_server.sh index 63848d03647..97e9871bf5f 100755 --- a/backend/build_api_server.sh +++ b/backend/build_api_server.sh @@ -36,8 +36,9 @@ LONGOPTS=use_remote_build,gcp_credentials_file: PARSED=$(getopt --longoptions=$LONGOPTS --options=$OPTS --name "$0" -- "$@") eval set -- "$PARSED" -USE_REMOTE_BUILD=true +USE_REMOTE_BUILD=false GCP_CREDENTIALS_FILE="gs://ml-pipeline-test-bazel/ml-pipeline-test-bazel-builder-credentials.json" +MACHINE_ARCH=`uname -m` while true; do case $1 in @@ -76,16 +77,43 @@ if [[ ${USE_REMOTE_BUILD} == true ]]; then fi GCP_CREDENTIALS="$(gsutil cat ${GCP_CREDENTIALS_FILE})" - docker build \ - -t "${IMAGE_TAG}" \ - -f backend/Dockerfile \ - . \ - --build-arg use_remote_build=true \ - --build-arg google_application_credentials="${GCP_CREDENTIALS}" + if [ $MACHINE_ARCH == "aarch64" ]; then + docker build \ + -t bazel:0.24.0 \ + -f backend/Dockerfile.bazel . + + docker build \ + -t "${IMAGE_TAG}" \ + -f backend/Dockerfile \ + . \ + --build-arg use_remote_build=true \ + --build-arg google_application_credentials="${GCP_CREDENTIALS}" \ + --build-arg BAZEL_IMAGE=bazel:0.24.0 + else + docker build \ + -t "${IMAGE_TAG}" \ + -f backend/Dockerfile \ + . \ + --build-arg use_remote_build=true \ + --build-arg google_application_credentials="${GCP_CREDENTIALS}" + fi + else echo "Building API Server with local execution..." - docker build \ - -t "${IMAGE_TAG}" \ - -f backend/Dockerfile \ - . + if [ $MACHINE_ARCH == "aarch64" ]; then + docker build \ + -t bazel:0.24.0 \ + -f backend/Dockerfile.bazel . + + docker build \ + -t "${IMAGE_TAG}" \ + -f backend/Dockerfile \ + . \ + --build-arg BAZEL_IMAGE=bazel:0.24.0 + else + docker build \ + -t "${IMAGE_TAG}" \ + -f backend/Dockerfile \ + . + fi fi diff --git a/backend/metadata_writer/Dockerfile b/backend/metadata_writer/Dockerfile new file mode 100644 index 00000000000..2eb7f64fb82 --- /dev/null +++ b/backend/metadata_writer/Dockerfile @@ -0,0 +1,9 @@ +# ml-metadata package depends on tensorflow package +FROM python:3.7 +COPY backend/metadata_writer/requirements.txt /kfp/metadata_writer/ +RUN python3 -m pip install -r /kfp/metadata_writer/requirements.txt +COPY components/license.sh components/third_party_licenses.csv /kfp/metadata_writer/ +RUN mkdir /usr/licenses && /kfp/metadata_writer/license.sh /kfp/metadata_writer/third_party_licenses.csv /usr/licenses + +COPY backend/metadata_writer/src/* /kfp/metadata_writer/ +CMD python3 -u /kfp/metadata_writer/metadata_writer.py diff --git a/backend/metadata_writer/OWNERS b/backend/metadata_writer/OWNERS new file mode 100644 index 00000000000..898f657f90b --- /dev/null +++ b/backend/metadata_writer/OWNERS @@ -0,0 +1,4 @@ +approvers: + - Ark-kun +reviewers: + - Ark-kun diff --git a/backend/metadata_writer/README.md b/backend/metadata_writer/README.md new file mode 100644 index 00000000000..14d3a785b10 --- /dev/null +++ b/backend/metadata_writer/README.md @@ -0,0 +1,9 @@ +# metadata\_writer + +## Updating python dependencies + +[pip-tools](https://github.com/jazzband/pip-tools) is used to manage python +dependencies. To update dependencies, edit [requirements.in](requirements.in) +and run `../update_requirements.sh python:3.7` to update and pin the transitive +dependencies. 
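A hedged sketch of what that amounts to, assuming `update_requirements.sh` simply runs `pip-compile` inside the named image so the pins match the runtime Python (the script itself is not shown in this patch and may differ):

import os
import subprocess

# Assumption only: run pip-compile inside the python:3.7 image so the pinned
# versions are resolved against the same interpreter the service ships with.
subprocess.run(
    [
        "docker", "run", "--rm",
        "-v", os.getcwd() + ":/wd", "-w", "/wd",
        "python:3.7",
        "sh", "-c", "pip install pip-tools && pip-compile requirements.in",
    ],
    check=True,
)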
+ diff --git a/backend/metadata_writer/requirements.in b/backend/metadata_writer/requirements.in new file mode 100644 index 00000000000..f067dc8d0fe --- /dev/null +++ b/backend/metadata_writer/requirements.in @@ -0,0 +1,2 @@ +kubernetes>=8.0.0,<11.0.0 +ml-metadata==0.21.2 diff --git a/backend/metadata_writer/requirements.txt b/backend/metadata_writer/requirements.txt new file mode 100644 index 00000000000..53773746107 --- /dev/null +++ b/backend/metadata_writer/requirements.txt @@ -0,0 +1,48 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile requirements.in +# +absl-py==0.9.0 # via ml-metadata, tensorboard, tensorflow +astor==0.8.1 # via tensorflow +cachetools==4.0.0 # via google-auth +certifi==2019.11.28 # via kubernetes, requests +chardet==3.0.4 # via requests +gast==0.2.2 # via tensorflow +google-auth-oauthlib==0.4.1 # via tensorboard +google-auth==1.12.0 # via google-auth-oauthlib, kubernetes, tensorboard +google-pasta==0.2.0 # via tensorflow +grpcio==1.27.2 # via tensorboard, tensorflow +h5py==2.10.0 # via keras-applications +idna==2.9 # via requests +keras-applications==1.0.8 # via tensorflow +keras-preprocessing==1.1.0 # via tensorflow +kubernetes==10.1.0 # via -r requirements.in (line 1) +markdown==3.2.1 # via tensorboard +ml-metadata==0.21.2 # via -r requirements.in (line 2) +numpy==1.18.2 # via h5py, keras-applications, keras-preprocessing, opt-einsum, scipy, tensorboard, tensorflow +oauthlib==3.1.0 # via requests-oauthlib +opt-einsum==3.2.0 # via tensorflow +protobuf==3.11.3 # via ml-metadata, tensorboard, tensorflow +pyasn1-modules==0.2.8 # via google-auth +pyasn1==0.4.8 # via pyasn1-modules, rsa +python-dateutil==2.8.1 # via kubernetes +pyyaml==3.13 # via kubernetes +requests-oauthlib==1.3.0 # via google-auth-oauthlib, kubernetes +requests==2.23.0 # via kubernetes, requests-oauthlib, tensorboard +rsa==4.0 # via google-auth +scipy==1.4.1 # via tensorflow +six==1.14.0 # via absl-py, google-auth, google-pasta, grpcio, h5py, keras-preprocessing, kubernetes, ml-metadata, protobuf, python-dateutil, tensorboard, tensorflow, websocket-client +tensorboard==2.1.1 # via tensorflow +tensorflow-estimator==2.1.0 # via tensorflow +tensorflow==2.1.0 # via ml-metadata +termcolor==1.1.0 # via tensorflow +urllib3==1.25.8 # via kubernetes, requests +websocket-client==0.57.0 # via kubernetes +werkzeug==1.0.1 # via tensorboard +wheel==0.34.2 # via tensorboard, tensorflow +wrapt==1.12.1 # via tensorflow + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/backend/metadata_writer/src/metadata_helpers.py b/backend/metadata_writer/src/metadata_helpers.py new file mode 100644 index 00000000000..2078fa7049d --- /dev/null +++ b/backend/metadata_writer/src/metadata_helpers.py @@ -0,0 +1,414 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
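"""Helpers used by the Metadata Writer to talk to the ML Metadata (MLMD) gRPC
store: connecting with retries, and creating artifact/execution/context types,
instances, events, attributions, and associations in a get-or-create style."""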
+ +import json +import os +import sys +import ml_metadata +from time import sleep +from ml_metadata.proto import metadata_store_pb2 +from ml_metadata.metadata_store import metadata_store + + +def connect_to_mlmd() -> metadata_store.MetadataStore: + metadata_service_host = os.environ.get( + 'METADATA_GRPC_SERVICE_SERVICE_HOST', 'metadata-grpc-service') + metadata_service_port = int(os.environ.get( + 'METADATA_GRPC_SERVICE_SERVICE_PORT', 8080)) + + mlmd_connection_config = metadata_store_pb2.MetadataStoreClientConfig( + host=metadata_service_host, + port=metadata_service_port, + ) + + # Checking the connection to the Metadata store. + for _ in range(100): + try: + mlmd_store = metadata_store.MetadataStore(mlmd_connection_config) + # All get requests fail when the DB is empty, so we have to use a put request. + # TODO: Replace with _ = mlmd_store.get_context_types() when https://github.com/google/ml-metadata/issues/28 is fixed + _ = mlmd_store.put_execution_type( + metadata_store_pb2.ExecutionType( + name="DummyExecutionType", + ) + ) + return mlmd_store + except Exception as e: + print('Failed to access the Metadata store. Exception: "{}"'.format(str(e)), file=sys.stderr) + sys.stderr.flush() + sleep(1) + + raise RuntimeError('Could not connect to the Metadata store.') + + +def get_or_create_artifact_type(store, type_name, properties: dict = None) -> metadata_store_pb2.ArtifactType: + try: + artifact_type = store.get_artifact_type(type_name=type_name) + return artifact_type + except: + artifact_type = metadata_store_pb2.ArtifactType( + name=type_name, + properties=properties, + ) + artifact_type.id = store.put_artifact_type(artifact_type) # Returns ID + return artifact_type + + +def get_or_create_execution_type(store, type_name, properties: dict = None) -> metadata_store_pb2.ExecutionType: + try: + execution_type = store.get_execution_type(type_name=type_name) + return execution_type + except: + execution_type = metadata_store_pb2.ExecutionType( + name=type_name, + properties=properties, + ) + execution_type.id = store.put_execution_type(execution_type) # Returns ID + return execution_type + + +def get_or_create_context_type(store, type_name, properties: dict = None) -> metadata_store_pb2.ContextType: + try: + context_type = store.get_context_type(type_name=type_name) + return context_type + except: + context_type = metadata_store_pb2.ContextType( + name=type_name, + properties=properties, + ) + context_type.id = store.put_context_type(context_type) # Returns ID + return context_type + + +def create_artifact_with_type( + store, + uri: str, + type_name: str, + properties: dict = None, + type_properties: dict = None, + custom_properties: dict = None, +) -> metadata_store_pb2.Artifact: + artifact_type = get_or_create_artifact_type( + store=store, + type_name=type_name, + properties=type_properties, + ) + artifact = metadata_store_pb2.Artifact( + uri=uri, + type_id=artifact_type.id, + properties=properties, + custom_properties=custom_properties, + ) + artifact.id = store.put_artifacts([artifact])[0] + return artifact + + +def create_execution_with_type( + store, + type_name: str, + properties: dict = None, + type_properties: dict = None, + custom_properties: dict = None, +) -> metadata_store_pb2.Execution: + execution_type = get_or_create_execution_type( + store=store, + type_name=type_name, + properties=type_properties, + ) + execution = metadata_store_pb2.Execution( + type_id=execution_type.id, + properties=properties, + custom_properties=custom_properties, + ) + execution.id = 
store.put_executions([execution])[0] + return execution + + +def create_context_with_type( + store, + context_name: str, + type_name: str, + properties: dict = None, + type_properties: dict = None, + custom_properties: dict = None, +) -> metadata_store_pb2.Context: + # ! Context_name must be unique + context_type = get_or_create_context_type( + store=store, + type_name=type_name, + properties=type_properties, + ) + context = metadata_store_pb2.Context( + name=context_name, + type_id=context_type.id, + properties=properties, + custom_properties=custom_properties, + ) + context.id = store.put_contexts([context])[0] + return context + + +import functools +@functools.lru_cache(maxsize=128) +def get_context_by_name( + store, + context_name: str, +) -> metadata_store_pb2.Context: + matching_contexts = [context for context in store.get_contexts() if context.name == context_name] + assert len(matching_contexts) <= 1 + if len(matching_contexts) == 0: + raise ValueError('Context with name "{}" was not found'.format(context_name)) + return matching_contexts[0] + + +def get_or_create_context_with_type( + store, + context_name: str, + type_name: str, + properties: dict = None, + type_properties: dict = None, + custom_properties: dict = None, +) -> metadata_store_pb2.Context: + try: + context = get_context_by_name(store, context_name) + except: + context = create_context_with_type( + store=store, + context_name=context_name, + type_name=type_name, + properties=properties, + type_properties=type_properties, + custom_properties=custom_properties, + ) + return context + + # Verifying that the context has the expected type name + context_types = store.get_context_types_by_id([context.type_id]) + assert len(context_types) == 1 + if context_types[0].name != type_name: + raise RuntimeError('Context "{}" was found, but it has type "{}" instead of "{}"'.format(context_name, context_types[0].name, type_name)) + return context + + +def create_new_execution_in_existing_context( + store, + execution_type_name: str, + context_id: int, + properties: dict = None, + execution_type_properties: dict = None, + custom_properties: dict = None, +) -> metadata_store_pb2.Execution: + execution = create_execution_with_type( + store=store, + properties=properties, + custom_properties=custom_properties, + type_name=execution_type_name, + type_properties=execution_type_properties, + ) + association = metadata_store_pb2.Association( + execution_id=execution.id, + context_id=context_id, + ) + + store.put_attributions_and_associations([], [association]) + return execution + + +RUN_CONTEXT_TYPE_NAME = "KfpRun" +KFP_EXECUTION_TYPE_NAME_PREFIX = 'components.' 
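# Illustrative usage of the helpers above; a sketch only, not part of this
# patch. The type and property names here are invented examples, unlike the
# KfpRun names registered via the constants below.
from ml_metadata.proto import metadata_store_pb2

store = connect_to_mlmd()

# Idempotent: a second call with the same context_name returns the same context.
context = get_or_create_context_with_type(
    store=store,
    context_name="example-run-001",  # context names must be unique
    type_name="ExampleRun",
    type_properties={"owner": metadata_store_pb2.STRING},
    properties={"owner": metadata_store_pb2.Value(string_value="alice")},
)

# Each call creates a fresh execution associated with that context.
execution = create_new_execution_in_existing_context(
    store=store,
    execution_type_name="ExampleStep",
    context_id=context.id,
)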
+
+ARTIFACT_IO_NAME_PROPERTY_NAME = "name"
+EXECUTION_COMPONENT_ID_PROPERTY_NAME = "component_id"  # ~= Task ID
+
+#TODO: Get rid of these when https://github.com/tensorflow/tfx/issues/905 and https://github.com/kubeflow/pipelines/issues/2562 are fixed
+ARTIFACT_PIPELINE_NAME_PROPERTY_NAME = "pipeline_name"
+EXECUTION_PIPELINE_NAME_PROPERTY_NAME = "pipeline_name"
+CONTEXT_PIPELINE_NAME_PROPERTY_NAME = "pipeline_name"
+ARTIFACT_RUN_ID_PROPERTY_NAME = "run_id"
+EXECUTION_RUN_ID_PROPERTY_NAME = "run_id"
+CONTEXT_RUN_ID_PROPERTY_NAME = "run_id"
+
+KFP_POD_NAME_EXECUTION_PROPERTY_NAME = 'kfp_pod_name'
+
+ARTIFACT_ARGO_ARTIFACT_PROPERTY_NAME = 'argo_artifact'
+
+
+def get_or_create_run_context(
+    store,
+    run_id: str,
+) -> metadata_store_pb2.Context:
+    context = get_or_create_context_with_type(
+        store=store,
+        context_name=run_id,
+        type_name=RUN_CONTEXT_TYPE_NAME,
+        type_properties={
+            CONTEXT_PIPELINE_NAME_PROPERTY_NAME: metadata_store_pb2.STRING,
+            CONTEXT_RUN_ID_PROPERTY_NAME: metadata_store_pb2.STRING,
+        },
+        properties={
+            CONTEXT_PIPELINE_NAME_PROPERTY_NAME: metadata_store_pb2.Value(string_value=run_id),
+            CONTEXT_RUN_ID_PROPERTY_NAME: metadata_store_pb2.Value(string_value=run_id),
+        },
+    )
+    return context
+
+
+def create_new_execution_in_existing_run_context(
+    store,
+    execution_type_name: str,
+    context_id: int,
+    pod_name: str,
+    # TODO: Remove when UX stops relying on these properties
+    pipeline_name: str = None,
+    run_id: str = None,
+    instance_id: str = None,
+    custom_properties = None,
+) -> metadata_store_pb2.Execution:
+    pipeline_name = pipeline_name or 'Context_' + str(context_id) + '_pipeline'
+    run_id = run_id or 'Context_' + str(context_id) + '_run'
+    instance_id = instance_id or execution_type_name
+    return create_new_execution_in_existing_context(
+        store=store,
+        execution_type_name=execution_type_name,
+        context_id=context_id,
+        execution_type_properties={
+            EXECUTION_PIPELINE_NAME_PROPERTY_NAME: metadata_store_pb2.STRING,
+            EXECUTION_RUN_ID_PROPERTY_NAME: metadata_store_pb2.STRING,
+            EXECUTION_COMPONENT_ID_PROPERTY_NAME: metadata_store_pb2.STRING,
+        },
+        # TODO: Remove when UX stops relying on these properties
+        properties={
+            EXECUTION_PIPELINE_NAME_PROPERTY_NAME: metadata_store_pb2.Value(string_value=pipeline_name), # Mistakenly used for grouping in the UX
+            EXECUTION_RUN_ID_PROPERTY_NAME: metadata_store_pb2.Value(string_value=run_id),
+            EXECUTION_COMPONENT_ID_PROPERTY_NAME: metadata_store_pb2.Value(string_value=instance_id), # should set to task ID, not component ID
+        },
+        custom_properties={
+            KFP_POD_NAME_EXECUTION_PROPERTY_NAME: metadata_store_pb2.Value(string_value=pod_name),
+        },
+    )
+
+
+def create_new_artifact_event_and_attribution(
+    store,
+    execution_id: int,
+    context_id: int,
+    uri: str,
+    type_name: str,
+    event_type: metadata_store_pb2.Event.Type,
+    properties: dict = None,
+    artifact_type_properties: dict = None,
+    custom_properties: dict = None,
+    artifact_name_path: metadata_store_pb2.Event.Path = None,
+    milliseconds_since_epoch: int = None,
+) -> metadata_store_pb2.Artifact:
+    artifact = create_artifact_with_type(
+        store=store,
+        uri=uri,
+        type_name=type_name,
+        type_properties=artifact_type_properties,
+        properties=properties,
+        custom_properties=custom_properties,
+    )
+    event = metadata_store_pb2.Event(
+        execution_id=execution_id,
+        artifact_id=artifact.id,
+        type=event_type,
+        path=artifact_name_path,
+        milliseconds_since_epoch=milliseconds_since_epoch,
+    )
+    store.put_events([event])
+
+    attribution = metadata_store_pb2.Attribution(
context_id=context_id, + artifact_id=artifact.id, + ) + store.put_attributions_and_associations([attribution], []) + + return artifact + + +def link_execution_to_input_artifact( + store, + execution_id: int, + uri: str, + input_name: str, +) -> metadata_store_pb2.Artifact: + artifacts = store.get_artifacts_by_uri(uri) + if len(artifacts) == 0: + print('Error: Not found upstream artifact with URI={}.'.format(uri), file=sys.stderr) + return None + if len(artifacts) > 1: + print('Error: Found multiple artifacts with the same URI. {} Using the last one..'.format(artifacts), file=sys.stderr) + + artifact = artifacts[-1] + + event = metadata_store_pb2.Event( + execution_id=execution_id, + artifact_id=artifact.id, + type=metadata_store_pb2.Event.INPUT, + path=metadata_store_pb2.Event.Path( + steps=[ + metadata_store_pb2.Event.Path.Step( + key=input_name, + ), + ] + ), + ) + store.put_events([event]) + return artifact + + +def create_new_output_artifact( + store, + execution_id: int, + context_id: int, + uri: str, + type_name: str, + output_name: str, + run_id: str = None, + argo_artifact: dict = None, +) -> metadata_store_pb2.Artifact: + properties = { + ARTIFACT_IO_NAME_PROPERTY_NAME: metadata_store_pb2.Value(string_value=output_name), + } + custom_properties = {} + if run_id: + properties[ARTIFACT_PIPELINE_NAME_PROPERTY_NAME] = metadata_store_pb2.Value(string_value=str(run_id)) + properties[ARTIFACT_RUN_ID_PROPERTY_NAME] = metadata_store_pb2.Value(string_value=str(run_id)) + if argo_artifact: + custom_properties[ARTIFACT_ARGO_ARTIFACT_PROPERTY_NAME] = metadata_store_pb2.Value(string_value=json.dumps(argo_artifact, sort_keys=True)) + return create_new_artifact_event_and_attribution( + store=store, + execution_id=execution_id, + context_id=context_id, + uri=uri, + type_name=type_name, + event_type=metadata_store_pb2.Event.OUTPUT, + artifact_name_path=metadata_store_pb2.Event.Path( + steps=[ + metadata_store_pb2.Event.Path.Step( + key=output_name, + #index=0, + ), + ] + ), + properties=properties, + artifact_type_properties={ + ARTIFACT_IO_NAME_PROPERTY_NAME: metadata_store_pb2.STRING, + ARTIFACT_PIPELINE_NAME_PROPERTY_NAME: metadata_store_pb2.STRING, + ARTIFACT_RUN_ID_PROPERTY_NAME: metadata_store_pb2.STRING, + }, + custom_properties=custom_properties, + #milliseconds_since_epoch=int(datetime.now(timezone.utc).timestamp() * 1000), # Happens automatically + ) diff --git a/backend/metadata_writer/src/metadata_writer.py b/backend/metadata_writer/src/metadata_writer.py new file mode 100644 index 00000000000..7c238942d33 --- /dev/null +++ b/backend/metadata_writer/src/metadata_writer.py @@ -0,0 +1,338 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
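"""Metadata Writer: watches the Kubernetes pods that Argo creates for pipeline
steps and records their executions, input artifacts, and output artifacts in
the ML Metadata (MLMD) store, marking each processed pod with labels and
annotations so work is not repeated."""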
+
+import json
+import hashlib
+import os
+import sys
+import re
+import kubernetes
+import yaml
+from time import sleep
+
+from metadata_helpers import *
+
+
+namespace_to_watch = os.environ.get('NAMESPACE_TO_WATCH', 'default')
+
+
+kubernetes.config.load_incluster_config()
+k8s_api = kubernetes.client.CoreV1Api()
+k8s_watch = kubernetes.watch.Watch()
+
+
+patch_retries = 20
+sleep_time = 0.1
+
+
+def patch_pod_metadata(
+    namespace: str,
+    pod_name: str,
+    patch: dict,
+    k8s_api: kubernetes.client.CoreV1Api = None,
+):
+    k8s_api = k8s_api or kubernetes.client.CoreV1Api()
+    patch = {
+        'metadata': patch
+    }
+    for retry in range(patch_retries):
+        try:
+            pod = k8s_api.patch_namespaced_pod(
+                name=pod_name,
+                namespace=namespace,
+                body=patch,
+            )
+            return pod
+        except Exception as e:
+            print(e)
+            sleep(sleep_time)
+
+
+# Connecting to MetadataDB
+mlmd_store = connect_to_mlmd()
+print("Connected to the metadata store")
+
+
+ARGO_OUTPUTS_ANNOTATION_KEY = 'workflows.argoproj.io/outputs'
+ARGO_TEMPLATE_ANNOTATION_KEY = 'workflows.argoproj.io/template'
+KFP_COMPONENT_SPEC_ANNOTATION_KEY = 'pipelines.kubeflow.org/component_spec'
+METADATA_EXECUTION_ID_LABEL_KEY = 'pipelines.kubeflow.org/metadata_execution_id'
+METADATA_CONTEXT_ID_LABEL_KEY = 'pipelines.kubeflow.org/metadata_context_id'
+METADATA_ARTIFACT_IDS_ANNOTATION_KEY = 'pipelines.kubeflow.org/metadata_artifact_ids'
+METADATA_INPUT_ARTIFACT_IDS_ANNOTATION_KEY = 'pipelines.kubeflow.org/metadata_input_artifact_ids'
+METADATA_OUTPUT_ARTIFACT_IDS_ANNOTATION_KEY = 'pipelines.kubeflow.org/metadata_output_artifact_ids'
+
+ARGO_WORKFLOW_LABEL_KEY = 'workflows.argoproj.io/workflow'
+ARGO_COMPLETED_LABEL_KEY = 'workflows.argoproj.io/completed'
+METADATA_WRITTEN_LABEL_KEY = 'pipelines.kubeflow.org/metadata_written'
+
+
+def output_name_to_argo(name: str) -> str:
+    import re
+    return re.sub('-+', '-', re.sub('[^-0-9a-z]+', '-', name.lower())).strip('-')
+
+def is_s3_endpoint(endpoint: str) -> bool:
+    # bool() makes the return value match the annotation; re.search returns a Match or None.
+    return bool(re.search('^.*s3.*amazonaws.com.*$', endpoint))
+
+def get_object_store_provider(endpoint: str) -> str:
+    if is_s3_endpoint(endpoint):
+        return 's3'
+    else:
+        return 'minio'
+
+def argo_artifact_to_uri(artifact: dict) -> str:
+    # S3 here means S3-compatible object storage, not AWS S3.
+    if 's3' in artifact:
+        s3_artifact = artifact['s3']
+        return '{provider}://{bucket}/{key}'.format(
+            provider=get_object_store_provider(s3_artifact['endpoint']),
+            bucket=s3_artifact.get('bucket', ''),
+            key=s3_artifact.get('key', ''),
+        )
+    elif 'raw' in artifact:
+        return None
+    else:
+        return None
+
+
+def is_tfx_pod(pod) -> bool:
+    main_containers = [container for container in pod.spec.containers if container.name == 'main']
+    if len(main_containers) != 1:
+        return False
+    main_container = main_containers[0]
+    return main_container.command and main_container.command[-1].endswith('tfx/orchestration/kubeflow/container_entrypoint.py')
+
+
+# Caches (not expected to be persistent)
+# These caches are only used to prevent race conditions. Race conditions happen because the writer can see multiple versions of a K8s object before the applied labels show up.
+# They are expected to be lost when restarting the service.
+# The operation of the Metadata Writer remains correct even if it's getting restarted frequently. (Kubernetes only sends the latest version of a resource to new watchers.)
+# Technically, we could remove the objects from cache as soon as we see that our labels have been applied successfully.
+pod_name_to_execution_id = {} +workflow_name_to_context_id = {} +pods_with_written_metadata = set() + +while True: + print("Start watching Kubernetes Pods created by Argo") + for event in k8s_watch.stream( + k8s_api.list_namespaced_pod, + namespace=namespace_to_watch, + label_selector=ARGO_WORKFLOW_LABEL_KEY, + ): + try: + obj = event['object'] + print('Kubernetes Pod event: ', event['type'], obj.metadata.name, obj.metadata.resource_version) + if event['type'] == 'ERROR': + print(event) + + pod_name = obj.metadata.name + + # Logging pod changes for debugging + with open('/tmp/pod_' + obj.metadata.name + '_' + obj.metadata.resource_version, 'w') as f: + f.write(yaml.dump(obj.to_dict())) + + assert obj.kind == 'Pod' + + if METADATA_WRITTEN_LABEL_KEY in obj.metadata.labels: + continue + + # Skip TFX pods - they have their own metadata writers + if is_tfx_pod(obj): + continue + + argo_workflow_name = obj.metadata.labels[ARGO_WORKFLOW_LABEL_KEY] # Should exist due to initial filtering + argo_template = json.loads(obj.metadata.annotations[ARGO_TEMPLATE_ANNOTATION_KEY]) + argo_template_name = argo_template['name'] + + component_name = argo_template_name + component_version = component_name + argo_output_name_to_type = {} + if KFP_COMPONENT_SPEC_ANNOTATION_KEY in obj.metadata.annotations: + component_spec_text = obj.metadata.annotations[KFP_COMPONENT_SPEC_ANNOTATION_KEY] + component_spec = json.loads(component_spec_text) + component_spec_digest = hashlib.sha256(component_spec_text.encode()).hexdigest() + component_name = component_spec.get('name', component_name) + component_version = component_name + '@sha256=' + component_spec_digest + output_name_to_type = {output['name']: output.get('type', None) for output in component_spec.get('outputs', [])} + argo_output_name_to_type = {output_name_to_argo(k): v for k, v in output_name_to_type.items() if v} + + if obj.metadata.name in pod_name_to_execution_id: + execution_id = pod_name_to_execution_id[obj.metadata.name] + context_id = workflow_name_to_context_id[argo_workflow_name] + elif METADATA_EXECUTION_ID_LABEL_KEY in obj.metadata.labels: + execution_id = int(obj.metadata.labels[METADATA_EXECUTION_ID_LABEL_KEY]) + context_id = int(obj.metadata.labels[METADATA_CONTEXT_ID_LABEL_KEY]) + print('Found execution id: {}, context id: {} for pod {}.'.format(execution_id, context_id, obj.metadata.name)) + else: + run_context = get_or_create_run_context( + store=mlmd_store, + run_id=argo_workflow_name, # We can switch to internal run IDs once backend starts adding them + ) + + # Adding new execution to the database + execution = create_new_execution_in_existing_run_context( + store=mlmd_store, + context_id=run_context.id, + execution_type_name=KFP_EXECUTION_TYPE_NAME_PREFIX + component_version, + pod_name=pod_name, + pipeline_name=argo_workflow_name, + run_id=argo_workflow_name, + instance_id=component_name, + ) + + argo_input_artifacts = argo_template.get('inputs', {}).get('artifacts', []) + input_artifact_ids = [] + for argo_artifact in argo_input_artifacts: + artifact_uri = argo_artifact_to_uri(argo_artifact) + if not artifact_uri: + continue + + input_name = argo_artifact.get('path', '') # Every artifact should have a path in Argo + input_artifact_path_prefix = '/tmp/inputs/' + input_artifact_path_postfix = '/data' + if input_name.startswith(input_artifact_path_prefix): + input_name = input_name[len(input_artifact_path_prefix):] + if input_name.endswith(input_artifact_path_postfix): + input_name = input_name[0: -len(input_artifact_path_postfix)] + + 
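# At this point an Argo input path such as '/tmp/inputs/training_data/data'
# has been reduced to the logical input name 'training_data', which is used
# to link the artifact to the execution below.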
artifact = link_execution_to_input_artifact( + store=mlmd_store, + execution_id=execution.id, + uri=artifact_uri, + input_name=input_name, + ) + if artifact is None: + # TODO: Maybe there is a better way to handle missing upstream artifacts + continue + + input_artifact_ids.append(dict( + id=artifact.id, + name=input_name, + uri=artifact.uri, + )) + print('Found Input Artifact: ' + str(dict( + input_name=input_name, + id=artifact.id, + uri=artifact.uri, + ))) + + execution_id = execution.id + context_id = run_context.id + + obj.metadata.labels[METADATA_EXECUTION_ID_LABEL_KEY] = execution_id + obj.metadata.labels[METADATA_CONTEXT_ID_LABEL_KEY] = context_id + + metadata_to_add = { + 'labels': { + METADATA_EXECUTION_ID_LABEL_KEY: str(execution_id), + METADATA_CONTEXT_ID_LABEL_KEY: str(context_id), + }, + 'annotations': { + METADATA_INPUT_ARTIFACT_IDS_ANNOTATION_KEY: json.dumps(input_artifact_ids), + }, + } + + patch_pod_metadata( + namespace=obj.metadata.namespace, + pod_name=obj.metadata.name, + patch=metadata_to_add, + ) + pod_name_to_execution_id[obj.metadata.name] = execution_id + workflow_name_to_context_id[argo_workflow_name] = context_id + + print('New execution id: {}, context id: {} for pod {}.'.format(execution_id, context_id, obj.metadata.name)) + + print('Execution: ' + str(dict( + context_id=context_id, + context_name=argo_workflow_name, + execution_id=execution_id, + execution_name=obj.metadata.name, + component_name=component_name, + ))) + + # TODO: Log input parameters as execution options. + # Unfortunately, DSL compiler loses the information about inputs and their arguments. + + if ( + obj.metadata.name not in pods_with_written_metadata + and ( + obj.metadata.labels.get(ARGO_COMPLETED_LABEL_KEY, 'false') == 'true' + or ARGO_OUTPUTS_ANNOTATION_KEY in obj.metadata.annotations + ) + ): + artifact_ids = [] + + if ARGO_OUTPUTS_ANNOTATION_KEY in obj.metadata.annotations: # Should be present + argo_outputs = json.loads(obj.metadata.annotations[ARGO_OUTPUTS_ANNOTATION_KEY]) + argo_output_artifacts = {} + + for artifact in argo_outputs.get('artifacts', []): + art_name = artifact['name'] + output_prefix = argo_template_name + '-' + if art_name.startswith(output_prefix): + art_name = art_name[len(output_prefix):] + argo_output_artifacts[art_name] = artifact + + output_artifacts = [] + for name, art in argo_output_artifacts.items(): + artifact_uri = argo_artifact_to_uri(art) + if not artifact_uri: + continue + artifact_type_name = argo_output_name_to_type.get(name, 'NoType') # Cannot be None or '' + + print('Adding Output Artifact: ' + str(dict( + output_name=name, + uri=artifact_uri, + type=artifact_type_name, + ))) + + artifact = create_new_output_artifact( + store=mlmd_store, + execution_id=execution_id, + context_id=context_id, + uri=artifact_uri, + type_name=artifact_type_name, + output_name=name, + #run_id='Context_' + str(context_id) + '_run', + run_id=argo_workflow_name, + argo_artifact=art, + ) + + artifact_ids.append(dict( + id=artifact.id, + name=name, + uri=artifact_uri, + type=artifact_type_name, + )) + + metadata_to_add = { + 'labels': { + METADATA_WRITTEN_LABEL_KEY: 'true', + }, + 'annotations': { + METADATA_OUTPUT_ARTIFACT_IDS_ANNOTATION_KEY: json.dumps(artifact_ids), + }, + } + + patch_pod_metadata( + namespace=obj.metadata.namespace, + pod_name=obj.metadata.name, + patch=metadata_to_add, + ) + + pods_with_written_metadata.add(obj.metadata.name) + + except Exception as e: + import traceback + print(traceback.format_exc()) diff --git a/backend/requirements.in 
b/backend/requirements.in new file mode 100644 index 00000000000..14f829bb123 --- /dev/null +++ b/backend/requirements.in @@ -0,0 +1 @@ +tfx==0.21.2 diff --git a/backend/requirements.txt b/backend/requirements.txt new file mode 100644 index 00000000000..130b3a5b99d --- /dev/null +++ b/backend/requirements.txt @@ -0,0 +1,134 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile requirements.in +# +absl-py==0.8.1 # via ml-metadata, tensorboard, tensorflow, tensorflow-data-validation, tensorflow-model-analysis, tensorflow-transform, tfx, tfx-bsl +apache-beam[gcp]==2.17.0 # via tensorflow-data-validation, tensorflow-model-analysis, tensorflow-transform, tfx, tfx-bsl +astor==0.8.1 # via tensorflow +attrs==19.3.0 # via jsonschema +avro-python3==1.9.1 # via apache-beam, tensorflow-data-validation, tensorflow-model-analysis, tfx-bsl +backcall==0.1.0 # via ipython +bleach==3.1.1 # via nbconvert +cachetools==3.1.1 # via apache-beam, google-auth +certifi==2019.11.28 # via requests +chardet==3.0.4 # via requests +click==7.0 # via tfx +crcmod==1.7 # via apache-beam +decorator==4.4.1 # via ipython, traitlets +defusedxml==0.6.0 # via nbconvert +dill==0.3.0 # via apache-beam +docker==4.2.0 # via tfx +docopt==0.6.2 # via hdfs +entrypoints==0.3 # via nbconvert +fastavro==0.21.24 # via apache-beam +fasteners==0.15 # via google-apitools +future==0.18.2 # via apache-beam +gast==0.2.2 # via tensorflow +google-api-core[grpc]==1.16.0 # via google-cloud-bigtable, google-cloud-core, google-cloud-datastore, google-cloud-pubsub +google-api-python-client==1.7.11 # via tfx +google-apitools==0.5.28 # via apache-beam +google-auth-httplib2==0.0.3 # via google-api-python-client +google-auth-oauthlib==0.4.1 # via tensorboard +google-auth==1.11.2 # via google-api-core, google-api-python-client, google-auth-httplib2, google-auth-oauthlib, tensorboard +google-cloud-bigquery==1.17.1 # via apache-beam +google-cloud-bigtable==1.0.0 # via apache-beam +google-cloud-core==1.3.0 # via apache-beam, google-cloud-bigquery, google-cloud-bigtable, google-cloud-datastore +google-cloud-datastore==1.7.4 # via apache-beam +google-cloud-pubsub==1.0.2 # via apache-beam +google-pasta==0.1.8 # via tensorflow +google-resumable-media==0.4.1 # via google-cloud-bigquery +googleapis-common-protos[grpc]==1.51.0 # via google-api-core, grpc-google-iam-v1, tensorflow-metadata +grpc-google-iam-v1==0.12.3 # via google-cloud-bigtable, google-cloud-pubsub +grpcio==1.27.1 # via apache-beam, google-api-core, googleapis-common-protos, grpc-google-iam-v1, tensorboard, tensorflow, tensorflow-serving-api, tfx +h5py==2.10.0 # via keras-applications +hdfs==2.5.8 # via apache-beam +httplib2==0.12.0 # via apache-beam, google-api-python-client, google-apitools, google-auth-httplib2, oauth2client +idna==2.9 # via requests +importlib-metadata==1.5.0 # via jsonschema +ipykernel==5.1.4 # via ipywidgets, jupyter, jupyter-console, notebook, qtconsole +ipython-genutils==0.2.0 # via nbformat, notebook, qtconsole, traitlets +ipython==7.9.0 # via ipykernel, ipywidgets, jupyter-console, tensorflow-data-validation +ipywidgets==7.5.1 # via jupyter, tensorflow-model-analysis +jedi==0.16.0 # via ipython +jinja2==2.11.1 # via nbconvert, notebook, tfx +joblib==0.14.1 # via scikit-learn, tensorflow-data-validation +jsonschema==3.2.0 # via nbformat +jupyter-client==6.0.0 # via ipykernel, jupyter-console, notebook, qtconsole +jupyter-console==6.1.0 # via jupyter +jupyter-core==4.6.3 # via jupyter-client, nbconvert, nbformat, notebook, qtconsole 
+jupyter==1.0.0 # via tensorflow-model-analysis +keras-applications==1.0.8 # via tensorflow +keras-preprocessing==1.1.0 # via tensorflow +markdown==3.2.1 # via tensorboard +markupsafe==1.1.1 # via jinja2 +mistune==0.8.4 # via nbconvert +ml-metadata==0.21.2 # via tfx +mock==2.0.0 # via apache-beam +monotonic==1.5 # via fasteners +nbconvert==5.6.1 # via jupyter, notebook +nbformat==5.0.4 # via ipywidgets, nbconvert, notebook +notebook==6.0.3 # via jupyter, widgetsnbextension +numpy==1.18.1 # via h5py, keras-applications, keras-preprocessing, opt-einsum, pandas, pyarrow, scikit-learn, scipy, tensorboard, tensorflow, tensorflow-data-validation, tensorflow-model-analysis, tensorflow-transform, tfx-bsl +oauth2client==3.0.0 # via apache-beam, google-apitools +oauthlib==3.1.0 # via requests-oauthlib +opt-einsum==3.1.0 # via tensorflow +pandas==0.25.3 # via tensorflow-data-validation, tensorflow-model-analysis +pandocfilters==1.4.2 # via nbconvert +parso==0.6.1 # via jedi +pbr==5.4.4 # via mock +pexpect==4.8.0 # via ipython +pickleshare==0.7.5 # via ipython +prometheus-client==0.7.1 # via notebook +prompt-toolkit==2.0.10 # via ipython, jupyter-console +protobuf==3.11.3 # via apache-beam, google-api-core, google-cloud-bigquery, googleapis-common-protos, ml-metadata, tensorboard, tensorflow, tensorflow-data-validation, tensorflow-metadata, tensorflow-model-analysis, tensorflow-serving-api, tensorflow-transform, tfx, tfx-bsl +ptyprocess==0.6.0 # via pexpect, terminado +pyarrow==0.15.1 # via apache-beam, tensorflow-data-validation, tensorflow-model-analysis, tfx, tfx-bsl +pyasn1-modules==0.2.8 # via google-auth, oauth2client +pyasn1==0.4.8 # via oauth2client, pyasn1-modules, rsa +pydot==1.4.1 # via apache-beam, tensorflow-transform +pygments==2.5.2 # via ipython, jupyter-console, nbconvert, qtconsole +pymongo==3.10.1 # via apache-beam +pyparsing==2.4.6 # via pydot +pyrsistent==0.15.7 # via jsonschema +python-dateutil==2.8.1 # via apache-beam, jupyter-client, pandas +pytz==2019.3 # via apache-beam, google-api-core, pandas +pyyaml==5.3 # via tfx +pyzmq==19.0.0 # via jupyter-client, notebook +qtconsole==4.6.0 # via jupyter +requests-oauthlib==1.3.0 # via google-auth-oauthlib +requests==2.23.0 # via docker, google-api-core, hdfs, requests-oauthlib, tensorboard +rsa==4.0 # via google-auth, oauth2client +scikit-learn==0.21.3 # via tensorflow-data-validation +scipy==1.4.1 # via scikit-learn, tensorflow, tensorflow-model-analysis +send2trash==1.5.0 # via notebook +six==1.14.0 # via absl-py, bleach, docker, fasteners, google-api-core, google-api-python-client, google-apitools, google-auth, google-pasta, google-resumable-media, grpcio, h5py, hdfs, jsonschema, keras-preprocessing, ml-metadata, mock, oauth2client, prompt-toolkit, protobuf, pyarrow, pyrsistent, python-dateutil, tensorboard, tensorflow, tensorflow-data-validation, tensorflow-model-analysis, tensorflow-transform, tfx, tfx-bsl, traitlets, websocket-client +tensorboard==2.1.0 # via tensorflow +tensorflow-data-validation==0.21.4 # via tfx +tensorflow-estimator==2.1.0 # via tensorflow +tensorflow-metadata==0.21.1 # via tensorflow-data-validation, tensorflow-model-analysis, tensorflow-transform, tfx-bsl +tensorflow-model-analysis==0.21.5 # via -r requirements.in (line 2), tfx +tensorflow-serving-api==2.1.0 # via tfx, tfx-bsl +tensorflow-transform==0.21.2 # via tensorflow-data-validation, tfx +tensorflow==2.1.0 # via ml-metadata, tensorflow-data-validation, tensorflow-model-analysis, tensorflow-serving-api, tensorflow-transform, tfx, tfx-bsl 
+termcolor==1.1.0 # via tensorflow +terminado==0.8.3 # via notebook +testpath==0.4.4 # via nbconvert +tfx-bsl==0.21.3 # via tensorflow-data-validation, tensorflow-model-analysis, tensorflow-transform, tfx +tfx==0.21.2 # via -r requirements.in (line 1) +tornado==6.0.3 # via ipykernel, jupyter-client, notebook, terminado +traitlets==4.3.3 # via ipykernel, ipython, ipywidgets, jupyter-client, jupyter-core, nbconvert, nbformat, notebook, qtconsole +uritemplate==3.0.1 # via google-api-python-client +urllib3==1.25.8 # via requests +wcwidth==0.1.8 # via prompt-toolkit +webencodings==0.5.1 # via bleach +websocket-client==0.57.0 # via docker +werkzeug==1.0.0 # via tensorboard +wheel==0.34.2 # via tensorboard, tensorflow +widgetsnbextension==3.5.1 # via ipywidgets +wrapt==1.12.0 # via tensorflow +zipp==1.2.0 # via importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/backend/src/apiserver/BUILD.bazel b/backend/src/apiserver/BUILD.bazel index 8489d944f59..cddbf491fba 100644 --- a/backend/src/apiserver/BUILD.bazel +++ b/backend/src/apiserver/BUILD.bazel @@ -18,18 +18,14 @@ go_library( "//backend/src/apiserver/server:go_default_library", "//backend/src/apiserver/storage:go_default_library", "//backend/src/common/util:go_default_library", - "//backend/src/crd/pkg/client/clientset/versioned/typed/scheduledworkflow/v1beta1:go_default_library", - "@com_github_argoproj_argo//pkg/client/clientset/versioned/typed/workflow/v1alpha1:go_default_library", "@com_github_cenkalti_backoff//:go_default_library", "@com_github_fsnotify_fsnotify//:go_default_library", "@com_github_golang_glog//:go_default_library", "@com_github_grpc_ecosystem_grpc_gateway//runtime:go_default_library", "@com_github_jinzhu_gorm//:go_default_library", "@com_github_jinzhu_gorm//dialects/sqlite:go_default_library", - "@com_github_masterminds_squirrel//:go_default_library", "@com_github_minio_minio_go//:go_default_library", "@com_github_spf13_viper//:go_default_library", - "@io_k8s_client_go//kubernetes/typed/core/v1:go_default_library", "@org_golang_google_grpc//:go_default_library", "@org_golang_google_grpc//reflection:go_default_library", ], diff --git a/backend/src/apiserver/client/BUILD.bazel b/backend/src/apiserver/client/BUILD.bazel index 25f6be6c30d..4e33d9c5c04 100644 --- a/backend/src/apiserver/client/BUILD.bazel +++ b/backend/src/apiserver/client/BUILD.bazel @@ -1,28 +1,59 @@ -load("@io_bazel_rules_go//go:def.bzl", "go_library") +load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") go_library( name = "go_default_library", srcs = [ + "argo.go", + "argo_fake.go", + "kfam.go", + "kfam_fake.go", + "kubernetes_core.go", + "kubernetes_core_fake.go", "minio.go", - "pod.go", - "scheduled_workflow.go", + "pod_fake.go", + "scheduled_workflow_fake.go", "sql.go", - "workflow.go", + "swf.go", + "swf_fake.go", + "workflow_fake.go", ], importpath = "github.com/kubeflow/pipelines/backend/src/apiserver/client", visibility = ["//visibility:public"], deps = [ + "//backend/src/common/util:go_default_library", + "//backend/src/crd/pkg/apis/scheduledworkflow/v1beta1:go_default_library", "//backend/src/crd/pkg/client/clientset/versioned:go_default_library", "//backend/src/crd/pkg/client/clientset/versioned/typed/scheduledworkflow/v1beta1:go_default_library", + "@com_github_argoproj_argo//pkg/apis/workflow/v1alpha1:go_default_library", "@com_github_argoproj_argo//pkg/client/clientset/versioned:go_default_library", 
"@com_github_argoproj_argo//pkg/client/clientset/versioned/typed/workflow/v1alpha1:go_default_library", "@com_github_cenkalti_backoff//:go_default_library", "@com_github_go_sql_driver_mysql//:go_default_library", "@com_github_golang_glog//:go_default_library", "@com_github_minio_minio_go//:go_default_library", + "@com_github_minio_minio_go//pkg/credentials:go_default_library", "@com_github_pkg_errors//:go_default_library", + "@io_k8s_api//core/v1:go_default_library", + "@io_k8s_api//policy/v1beta1:go_default_library", + "@io_k8s_apimachinery//pkg/apis/meta/v1:go_default_library", + "@io_k8s_apimachinery//pkg/types:go_default_library", + "@io_k8s_apimachinery//pkg/watch:go_default_library", "@io_k8s_client_go//kubernetes:go_default_library", "@io_k8s_client_go//kubernetes/typed/core/v1:go_default_library", "@io_k8s_client_go//rest:go_default_library", ], ) + +go_test( + name = "go_default_test", + srcs = [ + "kfam_test.go", + "sql_test.go", + ], + data = glob(["test/**/*"]), # keep + embed = [":go_default_library"], + deps = [ + "@com_github_go_sql_driver_mysql//:go_default_library", + "@com_github_stretchr_testify//assert:go_default_library", + ], +) diff --git a/backend/src/apiserver/client/workflow.go b/backend/src/apiserver/client/argo.go similarity index 52% rename from backend/src/apiserver/client/workflow.go rename to backend/src/apiserver/client/argo.go index 6ff692eb888..0fb78360437 100644 --- a/backend/src/apiserver/client/workflow.go +++ b/backend/src/apiserver/client/argo.go @@ -18,40 +18,42 @@ import ( "time" argoclient "github.com/argoproj/argo/pkg/client/clientset/versioned" - "github.com/argoproj/argo/pkg/client/clientset/versioned/typed/workflow/v1alpha1" + argoprojv1alpha1 "github.com/argoproj/argo/pkg/client/clientset/versioned/typed/workflow/v1alpha1" "github.com/cenkalti/backoff" "github.com/golang/glog" "github.com/pkg/errors" "k8s.io/client-go/rest" ) -func CreateWorkflowClient(namespace string) (v1alpha1.WorkflowInterface, error) { - restConfig, err := rest.InClusterConfig() - if err != nil { - return nil, errors.Wrap(err, "Failed to initialize workflow client.") - } - wfClientSet := argoclient.NewForConfigOrDie(restConfig) - wfClient := wfClientSet.ArgoprojV1alpha1().Workflows(namespace) - return wfClient, nil +type ArgoClientInterface interface { + Workflow(namespace string) argoprojv1alpha1.WorkflowInterface +} + +type ArgoClient struct { + argoProjClient argoprojv1alpha1.ArgoprojV1alpha1Interface } -// creates a new client for the Kubernetes Workflow CRD. -func CreateWorkflowClientOrFatal(namespace string, initConnectionTimeout time.Duration) v1alpha1.WorkflowInterface { - var wfClient v1alpha1.WorkflowInterface - var err error +func (argoClient *ArgoClient) Workflow(namespace string) argoprojv1alpha1.WorkflowInterface { + return argoClient.argoProjClient.Workflows(namespace) +} + +func NewArgoClientOrFatal(initConnectionTimeout time.Duration) *ArgoClient { + var argoProjClient argoprojv1alpha1.ArgoprojV1alpha1Interface var operation = func() error { - wfClient, err = CreateWorkflowClient(namespace) + restConfig, err := rest.InClusterConfig() if err != nil { - return err + return errors.Wrap(err, "Failed to initialize the RestConfig") } + argoProjClient = argoclient.NewForConfigOrDie(restConfig).ArgoprojV1alpha1() return nil } + b := backoff.NewExponentialBackOff() b.MaxElapsedTime = initConnectionTimeout - err = backoff.Retry(operation, b) + err := backoff.Retry(operation, b) if err != nil { - glog.Fatalf("Failed to create workflow client. 
Error: %v", err) + glog.Fatalf("Failed to create ArgoClient. Error: %v", err) } - return wfClient + return &ArgoClient{argoProjClient} } diff --git a/backend/src/apiserver/client/argo_fake.go b/backend/src/apiserver/client/argo_fake.go new file mode 100644 index 00000000000..5a2b212984b --- /dev/null +++ b/backend/src/apiserver/client/argo_fake.go @@ -0,0 +1,74 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package client + +import ( + argoprojv1alpha1 "github.com/argoproj/argo/pkg/client/clientset/versioned/typed/workflow/v1alpha1" + "github.com/kubeflow/pipelines/backend/src/common/util" + "github.com/pkg/errors" +) + +type FakeArgoClient struct { + workflowClientFake *FakeWorkflowClient +} + +func NewFakeArgoClient() *FakeArgoClient { + return &FakeArgoClient{NewWorkflowClientFake()} +} + +func (c *FakeArgoClient) Workflow(namespace string) argoprojv1alpha1.WorkflowInterface { + if len(namespace) == 0 { + panic(util.NewResourceNotFoundError("Namespace", namespace)) + } + return c.workflowClientFake +} + +func (c *FakeArgoClient) GetWorkflowCount() int { + return len(c.workflowClientFake.workflows) +} + +func (c *FakeArgoClient) GetWorkflowKeys() map[string]bool { + result := map[string]bool{} + for key := range c.workflowClientFake.workflows { + result[key] = true + } + return result +} + +func (c *FakeArgoClient) IsTerminated(name string) (bool, error) { + workflow, ok := c.workflowClientFake.workflows[name] + if !ok { + return false, errors.New("No workflow found with name: " + name) + } + + activeDeadlineSeconds := workflow.Spec.ActiveDeadlineSeconds + if activeDeadlineSeconds == nil { + return false, errors.New("No ActiveDeadlineSeconds found in workflow with name: " + name) + } + + return *activeDeadlineSeconds == 0, nil +} + +type FakeArgoClientWithBadWorkflow struct { + workflowClientFake *FakeBadWorkflowClient +} + +func NewFakeArgoClientWithBadWorkflow() *FakeArgoClientWithBadWorkflow { + return &FakeArgoClientWithBadWorkflow{&FakeBadWorkflowClient{}} +} + +func (c *FakeArgoClientWithBadWorkflow) Workflow(namespace string) argoprojv1alpha1.WorkflowInterface { + return c.workflowClientFake +} diff --git a/backend/src/apiserver/client/kfam.go b/backend/src/apiserver/client/kfam.go new file mode 100644 index 00000000000..e47bf70378a --- /dev/null +++ b/backend/src/apiserver/client/kfam.go @@ -0,0 +1,100 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
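
The `ArgoClientInterface`/`FakeArgoClient` pair above replaces the old single-namespace `WorkflowInterface`: callers now pick the namespace per call, which is what multi-user mode needs, and tests substitute the fake. A hedged usage sketch (the namespace and workflow name are illustrative):

```go
package main

import (
	"fmt"
	"time"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"

	"github.com/kubeflow/pipelines/backend/src/apiserver/client"
)

func main() {
	// Dials the in-cluster API server with exponential backoff; a test would
	// construct client.NewFakeArgoClient() instead.
	var argo client.ArgoClientInterface = client.NewArgoClientOrFatal(time.Minute)

	// The namespace is resolved per request rather than fixed at startup.
	wf, err := argo.Workflow("user-ns").Get("hello-world-abc123", metav1.GetOptions{})
	if err != nil {
		fmt.Println("get workflow failed:", err)
		return
	}
	fmt.Println("workflow phase:", wf.Status.Phase)
}
```
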
+ +package client + +import ( + "encoding/json" + "fmt" + "net/http" + "time" + + "github.com/kubeflow/pipelines/backend/src/common/util" + "github.com/pkg/errors" +) + +type KFAMClientInterface interface { + IsAuthorized(userIdentity string, namespace string) (bool, error) +} + +type KFAMClient struct { + kfamServiceUrl string +} + +type User struct { + Kind string + Name string +} + +type RoleRef struct { + ApiGroup string + Kind string + Name string +} + +type Binding struct { + User User + ReferredNamespace string + RoleRef RoleRef +} + +type Bindings struct { + Bindings []Binding +} + +const ( + HTTP_TIMEOUT_SECONDS = 10 +) + +func (c *KFAMClient) IsAuthorized(userIdentity string, namespace string) (bool, error) { + req, err := http.NewRequest("GET", c.kfamServiceUrl, nil) + if err != nil { + return false, util.NewInternalServerError(err, "Failed to create a KFAM http request.") + } + q := req.URL.Query() + q.Add("user", userIdentity) + req.URL.RawQuery = q.Encode() + + var httpClient = &http.Client{Timeout: HTTP_TIMEOUT_SECONDS * time.Second} + + resp, err := httpClient.Get(req.URL.String()) + if err != nil { + return false, util.NewInternalServerError(err, "Failed to connect to the KFAM service.") + } + if resp.StatusCode != http.StatusOK { + return false, util.NewInternalServerError(errors.New("Requests to the KFAM service failed."), resp.Status) + } + defer resp.Body.Close() + + jsonBindings := new(Bindings) + err = json.NewDecoder(resp.Body).Decode(jsonBindings) + + if err != nil { + return false, util.NewInternalServerError(err, "Failed to parse KFAM response.") + } + + nsFound := false + for _, jsonBinding := range jsonBindings.Bindings { + if jsonBinding.ReferredNamespace == namespace { + nsFound = true + break + } + } + return nsFound, nil +} + +func NewKFAMClient(kfamServiceHost string, kfamServicePort string) *KFAMClient { + kfamServiceUrl := fmt.Sprintf("http://%s:%s/kfam/v1/bindings", kfamServiceHost, kfamServicePort) + return &KFAMClient{kfamServiceUrl} +} diff --git a/backend/src/apiserver/client/kfam_fake.go b/backend/src/apiserver/client/kfam_fake.go new file mode 100644 index 00000000000..f7cc606f4bd --- /dev/null +++ b/backend/src/apiserver/client/kfam_fake.go @@ -0,0 +1,37 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
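
`IsAuthorized` treats any binding whose `referredNamespace` matches as sufficient, regardless of the bound role. A sketch of how a handler might gate a namespaced request on it; the host, port, user, and namespace values are illustrative:

```go
package main

import (
	"errors"
	"fmt"

	"github.com/kubeflow/pipelines/backend/src/apiserver/client"
)

func authorize(kfam client.KFAMClientInterface, user, namespace string) error {
	ok, err := kfam.IsAuthorized(user, namespace)
	if err != nil {
		// KFAM unreachable, a non-200 response, or an unparsable body.
		return fmt.Errorf("authorization check failed: %w", err)
	}
	if !ok {
		return errors.New("no binding for " + user + " in namespace " + namespace)
	}
	return nil
}

func main() {
	kfam := client.NewKFAMClient("profiles-kfam", "8081")
	fmt.Println(authorize(kfam, "user@example.com", "team-a"))
}
```
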
+ +package client + +type FakeKFAMClientAuthorized struct { +} + +func NewFakeKFAMClientAuthorized() *FakeKFAMClientAuthorized { + return &FakeKFAMClientAuthorized{} +} + +func (c *FakeKFAMClientAuthorized) IsAuthorized(userIdentity string, namespace string) (bool, error) { + return true, nil +} + +type FakeKFAMClientUnauthorized struct { +} + +func NewFakeKFAMClientUnauthorized() *FakeKFAMClientUnauthorized { + return &FakeKFAMClientUnauthorized{} +} + +func (c *FakeKFAMClientUnauthorized) IsAuthorized(userIdentity string, namespace string) (bool, error) { + return false, nil +} \ No newline at end of file diff --git a/backend/src/apiserver/client/kfam_test.go b/backend/src/apiserver/client/kfam_test.go new file mode 100644 index 00000000000..8dd61b0770f --- /dev/null +++ b/backend/src/apiserver/client/kfam_test.go @@ -0,0 +1,46 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package client +import( + "fmt" + "net/http" + "net/http/httptest" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestIsAuthorize(t *testing.T) { + expect_response := []byte (`{"bindings":[{"user": {"kind": "User","name": "userA@google.com"},"referredNamespace": "nsA","RoleRef": {"apiGroup": "","kind": "ClusterRole", "name":"edit"}},{"user": {"kind": "User","name": "userA@google.com"},"referredNamespace": "nsB","RoleRef": {"apiGroup": "","kind": "ClusterRole", "name":"admin"}}]}`) + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(200) + w.Write(expect_response) + })) + defer srv.Close() + fmt.Println(srv.URL) + kfam_client := NewKFAMClient("","") + kfam_client.kfamServiceUrl = srv.URL + authorized, err := kfam_client.IsAuthorized("user", "nsA") + assert.Nil(t, err) + assert.True(t, authorized) + + authorized, err = kfam_client.IsAuthorized("user", "nsB") + assert.Nil(t, err) + assert.True(t, authorized) + + authorized, err = kfam_client.IsAuthorized("user", "nsC") + assert.Nil(t, err) + assert.False(t, authorized) +} \ No newline at end of file diff --git a/backend/src/apiserver/client/pod.go b/backend/src/apiserver/client/kubernetes_core.go similarity index 56% rename from backend/src/apiserver/client/pod.go rename to backend/src/apiserver/client/kubernetes_core.go index 637c7e9e93d..5e1282906a4 100644 --- a/backend/src/apiserver/client/pod.go +++ b/backend/src/apiserver/client/kubernetes_core.go @@ -10,7 +10,19 @@ import ( "time" ) -func createPodClient(namespace string) (v1.PodInterface, error) { +type KubernetesCoreInterface interface { + PodClient(namespace string) v1.PodInterface +} + +type KubernetesCore struct { + coreV1Client v1.CoreV1Interface +} + +func (c *KubernetesCore) PodClient(namespace string) v1.PodInterface { + return c.coreV1Client.Pods(namespace) +} + +func createKubernetesCore() (KubernetesCoreInterface, error) { restConfig, err := rest.InClusterConfig() if err != nil { return nil, errors.Wrap(err, "Failed to initialize kubernetes client.") @@ -20,15 +32,15 @@ func 
createPodClient(namespace string) (v1.PodInterface, error) { if err != nil { return nil, errors.Wrap(err, "Failed to initialize kubernetes client set.") } - return clientSet.CoreV1().Pods(namespace), nil + return &KubernetesCore{clientSet.CoreV1()}, nil } -// CreatePodClientOrFatal creates a new client for the Kubernetes pod. -func CreatePodClientOrFatal(namespace string, initConnectionTimeout time.Duration) v1.PodInterface{ - var client v1.PodInterface +// CreateKubernetesCoreOrFatal creates a new client for the Kubernetes pod. +func CreateKubernetesCoreOrFatal(initConnectionTimeout time.Duration) KubernetesCoreInterface { + var client KubernetesCoreInterface var err error var operation = func() error { - client, err = createPodClient(namespace) + client, err = createKubernetesCore() if err != nil { return err } diff --git a/backend/src/apiserver/client/kubernetes_core_fake.go b/backend/src/apiserver/client/kubernetes_core_fake.go new file mode 100644 index 00000000000..1ea06ac5c08 --- /dev/null +++ b/backend/src/apiserver/client/kubernetes_core_fake.go @@ -0,0 +1,47 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package client + +import ( + "github.com/kubeflow/pipelines/backend/src/common/util" + v1 "k8s.io/client-go/kubernetes/typed/core/v1" +) + +type FakeKuberneteCoreClient struct { + podClientFake *FakePodClient +} + +func (c *FakeKuberneteCoreClient) PodClient(namespace string) v1.PodInterface { + if len(namespace) == 0 { + panic(util.NewResourceNotFoundError("Namespace", namespace)) + } + return c.podClientFake +} + +func NewFakeKuberneteCoresClient() *FakeKuberneteCoreClient { + return &FakeKuberneteCoreClient{&FakePodClient{}} +} + +type FakeKubernetesCoreClientWithBadPodClient struct { + podClientFake *FakeBadPodClient +} + +func NewFakeKubernetesCoreClientWithBadPodClient() *FakeKubernetesCoreClientWithBadPodClient { + return &FakeKubernetesCoreClientWithBadPodClient{&FakeBadPodClient{}} +} + +func (c *FakeKubernetesCoreClientWithBadPodClient) PodClient(namespace string) v1.PodInterface { + return c.podClientFake +} diff --git a/backend/src/apiserver/client/minio.go b/backend/src/apiserver/client/minio.go index 488bd522adb..682d6f6bea0 100644 --- a/backend/src/apiserver/client/minio.go +++ b/backend/src/apiserver/client/minio.go @@ -16,18 +16,41 @@ package client import ( "fmt" + "net/http" "time" "github.com/cenkalti/backoff" "github.com/golang/glog" minio "github.com/minio/minio-go" + credentials "github.com/minio/minio-go/pkg/credentials" "github.com/pkg/errors" ) +// createCredentialProvidersChain creates a chained providers credential for a minio client +func createCredentialProvidersChain(endpoint, accessKey, secretKey string) *credentials.Credentials { + // first try with static api key + if accessKey != "" && secretKey != "" { + return credentials.NewStaticV4(accessKey, secretKey, "") + } + // otherwise use a chained provider: minioEnv -> awsEnv -> IAM + providers := []credentials.Provider{ + &credentials.EnvMinio{}, + 
&credentials.EnvAWS{}, + &credentials.IAM{ + Client: &http.Client{ + Transport: http.DefaultTransport, + }, + }, + } + return credentials.New(&credentials.Chain{Providers: providers}) +} + func CreateMinioClient(minioServiceHost string, minioServicePort string, - accessKey string, secretKey string) (*minio.Client, error) { - minioClient, err := minio.New(fmt.Sprintf("%s:%s", minioServiceHost, minioServicePort), - accessKey, secretKey, false /* Secure connection */) + accessKey string, secretKey string, secure bool, region string) (*minio.Client, error) { + + endpoint := joinHostPort(minioServiceHost, minioServicePort) + cred := createCredentialProvidersChain(endpoint, accessKey, secretKey) + minioClient, err := minio.NewWithCredentials(endpoint, cred, secure, region) if err != nil { return nil, errors.Wrapf(err, "Error while creating minio client: %+v", err) } @@ -35,12 +58,12 @@ func CreateMinioClient(minioServiceHost string, minioServicePort string, } func CreateMinioClientOrFatal(minioServiceHost string, minioServicePort string, - accessKey string, secretKey string, initConnectionTimeout time.Duration) *minio.Client { + accessKey string, secretKey string, secure bool, region string, initConnectionTimeout time.Duration) *minio.Client { var minioClient *minio.Client var err error var operation = func() error { minioClient, err = CreateMinioClient(minioServiceHost, minioServicePort, - accessKey, secretKey) + accessKey, secretKey, secure, region) if err != nil { return err } @@ -54,3 +77,13 @@ func CreateMinioClientOrFatal(minioServiceHost string, minioServicePort string, } return minioClient } + +// joinHostPort combines host and port into a network address of the form "host:port". +// +// An empty port value results in "host" instead of "host:" (which net.JoinHostPort would return) +func joinHostPort(host, port string) string { + if port == "" { + return host + } + return fmt.Sprintf("%s:%s", host, port) +} diff --git a/backend/src/apiserver/resource/pod_fake.go b/backend/src/apiserver/client/pod_fake.go similarity index 99% rename from backend/src/apiserver/resource/pod_fake.go rename to backend/src/apiserver/client/pod_fake.go index cfe89a3e08a..68b87c3a938 100644 --- a/backend/src/apiserver/resource/pod_fake.go +++ b/backend/src/apiserver/client/pod_fake.go @@ -1,4 +1,4 @@ -package resource +package client import ( "errors" diff --git a/backend/src/apiserver/resource/scheduled_workflow_fake.go b/backend/src/apiserver/client/scheduled_workflow_fake.go similarity index 79% rename from backend/src/apiserver/resource/scheduled_workflow_fake.go rename to backend/src/apiserver/client/scheduled_workflow_fake.go index 47baefd7153..a8fe234386e 100644 --- a/backend/src/apiserver/resource/scheduled_workflow_fake.go +++ b/backend/src/apiserver/client/scheduled_workflow_fake.go @@ -12,38 +12,38 @@ // See the License for the specific language governing permissions and // limitations under the License. 
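
With `createCredentialProvidersChain`, explicit keys still take precedence, but a deployment can leave them empty and fall back to MinIO environment variables, then AWS environment variables, then the instance IAM role, which is what allows pointing the object store at S3 directly. Both call patterns, sketched with illustrative endpoints (note that `joinHostPort` drops the colon when the port is empty):

```go
package main

import (
	"time"

	"github.com/kubeflow/pipelines/backend/src/apiserver/client"
)

func main() {
	// Static keys: the chain resolves to credentials.NewStaticV4.
	inCluster := client.CreateMinioClientOrFatal("minio-service", "9000",
		"minio", "minio123", false /* secure */, "" /* region */, time.Minute)

	// Empty keys: EnvMinio -> EnvAWS -> IAM. The empty port yields endpoint
	// "s3.amazonaws.com" rather than "s3.amazonaws.com:".
	s3 := client.CreateMinioClientOrFatal("s3.amazonaws.com", "",
		"", "", true /* secure */, "us-east-1", time.Minute)

	_, _ = inCluster, s3
}
```
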
-package resource +package client import ( "errors" "github.com/golang/glog" "github.com/kubeflow/pipelines/backend/src/crd/pkg/apis/scheduledworkflow/v1beta1" - "k8s.io/apimachinery/pkg/apis/meta/v1" + v1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/types" "k8s.io/apimachinery/pkg/watch" ) type FakeScheduledWorkflowClient struct { - workflows map[string]*v1beta1.ScheduledWorkflow + scheduledWorkflows map[string]*v1beta1.ScheduledWorkflow } func NewScheduledWorkflowClientFake() *FakeScheduledWorkflowClient { return &FakeScheduledWorkflowClient{ - workflows: make(map[string]*v1beta1.ScheduledWorkflow), + scheduledWorkflows: make(map[string]*v1beta1.ScheduledWorkflow), } } -func (c *FakeScheduledWorkflowClient) Create(workflow *v1beta1.ScheduledWorkflow) (*v1beta1.ScheduledWorkflow, error) { - workflow.UID = "123" - workflow.Namespace = "default" - workflow.Name = workflow.GenerateName - c.workflows[workflow.Name] = workflow - return workflow, nil +func (c *FakeScheduledWorkflowClient) Create(scheduledWorkflow *v1beta1.ScheduledWorkflow) (*v1beta1.ScheduledWorkflow, error) { + scheduledWorkflow.UID = "123e4567-e89b-12d3-a456-426655440000" + scheduledWorkflow.Namespace = "ns1" + scheduledWorkflow.Name = scheduledWorkflow.GenerateName + c.scheduledWorkflows[scheduledWorkflow.Name] = scheduledWorkflow + return scheduledWorkflow, nil } func (c *FakeScheduledWorkflowClient) Delete(name string, options *v1.DeleteOptions) error { - delete(c.workflows, name) + delete(c.scheduledWorkflows, name) return nil } @@ -52,9 +52,9 @@ func (c *FakeScheduledWorkflowClient) Patch(name string, pt types.PatchType, dat } func (c *FakeScheduledWorkflowClient) Get(name string, options v1.GetOptions) (*v1beta1.ScheduledWorkflow, error) { - workflow, ok := c.workflows[name] + scheduledWorkflow, ok := c.scheduledWorkflows[name] if ok { - return workflow, nil + return scheduledWorkflow, nil } return nil, errors.New("not found") } diff --git a/backend/src/apiserver/client/sql.go b/backend/src/apiserver/client/sql.go index 949de216491..b973d8dc8db 100644 --- a/backend/src/apiserver/client/sql.go +++ b/backend/src/apiserver/client/sql.go @@ -21,13 +21,25 @@ import ( ) func CreateMySQLConfig(user, password string, mysqlServiceHost string, - mysqlServicePort string, dbName string) *mysql.Config { + mysqlServicePort string, dbName string, mysqlGroupConcatMaxLen string, mysqlExtraParams map[string]string) *mysql.Config { + + params := map[string]string{ + "charset": "utf8", + "parseTime": "True", + "loc": "Local", + "group_concat_max_len": mysqlGroupConcatMaxLen, + } + + for k, v := range mysqlExtraParams { + params[k] = v + } + return &mysql.Config{ User: user, Passwd: password, Net: "tcp", Addr: fmt.Sprintf("%s:%s", mysqlServiceHost, mysqlServicePort), - Params: map[string]string{"charset": "utf8", "parseTime": "True", "loc": "Local"}, + Params: params, DBName: dbName, AllowNativePasswords: true, } diff --git a/backend/src/apiserver/client/sql_test.go b/backend/src/apiserver/client/sql_test.go new file mode 100644 index 00000000000..81d19cc8b5d --- /dev/null +++ b/backend/src/apiserver/client/sql_test.go @@ -0,0 +1,81 @@ +// Copyright 2018 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
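
Extra parameters are merged after the defaults, so a deployment can both override a default (such as `charset`) and append new driver options such as `tls`. A sketch of the resulting DSN, using illustrative values; the ordering of query parameters in `FormatDSN` output is not guaranteed:

```go
package main

import (
	"fmt"

	"github.com/kubeflow/pipelines/backend/src/apiserver/client"
)

func main() {
	cfg := client.CreateMySQLConfig("root", "", "mysql", "3306", "mlpipeline",
		"4194304", map[string]string{"tls": "true"})
	// Prints a DSN along the lines of:
	// root@tcp(mysql:3306)/mlpipeline?charset=utf8&group_concat_max_len=4194304&loc=Local&parseTime=True&tls=true
	fmt.Println(cfg.FormatDSN())
}
```
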
+// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package client + +import ( + "reflect" + "testing" + + "github.com/go-sql-driver/mysql" +) + +func TestCreateMySQLConfig(t *testing.T) { + type args struct { + user string + password string + host string + port string + dbName string + mysqlGroupConcatMaxLen string + mysqlExtraParams map[string]string + } + tests := []struct { + name string + args args + want *mysql.Config + }{ + { + name: "default config", + args: args{ + user: "root", + host: "mysql", + port: "3306", + mysqlGroupConcatMaxLen: "1024", + mysqlExtraParams: nil, + }, + want: &mysql.Config{ + User: "root", + Net: "tcp", + Addr: "mysql:3306", + Params: map[string]string{"charset": "utf8", "parseTime": "True", "loc": "Local", "group_concat_max_len": "1024"}, + AllowNativePasswords: true, + }, + }, + { + name: "extra parameters", + args: args{ + user: "root", + host: "mysql", + port: "3306", + mysqlGroupConcatMaxLen: "1024", + mysqlExtraParams: map[string]string{"tls": "true"}, + }, + want: &mysql.Config{ + User: "root", + Net: "tcp", + Addr: "mysql:3306", + Params: map[string]string{"charset": "utf8", "parseTime": "True", "loc": "Local", "group_concat_max_len": "1024", "tls": "true"}, + AllowNativePasswords: true, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := CreateMySQLConfig(tt.args.user, tt.args.password, tt.args.host, tt.args.port, tt.args.dbName, tt.args.mysqlGroupConcatMaxLen, tt.args.mysqlExtraParams); !reflect.DeepEqual(got, tt.want) { + t.Errorf("CreateMySQLConfig() = %#v, want %v", got, tt.want) + } + }) + } +} diff --git a/backend/src/apiserver/client/scheduled_workflow.go b/backend/src/apiserver/client/swf.go similarity index 69% rename from backend/src/apiserver/client/scheduled_workflow.go rename to backend/src/apiserver/client/swf.go index 4a88df8ed01..b6018a1e37b 100644 --- a/backend/src/apiserver/client/scheduled_workflow.go +++ b/backend/src/apiserver/client/swf.go @@ -24,16 +24,28 @@ import ( "k8s.io/client-go/rest" ) +type SwfClientInterface interface { + ScheduledWorkflow(namespace string) v1beta1.ScheduledWorkflowInterface +} + +type SwfClient struct { + swfV1beta1Client v1beta1.ScheduledworkflowV1beta1Interface +} + +func (swfClient *SwfClient) ScheduledWorkflow(namespace string) v1beta1.ScheduledWorkflowInterface { + return swfClient.swfV1beta1Client.ScheduledWorkflows(namespace) +} + // creates a new client for the Kubernetes ScheduledWorkflow CRD. 
-func CreateScheduledWorkflowClientOrFatal(namespace string, initConnectionTimeout time.Duration) v1beta1.ScheduledWorkflowInterface { - var swfClient v1beta1.ScheduledWorkflowInterface +func NewScheduledWorkflowClientOrFatal(initConnectionTimeout time.Duration) *SwfClient { + var swfClient v1beta1.ScheduledworkflowV1beta1Interface var operation = func() error { restConfig, err := rest.InClusterConfig() if err != nil { return err } swfClientSet := swfclient.NewForConfigOrDie(restConfig) - swfClient = swfClientSet.ScheduledworkflowV1beta1().ScheduledWorkflows(namespace) + swfClient = swfClientSet.ScheduledworkflowV1beta1() return nil } @@ -43,5 +55,5 @@ func CreateScheduledWorkflowClientOrFatal(namespace string, initConnectionTimeou glog.Fatalf("Failed to create scheduled workflow client. Error: %v", err) } - return swfClient + return &SwfClient{swfClient} } diff --git a/backend/src/apiserver/client/swf_fake.go b/backend/src/apiserver/client/swf_fake.go new file mode 100644 index 00000000000..f29c27a2c0d --- /dev/null +++ b/backend/src/apiserver/client/swf_fake.go @@ -0,0 +1,47 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package client + +import ( + "github.com/kubeflow/pipelines/backend/src/common/util" + "github.com/kubeflow/pipelines/backend/src/crd/pkg/client/clientset/versioned/typed/scheduledworkflow/v1beta1" +) + +type FakeSwfClient struct { + scheduledWorkflowClientFake *FakeScheduledWorkflowClient +} + +func NewFakeSwfClient() *FakeSwfClient { + return &FakeSwfClient{NewScheduledWorkflowClientFake()} +} + +func (c *FakeSwfClient) ScheduledWorkflow(namespace string) v1beta1.ScheduledWorkflowInterface { + if len(namespace) == 0 { + panic(util.NewResourceNotFoundError("Namespace", namespace)) + } + return c.scheduledWorkflowClientFake +} + +type FakeSwfClientWithBadWorkflow struct { + scheduledWorkflowClientFake *FakeBadScheduledWorkflowClient +} + +func NewFakeSwfClientWithBadWorkflow() *FakeSwfClientWithBadWorkflow { + return &FakeSwfClientWithBadWorkflow{&FakeBadScheduledWorkflowClient{}} +} + +func (c *FakeSwfClientWithBadWorkflow) ScheduledWorkflow(namespace string) v1beta1.ScheduledWorkflowInterface { + return c.scheduledWorkflowClientFake +} diff --git a/backend/src/apiserver/resource/workflow_fake.go b/backend/src/apiserver/client/workflow_fake.go similarity index 83% rename from backend/src/apiserver/resource/workflow_fake.go rename to backend/src/apiserver/client/workflow_fake.go index 257a5bcc546..25992bef655 100644 --- a/backend/src/apiserver/resource/workflow_fake.go +++ b/backend/src/apiserver/client/workflow_fake.go @@ -12,13 +12,14 @@ // See the License for the specific language governing permissions and // limitations under the License. 
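
Because the real and fake clients both satisfy `SwfClientInterface`, resource-manager tests can exercise the namespaced code path without a cluster; the fake panics on an empty namespace and stamps fixed values on create, which keeps assertions deterministic. A sketch:

```go
package main

import (
	"fmt"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"

	"github.com/kubeflow/pipelines/backend/src/apiserver/client"
	"github.com/kubeflow/pipelines/backend/src/crd/pkg/apis/scheduledworkflow/v1beta1"
)

func main() {
	swf := client.NewFakeSwfClient()
	created, err := swf.ScheduledWorkflow("ns1").Create(&v1beta1.ScheduledWorkflow{
		ObjectMeta: metav1.ObjectMeta{GenerateName: "job-"},
	})
	// The fake assigns a fixed UID and namespace "ns1", and promotes
	// GenerateName to Name.
	fmt.Println(created.UID, created.Namespace, created.Name, err)
}
```
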
-package resource +package client import ( "encoding/json" - "github.com/kubeflow/pipelines/backend/src/common/util" "strconv" + "github.com/kubeflow/pipelines/backend/src/common/util" + "github.com/argoproj/argo/pkg/apis/workflow/v1alpha1" "github.com/golang/glog" "github.com/pkg/errors" @@ -49,18 +50,6 @@ func (c *FakeWorkflowClient) Create(workflow *v1alpha1.Workflow) (*v1alpha1.Work return workflow, nil } -func (c *FakeWorkflowClient) GetWorkflowCount() int { - return len(c.workflows) -} - -func (c *FakeWorkflowClient) GetWorkflowKeys() map[string]bool { - result := map[string]bool{} - for key := range c.workflows { - result[key] = true - } - return result -} - func (c *FakeWorkflowClient) Get(name string, options v1.GetOptions) (*v1alpha1.Workflow, error) { workflow, ok := c.workflows[name] if ok { @@ -88,13 +77,13 @@ func (c *FakeWorkflowClient) Delete(name string, options *v1.DeleteOptions) erro } func (c *FakeWorkflowClient) DeleteCollection(options *v1.DeleteOptions, - listOptions v1.ListOptions) error { + listOptions v1.ListOptions) error { glog.Error("This fake method is not yet implemented.") return nil } func (c *FakeWorkflowClient) Patch(name string, pt types.PatchType, data []byte, - subresources ...string) (*v1alpha1.Workflow, error) { + subresources ...string) (*v1alpha1.Workflow, error) { var dat map[string]interface{} json.Unmarshal(data, &dat) @@ -129,20 +118,6 @@ func (c *FakeWorkflowClient) Patch(name string, pt types.PatchType, data []byte, return nil, errors.New("Failed to patch workflow") } -func (c *FakeWorkflowClient) isTerminated(name string) (bool, error) { - workflow, ok := c.workflows[name] - if !ok { - return false, errors.New("No workflow found with name: " + name) - } - - activeDeadlineSeconds := workflow.Spec.ActiveDeadlineSeconds - if activeDeadlineSeconds == nil { - return false, errors.New("No ActiveDeadlineSeconds found in workflow with name: " + name) - } - - return *activeDeadlineSeconds == 0, nil -} - type FakeBadWorkflowClient struct { FakeWorkflowClient } diff --git a/backend/src/apiserver/client_manager.go b/backend/src/apiserver/client_manager.go index 4a81fbf6e78..698c8296321 100644 --- a/backend/src/apiserver/client_manager.go +++ b/backend/src/apiserver/client_manager.go @@ -17,38 +17,41 @@ package main import ( "database/sql" "fmt" - "github.com/kubeflow/pipelines/backend/src/apiserver/common" - v1 "k8s.io/client-go/kubernetes/typed/core/v1" "os" "time" - workflowclient "github.com/argoproj/argo/pkg/client/clientset/versioned/typed/workflow/v1alpha1" "github.com/cenkalti/backoff" "github.com/golang/glog" - "github.com/jinzhu/gorm" _ "github.com/jinzhu/gorm/dialects/sqlite" "github.com/kubeflow/pipelines/backend/src/apiserver/client" + "github.com/kubeflow/pipelines/backend/src/apiserver/common" "github.com/kubeflow/pipelines/backend/src/apiserver/model" "github.com/kubeflow/pipelines/backend/src/apiserver/storage" "github.com/kubeflow/pipelines/backend/src/common/util" - scheduledworkflowclient "github.com/kubeflow/pipelines/backend/src/crd/pkg/client/clientset/versioned/typed/scheduledworkflow/v1beta1" "github.com/minio/minio-go" ) const ( - minioServiceHost = "MINIO_SERVICE_SERVICE_HOST" - minioServicePort = "MINIO_SERVICE_SERVICE_PORT" - mysqlServiceHost = "DBConfig.Host" - mysqlServicePort = "DBConfig.Port" - mysqlUser = "DBConfig.User" - mysqlPassword = "DBConfig.Password" - mysqlDBName = "DBConfig.DBName" + minioServiceHost = "MINIO_SERVICE_SERVICE_HOST" + minioServicePort = "MINIO_SERVICE_SERVICE_PORT" + minioServiceRegion = 
"MINIO_SERVICE_REGION" + minioServiceSecure = "MINIO_SERVICE_SECURE" + pipelineBucketName = "MINIO_PIPELINE_BUCKET_NAME" + pipelinePath = "MINIO_PIPELINE_PATH" + mysqlServiceHost = "DBConfig.Host" + mysqlServicePort = "DBConfig.Port" + mysqlUser = "DBConfig.User" + mysqlPassword = "DBConfig.Password" + mysqlDBName = "DBConfig.DBName" + mysqlGroupConcatMaxLen = "DBConfig.GroupConcatMaxLen" + kfamServiceHost = "PROFILES_KFAM_SERVICE_HOST" + kfamServicePort = "PROFILES_KFAM_SERVICE_PORT" + mysqlExtraParams = "DBConfig.ExtraParams" visualizationServiceHost = "ML_PIPELINE_VISUALIZATIONSERVER_SERVICE_HOST" visualizationServicePort = "ML_PIPELINE_VISUALIZATIONSERVER_SERVICE_PORT" - podNamespace = "POD_NAMESPACE" initConnectionTimeout = "InitConnectionTimeout" ) @@ -63,9 +66,10 @@ type ClientManager struct { dBStatusStore storage.DBStatusStoreInterface defaultExperimentStore storage.DefaultExperimentStoreInterface objectStore storage.ObjectStoreInterface - wfClient workflowclient.WorkflowInterface - swfClient scheduledworkflowclient.ScheduledWorkflowInterface - podClient v1.PodInterface + argoClient client.ArgoClientInterface + swfClient client.SwfClientInterface + k8sCoreClient client.KubernetesCoreInterface + kfamClient client.KFAMClientInterface time util.TimeInterface uuid util.UUIDGeneratorInterface } @@ -102,16 +106,20 @@ func (c *ClientManager) ObjectStore() storage.ObjectStoreInterface { return c.objectStore } -func (c *ClientManager) Workflow() workflowclient.WorkflowInterface { - return c.wfClient +func (c *ClientManager) ArgoClient() client.ArgoClientInterface { + return c.argoClient } -func (c *ClientManager) ScheduledWorkflow() scheduledworkflowclient.ScheduledWorkflowInterface { +func (c *ClientManager) SwfClient() client.SwfClientInterface { return c.swfClient } -func (c *ClientManager) PodClient() v1.PodInterface { - return c.podClient +func (c *ClientManager) KubernetesCoreClient() client.KubernetesCoreInterface { + return c.k8sCoreClient +} + +func (c *ClientManager) KFAMClient() client.KFAMClientInterface { + return c.kfamClient } func (c *ClientManager) Time() util.TimeInterface { @@ -141,18 +149,18 @@ func (c *ClientManager) init() { c.defaultExperimentStore = storage.NewDefaultExperimentStore(db) c.objectStore = initMinioClient(common.GetDurationConfig(initConnectionTimeout)) - c.wfClient = client.CreateWorkflowClientOrFatal( - common.GetStringConfig(podNamespace), common.GetDurationConfig(initConnectionTimeout)) + c.argoClient = client.NewArgoClientOrFatal(common.GetDurationConfig(initConnectionTimeout)) - c.swfClient = client.CreateScheduledWorkflowClientOrFatal( - common.GetStringConfig(podNamespace), common.GetDurationConfig(initConnectionTimeout)) + c.swfClient = client.NewScheduledWorkflowClientOrFatal(common.GetDurationConfig(initConnectionTimeout)) - c.podClient = client.CreatePodClientOrFatal( - common.GetStringConfig(podNamespace), common.GetDurationConfig(initConnectionTimeout)) + c.k8sCoreClient = client.CreateKubernetesCoreOrFatal(common.GetDurationConfig(initConnectionTimeout)) runStore := storage.NewRunStore(db, c.time) c.runStore = runStore + if common.IsMultiUserMode() { + c.kfamClient = client.NewKFAMClient(common.GetStringConfig(kfamServiceHost), common.GetStringConfig(kfamServicePort)) + } glog.Infof("Client manager initialized successfully") } @@ -204,11 +212,26 @@ func initDBClient(initConnectionTimeout time.Duration) *storage.DB { glog.Fatalf("Failed to initialize the databases.") } - response = 
db.Model(&model.ResourceReference{}).ModifyColumn("Payload", "longtext") + response = db.Model(&model.Experiment{}).RemoveIndex("Name") + if response.Error != nil { + glog.Fatalf("Failed to drop unique key on experiment name. Error: %s", response.Error) + } + + response = db.Model(&model.ResourceReference{}).ModifyColumn("Payload", "longtext not null") if response.Error != nil { glog.Fatalf("Failed to update the resource reference payload type. Error: %s", response.Error) } + response = db.Model(&model.RunDetail{}).AddIndex("experimentuuid_createatinsec", "ExperimentUUID", "CreatedAtInSec") + if response.Error != nil { + glog.Fatalf("Failed to create index experimentuuid_createatinsec on run_details. Error: %s", response.Error) + } + + response = db.Model(&model.RunDetail{}).AddIndex("experimentuuid_conditions_finishedatinsec", "ExperimentUUID", "Conditions", "FinishedAtInSec") + if response.Error != nil { + glog.Fatalf("Failed to create index experimentuuid_conditions_finishedatinsec on run_details. Error: %s", response.Error) + } + response = db.Model(&model.RunMetric{}). AddForeignKey("RunUUID", "run_details(UUID)", "CASCADE" /* onDelete */, "CASCADE" /* update */) if response.Error != nil { @@ -225,6 +248,25 @@ func initDBClient(initConnectionTimeout time.Duration) *storage.DB { if initializePipelineVersions { initPipelineVersionsFromPipelines(db) } + err = backfillExperimentIDToRunTable(db) + if err != nil { + glog.Fatalf("Failed to backfill experiment UUID in run_details table: %s", err) + } + + response = db.Model(&model.Pipeline{}).ModifyColumn("Description", "longtext not null") + if response.Error != nil { + glog.Fatalf("Failed to update pipeline description type. Error: %s", response.Error) + } + + // If the old unique index idx_pipeline_version_uuid_name on pipeline_versions exists, remove it. + rows, err := db.Raw(`show index from pipeline_versions where Key_name="idx_pipeline_version_uuid_name"`).Rows() + if err != nil { + glog.Fatalf("Failed to query pipeline_version table's indices. Error: %s", err) + } + if rows.Next() { + db.Exec(`drop index idx_pipeline_version_uuid_name on pipeline_versions`) + } + rows.Close() return storage.NewDB(db.DB(), storage.NewMySQLDialect()) } @@ -237,7 +279,10 @@ func initMysql(driverName string, initConnectionTimeout time.Duration) string { common.GetStringConfigWithDefault(mysqlPassword, ""), common.GetStringConfigWithDefault(mysqlServiceHost, "mysql"), common.GetStringConfigWithDefault(mysqlServicePort, "3306"), - "") + "", + common.GetStringConfigWithDefault(mysqlGroupConcatMaxLen, "1024"), + common.GetMapConfig(mysqlExtraParams), + ) var db *sql.DB var err error @@ -270,6 +315,11 @@ func initMysql(driverName string, initConnectionTimeout time.Duration) string { util.TerminateIfError(err) mysqlConfig.DBName = dbName + // When updating, return rows matched instead of rows affected. This counts rows that are being + // set as the same values as before. If updating using a primary key and rows matched is 0, then + // it means this row is not found. 
+ // Config reference: https://github.com/go-sql-driver/mysql#clientfoundrows + mysqlConfig.ClientFoundRows = true return mysqlConfig.FormatDSN() } @@ -279,29 +329,37 @@ func initMinioClient(initConnectionTimeout time.Duration) storage.ObjectStoreInt "ObjectStoreConfig.Host", os.Getenv(minioServiceHost)) minioServicePort := common.GetStringConfigWithDefault( "ObjectStoreConfig.Port", os.Getenv(minioServicePort)) - accessKey := common.GetStringConfig("ObjectStoreConfig.AccessKey") - secretKey := common.GetStringConfig("ObjectStoreConfig.SecretAccessKey") - bucketName := common.GetStringConfig("ObjectStoreConfig.BucketName") + minioServiceRegion := common.GetStringConfigWithDefault( + "ObjectStoreConfig.Region", os.Getenv(minioServiceRegion)) + minioServiceSecure := common.GetBoolConfigWithDefault( + "ObjectStoreConfig.Secure", common.GetBoolFromStringWithDefault(os.Getenv(minioServiceSecure), false)) + accessKey := common.GetStringConfigWithDefault("ObjectStoreConfig.AccessKey", "") + secretKey := common.GetStringConfigWithDefault("ObjectStoreConfig.SecretAccessKey", "") + bucketName := common.GetStringConfigWithDefault("ObjectStoreConfig.BucketName", os.Getenv(pipelineBucketName)) + pipelinePath := common.GetStringConfigWithDefault("ObjectStoreConfig.PipelinePath", os.Getenv(pipelinePath)) disableMultipart := common.GetBoolConfigWithDefault("ObjectStoreConfig.Multipart.Disable", true) minioClient := client.CreateMinioClientOrFatal(minioServiceHost, minioServicePort, accessKey, - secretKey, initConnectionTimeout) - createMinioBucket(minioClient, bucketName) + secretKey, minioServiceSecure, minioServiceRegion, initConnectionTimeout) + createMinioBucket(minioClient, bucketName, minioServiceRegion) - return storage.NewMinioObjectStore(&storage.MinioClient{Client: minioClient}, bucketName, disableMultipart) + return storage.NewMinioObjectStore(&storage.MinioClient{Client: minioClient}, bucketName, pipelinePath, disableMultipart) } -func createMinioBucket(minioClient *minio.Client, bucketName string) { +func createMinioBucket(minioClient *minio.Client, bucketName, region string) { + // Check to see if we already own this bucket. + exists, err := minioClient.BucketExists(bucketName) + if err != nil { + glog.Fatalf("Failed to check if Minio bucket exists. Error: %v", err) + } + if exists { + glog.Infof("We already own %s\n", bucketName) + return + } // Create bucket if it does not exist - err := minioClient.MakeBucket(bucketName, "") + err = minioClient.MakeBucket(bucketName, region) if err != nil { - // Check to see if we already own this bucket. - exists, err := minioClient.BucketExists(bucketName) - if err == nil && exists { - glog.Infof("We already own %s\n", bucketName) - } else { - glog.Fatalf("Failed to create Minio bucket. Error: %v", err) - } + glog.Fatalf("Failed to create Minio bucket. 
Error: %v", err) } glog.Infof("Successfully created bucket %s\n", bucketName) } @@ -344,3 +402,30 @@ func initPipelineVersionsFromPipelines(db *gorm.DB) { tx.Commit() } + +func backfillExperimentIDToRunTable(db *gorm.DB) (retError error) { + // check if there is any row in the run table has experiment ID being empty + rows, err := db.CommonDB().Query(`SELECT ExperimentUUID FROM run_details WHERE ExperimentUUID = '' LIMIT 1`) + if err != nil { + return err + } + defer rows.Close() + + // no row in run_details table has empty ExperimentUUID + if !rows.Next() { + return nil + } + + _, err = db.CommonDB().Exec(` + UPDATE + run_details, resource_references + SET + run_details.ExperimentUUID = resource_references.ReferenceUUID + WHERE + run_details.UUID = resource_references.ResourceUUID + AND resource_references.ResourceType = 'Run' + AND resource_references.ReferenceType = 'Experiment' + AND run_details.ExperimentUUID = '' + `) + return err +} diff --git a/backend/src/apiserver/common/BUILD.bazel b/backend/src/apiserver/common/BUILD.bazel index 5f1b7a81546..282a1f65264 100644 --- a/backend/src/apiserver/common/BUILD.bazel +++ b/backend/src/apiserver/common/BUILD.bazel @@ -1,4 +1,4 @@ -load("@io_bazel_rules_go//go:def.bzl", "go_library") +load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") go_library( name = "go_default_library", @@ -8,6 +8,7 @@ go_library( "filter_context.go", "pagination_context.go", "paths.go", + "util.go", ], importpath = "github.com/kubeflow/pipelines/backend/src/apiserver/common", visibility = ["//visibility:public"], @@ -18,3 +19,13 @@ go_library( "@com_github_spf13_viper//:go_default_library", ], ) + +go_test( + name = "go_default_test", + srcs = ["util_test.go"], + embed = [":go_default_library"], + deps = [ + "//backend/api:go_default_library", + "@com_github_stretchr_testify//assert:go_default_library", + ], +) diff --git a/backend/src/apiserver/common/config.go b/backend/src/apiserver/common/config.go index a6bec9a4ebc..6567c59c66e 100644 --- a/backend/src/apiserver/common/config.go +++ b/backend/src/apiserver/common/config.go @@ -22,6 +22,14 @@ import ( "github.com/spf13/viper" ) +const ( + MultiUserMode string = "MULTIUSER" + PodNamespace string = "POD_NAMESPACE" + CacheEnabled string = "CacheEnabled" + DefaultPipelineRunnerServiceAccount string = "DefaultPipelineRunnerServiceAccount" + KubeflowUserIDHeader string = "KUBEFLOW_USERID_HEADER" + KubeflowUserIDPrefix string = "KUBEFLOW_USERID_PREFIX" +) func GetStringConfig(configName string) string { if !viper.IsSet(configName) { @@ -37,6 +45,14 @@ func GetStringConfigWithDefault(configName, value string) string { return viper.GetString(configName) } +func GetMapConfig(configName string) map[string]string { + if !viper.IsSet(configName) { + glog.Infof("Config %s not specified, skipping", configName) + return nil + } + return viper.GetStringMapString(configName) +} + func GetBoolConfigWithDefault(configName string, value bool) bool { if !viper.IsSet(configName) { return value @@ -54,3 +70,31 @@ func GetDurationConfig(configName string) time.Duration { } return viper.GetDuration(configName) } + +func IsMultiUserMode() bool { + return GetBoolConfigWithDefault(MultiUserMode, false) +} + +func GetPodNamespace() string { + return GetStringConfig(PodNamespace) +} + +func GetBoolFromStringWithDefault(value string, defaultValue bool) bool { + boolVal, err := strconv.ParseBool(value) + if err != nil { + return defaultValue + } + return boolVal +} + +func IsCacheEnabled() string { + return 
GetStringConfigWithDefault(CacheEnabled, "true") +} + +func GetKubeflowUserIDHeader() string { + return GetStringConfigWithDefault(KubeflowUserIDHeader, GoogleIAPUserIdentityHeader) +} + +func GetKubeflowUserIDPrefix() string { + return GetStringConfigWithDefault(KubeflowUserIDPrefix, GoogleIAPUserIdentityPrefix) +} diff --git a/backend/src/apiserver/common/const.go b/backend/src/apiserver/common/const.go index a1fcd849b5e..429036673ef 100644 --- a/backend/src/apiserver/common/const.go +++ b/backend/src/apiserver/common/const.go @@ -23,10 +23,12 @@ type ResourceType string type Relationship string const ( - Experiment ResourceType = "Experiment" - Job ResourceType = "Job" - Run ResourceType = "Run" - Pipeline ResourceType = "pipeline" + Experiment ResourceType = "Experiment" + Job ResourceType = "Job" + Run ResourceType = "Run" + Pipeline ResourceType = "pipeline" + PipelineVersion ResourceType = "PipelineVersion" + Namespace ResourceType = "Namespace" ) const ( @@ -34,12 +36,21 @@ const ( Creator Relationship = "Creator" ) +const ( + GoogleIAPUserIdentityHeader string = "x-goog-authenticated-user-email" + GoogleIAPUserIdentityPrefix string = "accounts.google.com:" +) + func ToModelResourceType(apiType api.ResourceType) (ResourceType, error) { switch apiType { case api.ResourceType_EXPERIMENT: return Experiment, nil case api.ResourceType_JOB: return Job, nil + case api.ResourceType_PIPELINE_VERSION: + return PipelineVersion, nil + case api.ResourceType_NAMESPACE: + return Namespace, nil default: return "", util.NewInvalidInputError("Unsupported resource type: %s", api.ResourceType_name[int32(apiType)]) } diff --git a/backend/src/apiserver/common/util.go b/backend/src/apiserver/common/util.go new file mode 100644 index 00000000000..ba2a3db914b --- /dev/null +++ b/backend/src/apiserver/common/util.go @@ -0,0 +1,41 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package common + +import ( + api "github.com/kubeflow/pipelines/backend/api/go_client" +) + +func GetNamespaceFromAPIResourceReferences(resourceRefs []*api.ResourceReference) string { + namespace := "" + for _, resourceRef := range resourceRefs { + if resourceRef.Key.Type == api.ResourceType_NAMESPACE { + namespace = resourceRef.Key.Id + break + } + } + return namespace +} + +func GetExperimentIDFromAPIResourceReferences(resourceRefs []*api.ResourceReference) string { + experimentID := "" + for _, resourceRef := range resourceRefs { + if resourceRef.Key.Type == api.ResourceType_EXPERIMENT { + experimentID = resourceRef.Key.Id + break + } + } + return experimentID +} diff --git a/backend/src/apiserver/common/util_test.go b/backend/src/apiserver/common/util_test.go new file mode 100644 index 00000000000..5ba399f6d3f --- /dev/null +++ b/backend/src/apiserver/common/util_test.go @@ -0,0 +1,104 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
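
In multi-user mode these helpers are the glue between the API surface and the KFAM check added earlier: a handler extracts the namespace from the request's resource references, then verifies the caller's binding. A sketch assuming identity resolution via the configured user-ID header has already happened; the fake client stands in for a real KFAM service:

```go
package main

import (
	"errors"
	"fmt"

	api "github.com/kubeflow/pipelines/backend/api/go_client"
	"github.com/kubeflow/pipelines/backend/src/apiserver/client"
	"github.com/kubeflow/pipelines/backend/src/apiserver/common"
)

func checkRequestNamespace(kfam client.KFAMClientInterface, user string,
	refs []*api.ResourceReference) error {
	ns := common.GetNamespaceFromAPIResourceReferences(refs)
	if ns == "" {
		return errors.New("a namespace resource reference is required in multi-user mode")
	}
	ok, err := kfam.IsAuthorized(user, ns)
	if err != nil {
		return err
	}
	if !ok {
		return fmt.Errorf("user %s is not allowed in namespace %s", user, ns)
	}
	return nil
}

func main() {
	refs := []*api.ResourceReference{{
		Key:          &api.ResourceKey{Type: api.ResourceType_NAMESPACE, Id: "team-a"},
		Relationship: api.Relationship_OWNER,
	}}
	fmt.Println(checkRequestNamespace(client.NewFakeKFAMClientAuthorized(), "user@example.com", refs))
}
```
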
+// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package common + +import ( + "testing" + + api "github.com/kubeflow/pipelines/backend/api/go_client" + "github.com/stretchr/testify/assert" +) + +func TestGetNamespaceFromResourceReferences(t *testing.T) { + tests := []struct { + name string + references []*api.ResourceReference + expectedNamespace string + }{ + { + "resource reference with namespace and experiment", + []*api.ResourceReference{ + { + Key: &api.ResourceKey{ + Type: api.ResourceType_EXPERIMENT, Id: "123"}, + Relationship: api.Relationship_CREATOR, + }, + { + Key: &api.ResourceKey{ + Type: api.ResourceType_NAMESPACE, Id: "ns"}, + Relationship: api.Relationship_OWNER, + }, + }, + "ns", + }, + { + "resource reference with experiment only", + []*api.ResourceReference{ + { + Key: &api.ResourceKey{ + Type: api.ResourceType_EXPERIMENT, Id: "123"}, + Relationship: api.Relationship_CREATOR, + }, + }, + "", + }, + } + for _, tc := range tests { + namespace := GetNamespaceFromAPIResourceReferences(tc.references) + assert.Equal(t, tc.expectedNamespace, namespace, + "TestGetNamespaceFromResourceReferences(%v) has unexpected result.", tc.name) + } +} + +func TestGetExperimentIDFromResourceReferences(t *testing.T) { + tests := []struct { + name string + references []*api.ResourceReference + expectedExperimentID string + }{ + { + "resource reference with namespace and experiment", + []*api.ResourceReference{ + { + Key: &api.ResourceKey{ + Type: api.ResourceType_EXPERIMENT, Id: "123"}, + Relationship: api.Relationship_CREATOR, + }, + { + Key: &api.ResourceKey{ + Type: api.ResourceType_NAMESPACE, Id: "ns"}, + Relationship: api.Relationship_OWNER, + }, + }, + "123", + }, + { + "resource reference with namespace only", + []*api.ResourceReference{ + { + Key: &api.ResourceKey{ + Type: api.ResourceType_NAMESPACE, Id: "ns"}, + Relationship: api.Relationship_OWNER, + }, + }, + "", + }, + } + for _, tc := range tests { + experimentID := GetExperimentIDFromAPIResourceReferences(tc.references) + assert.Equal(t, tc.expectedExperimentID, experimentID, + "TestGetExperimentIDFromResourceReferences(%v) has unexpected result.", tc.name) + } +} diff --git a/backend/src/apiserver/config/config.json b/backend/src/apiserver/config/config.json index 15ce69a2980..17daea2cd7f 100644 --- a/backend/src/apiserver/config/config.json +++ b/backend/src/apiserver/config/config.json @@ -2,13 +2,16 @@ "DBConfig": { "DriverName": "mysql", "DataSourceName": "", - "DBName": "mlpipeline" + "DBName": "mlpipeline", + "GroupConcatMaxLen": "4194304" }, - "ObjectStoreConfig":{ + "ObjectStoreConfig": { "AccessKey": "minio", "SecretAccessKey": "minio123", - "BucketName": "mlpipeline" + "BucketName": "mlpipeline", + "PipelinePath": "pipelines" }, "InitConnectionTimeout": "6m", - "DefaultPipelineRunnerServiceAccount": "pipeline-runner" + "DefaultPipelineRunnerServiceAccount": "pipeline-runner", + "CacheEnabled": "true" } diff --git a/backend/src/apiserver/config/sample_config.json b/backend/src/apiserver/config/sample_config.json index e61cf04dc95..a0830c173cd 100644 --- a/backend/src/apiserver/config/sample_config.json +++ 
b/backend/src/apiserver/config/sample_config.json
@@ -1,27 +1,27 @@
 [
 {
- "name":"[Sample] ML - XGBoost - Training with Confusion Matrix",
- "description":"A trainer that does end-to-end distributed training for XGBoost models. For source code, refer to https://github.com/kubeflow/pipelines/tree/master/samples/core/xgboost-spark",
- "file":"/samples/core/xgboost_training_cm/xgboost_training_cm.py.tar.gz"
+ "name": "[Demo] XGBoost - Training with confusion matrix",
+ "description": "[source code](https://github.com/kubeflow/pipelines/blob/master/samples/core/xgboost_training_cm) [GCP Permission requirements](https://github.com/kubeflow/pipelines/blob/master/samples/core/xgboost_training_cm#requirements). A trainer that does end-to-end distributed training for XGBoost models.",
+ "file": "/samples/core/xgboost_training_cm/xgboost_training_cm.py.yaml"
 },
 {
- "name":"[Sample] Basic - Sequential execution",
- "description":"A pipeline with two sequential steps. For source code, refer to https://github.com/kubeflow/pipelines/blob/master/samples/core/sequential/sequential.py",
- "file":"/samples/core/sequential/sequential.py.tar.gz"
+ "name": "[Demo] TFX - Taxi tip prediction model trainer",
+ "description": "[source code](https://github.com/kubeflow/pipelines/tree/master/samples/core/parameterized_tfx_oss) [GCP Permission requirements](https://github.com/kubeflow/pipelines/blob/master/samples/core/parameterized_tfx_oss#permission). Example pipeline that does classification with model analysis based on a public taxi cab dataset.",
+ "file": "/samples/core/parameterized_tfx_oss/parameterized_tfx_oss.py.yaml"
 },
 {
- "name":"[Sample] Basic - Parallel execution",
- "description":"A pipeline that downloads two messages in parallel and prints the concatenated result. For source code, refer to https://github.com/kubeflow/pipelines/blob/master/samples/core/parallel_join/parallel_join.py",
- "file":"/samples/core/parallel_join/parallel_join.py.tar.gz"
+ "name": "[Tutorial] Data passing in python components",
+ "description": "[source code](https://github.com/kubeflow/pipelines/tree/master/samples/tutorials/Data%20passing%20in%20python%20components) Shows how to pass data between python components.",
+ "file": "/samples/tutorials/Data passing in python components/Data passing in python components - Files.py.yaml"
 },
 {
- "name":"[Sample] Basic - Conditional execution",
- "description":"A pipeline shows how to use dsl.Condition. For source code, refer to https://github.com/kubeflow/pipelines/blob/master/samples/core/condition/condition.py",
- "file":"/samples/core/condition/condition.py.tar.gz"
+ "name": "[Tutorial] DSL - Control structures",
+ "description": "[source code](https://github.com/kubeflow/pipelines/tree/master/samples/tutorials/DSL%20-%20Control%20structures) Shows how to use conditional execution and exit handlers. This pipeline will randomly fail to demonstrate that the exit handler gets executed even in case of failure.",
+ "file": "/samples/tutorials/DSL - Control structures/DSL - Control structures.py.yaml"
 },
 {
- "name":"[Sample] Basic - Exit Handler",
- "description":"A pipeline that downloads a message and prints it out. Exit Handler will run at the end. For source code, refer to https://github.com/kubeflow/pipelines/blob/master/samples/core/exit_handler/exit_handler.py",
- "file":"/samples/core/exit_handler/exit_handler.py.tar.gz"
+ "name": "[Demo] TFX - Iris classification pipeline",
+ "description": "[source code](https://github.com/kubeflow/pipelines/tree/master/samples/core/iris). Example pipeline that classifies Iris flower subspecies and shows how to use native Keras within TFX.",
+ "file": "/samples/core/iris/iris.py.yaml"
 }
 ]
diff --git a/backend/src/apiserver/filter/filter.go b/backend/src/apiserver/filter/filter.go
index 92075e1158e..6a9646b5068 100644
--- a/backend/src/apiserver/filter/filter.go
+++ b/backend/src/apiserver/filter/filter.go
@@ -128,18 +128,24 @@ func New(filterProto *api.Filter) (*Filter, error) {
 return f, nil
 }
-// NewWithKeyMap is like New, but takes an additional map for mapping key names
+// NewWithKeyMap is like New, but takes an additional map and model name for mapping key names
 // in the protocol buffer to an appropriate name for use when querying the
-// model. For example, if the API name of a field is "foo" and the equivalent
-// model name is "ModelFoo", then filterProto with predicates against key "foo"
-// will be parsed as if the key value was "ModelFoo".
-func NewWithKeyMap(filterProto *api.Filter, keyMap map[string]string) (*Filter, error) {
+// model. For example, if the API name of a field is "name", the model name is "pipelines", and
+// the equivalent column name is "Name", then filterProto with predicates against key "name"
+// will be parsed as if the key value was "pipelines.Name".
+func NewWithKeyMap(filterProto *api.Filter, keyMap map[string]string, modelName string) (*Filter, error) {
+ // Fully qualify column name to avoid "ambiguous column name" error.
+ var modelNamePrefix string
+ if modelName != "" {
+ modelNamePrefix = modelName + "."
+ }
+
 for _, pred := range filterProto.Predicates {
 k, ok := keyMap[pred.Key]
 if !ok {
 return nil, util.NewInvalidInputError("no support for filtering on unrecognized field %q", pred.Key)
 }
- pred.Key = k
+ pred.Key = modelNamePrefix + k
 }
 return New(filterProto)
 }
diff --git a/backend/src/apiserver/filter/filter_test.go b/backend/src/apiserver/filter/filter_test.go
index 09b4d1b433b..49b12faf7d2 100644
--- a/backend/src/apiserver/filter/filter_test.go
+++ b/backend/src/apiserver/filter/filter_test.go
@@ -87,6 +87,61 @@ func TestValidNewFilters(t *testing.T) {
 }
 }
+func TestValidNewFiltersWithKeyMap(t *testing.T) {
+ opts := []cmp.Option{
+ cmp.AllowUnexported(Filter{}),
+ cmp.FilterPath(func(p cmp.Path) bool {
+ return p.String() == "filterProto"
+ }, cmp.Ignore()),
+ cmpopts.EquateEmpty(),
+ }
+
+ tests := []struct {
+ protoStr string
+ want *Filter
+ }{
+ {
+ `predicates { key: "name" op: EQUALS string_value: "pipeline" }`,
+ &Filter{eq: map[string]interface{}{"pipelines.Name": "pipeline"}},
+ },
+ {
+ `predicates { key: "name" op: NOT_EQUALS string_value: "pipeline" }`,
+ &Filter{neq: map[string]interface{}{"pipelines.Name": "pipeline"}},
+ },
+ {
+ `predicates {
+ key: "name" op: IN
+ string_values { values: 'pipeline_1' values: 'pipeline_2' } }`,
+ &Filter{in: map[string]interface{}{"pipelines.Name": []string{"pipeline_1", "pipeline_2"}}},
+ },
+ {
+ `predicates {
+ key: "name" op: IS_SUBSTRING string_value: "pipeline" }`,
+ &Filter{substring: map[string]interface{}{"pipelines.Name": "pipeline"}},
+ },
+ }
+
+ for _, test := range tests {
+ filterProto := &api.Filter{}
+ if err := proto.UnmarshalText(test.protoStr, filterProto); err != nil {
+ t.Errorf("Failed to unmarshal Filter text proto\n%q\nError: %v", test.protoStr, err)
+ continue
+ }
+
+ keyMap := map[string]string{
+ "id": "UUID",
+ "name": "Name",
+ "created_at": "CreatedAtInSec",
+ "description": "Description",
+ }
+ modelName := "pipelines"
+ got, err := NewWithKeyMap(filterProto, keyMap, modelName)
+ if !cmp.Equal(got, test.want, opts...) || err != nil {
+ t.Errorf("New(%+v) = %+v, %v\nWant %+v, nil", *filterProto, got, err, test.want)
+ }
+ }
+}
+
 func TestInvalidFilters(t *testing.T) {
 tests := []struct {
 protoStr string
diff --git a/backend/src/apiserver/list/list.go b/backend/src/apiserver/list/list.go
index cf5b226d6e0..2a44b39b4ec 100644
--- a/backend/src/apiserver/list/list.go
+++ b/backend/src/apiserver/list/list.go
@@ -155,7 +155,7 @@ func NewOptions(listable Listable, pageSize int, sortBy string, filterProto *api
 // Filtering.
 if filterProto != nil {
- f, err := filter.NewWithKeyMap(filterProto, listable.APIToModelFieldMap())
+ f, err := filter.NewWithKeyMap(filterProto, listable.APIToModelFieldMap(), listable.GetModelName())
 if err != nil {
 return nil, err
 }
@@ -248,6 +248,43 @@ func FilterOnResourceReference(tableName string, columns []string, resourceType
 return selectBuilder, nil
 }
+// FilterOnExperiment filters rows of the given table by the provided experiment ID
+// and returns the rebuilt SelectBuilder.
+func FilterOnExperiment(
+ tableName string,
+ columns []string,
+ selectCount bool,
+ experimentID string,
+) (sq.SelectBuilder, error) {
+ return filterByColumnValue(tableName, columns, selectCount, "ExperimentUUID", experimentID), nil
+}
+
+func FilterOnNamespace(
+ tableName string,
+ columns []string,
+ selectCount bool,
+ namespace string,
+) (sq.SelectBuilder, error) {
+ return filterByColumnValue(tableName, columns, selectCount, "Namespace", namespace), nil
+}
+
+func filterByColumnValue(
+ tableName string,
+ columns []string,
+ selectCount bool,
+ columnName string,
+ filterValue interface{},
+) sq.SelectBuilder {
+ selectBuilder := sq.Select(columns...)
+ if selectCount {
+ selectBuilder = sq.Select("count(*)")
+ }
+ selectBuilder = selectBuilder.From(tableName).Where(
+ sq.Eq{columnName: filterValue},
+ )
+ return selectBuilder
+}
+
 // Scans the one given row into a number, and returns the number
 func ScanRowToTotalSize(rows *sql.Rows) (int, error) {
 var total_size int
diff --git a/backend/src/apiserver/list/list_test.go b/backend/src/apiserver/list/list_test.go
index e379af8719a..332acf5e37e 100644
--- a/backend/src/apiserver/list/list_test.go
+++ b/backend/src/apiserver/list/list_test.go
@@ -736,3 +736,91 @@ func TestFilterOnResourceReference(t *testing.T) {
 }
 }
 }
+
+func TestFilterOnExperiment(t *testing.T) {
+
+ type testIn struct {
+ table string
+ count bool
+ filter *common.FilterContext
+ }
+ tests := []struct {
+ in *testIn
+ wantSql string
+ wantErr error
+ }{
+ {
+ in: &testIn{
+ table: "testTable",
+ count: false,
+ filter: &common.FilterContext{},
+ },
+ wantSql: "SELECT * FROM testTable WHERE ExperimentUUID = ?",
+ wantErr: nil,
+ },
+ {
+ in: &testIn{
+ table: "testTable",
+ count: true,
+ filter: &common.FilterContext{},
+ },
+ wantSql: "SELECT count(*) FROM testTable WHERE ExperimentUUID = ?",
+ wantErr: nil,
+ },
+ }
+
+ for _, test := range tests {
+ sqlBuilder, gotErr := FilterOnExperiment(test.in.table, []string{"*"}, test.in.count, "123")
+ gotSql, _, err := sqlBuilder.ToSql()
+ assert.Nil(t, err)
+
+ if gotSql != test.wantSql || gotErr != test.wantErr {
+ t.Errorf("FilterOnExperiment(%+v) =\nGot: %q, %v\nWant: %q, %v",
+ test.in, gotSql, gotErr, test.wantSql, test.wantErr)
+ }
+ }
+}
+
+func TestFilterOnNamespace(t *testing.T) {
+
+ type testIn struct {
+ table string
+ count bool
+ filter *common.FilterContext
+ }
+ tests := []struct {
+ in *testIn
+ wantSql string
+ wantErr error
+ }{
+ {
+ in: &testIn{
+ table: "testTable",
+ 
count: false, + filter: &common.FilterContext{}, + }, + wantSql: "SELECT * FROM testTable WHERE Namespace = ?", + wantErr: nil, + }, + { + in: &testIn{ + table: "testTable", + count: true, + filter: &common.FilterContext{}, + }, + wantSql: "SELECT count(*) FROM testTable WHERE Namespace = ?", + wantErr: nil, + }, + } + + for _, test := range tests { + sqlBuilder, gotErr := FilterOnNamespace(test.in.table, []string{"*"}, test.in.count, "ns") + gotSql, _, err := sqlBuilder.ToSql() + assert.Nil(t, err) + + if gotSql != test.wantSql || gotErr != test.wantErr { + t.Errorf("FilterOnNamespace(%+v) =\nGot: %q, %v\nWant: %q, %v", + test.in, gotSql, gotErr, test.wantSql, test.wantErr) + } + } +} diff --git a/backend/src/apiserver/main.go b/backend/src/apiserver/main.go index 4c8f1b2f212..67c1baf666f 100644 --- a/backend/src/apiserver/main.go +++ b/backend/src/apiserver/main.go @@ -18,24 +18,24 @@ import ( "context" "encoding/json" "flag" - "github.com/fsnotify/fsnotify" - "github.com/kubeflow/pipelines/backend/src/apiserver/common" - "github.com/spf13/viper" + "fmt" "io" "io/ioutil" + "math" "net" "net/http" + "os" "strings" "time" - "fmt" - "os" - + "github.com/fsnotify/fsnotify" "github.com/golang/glog" "github.com/grpc-ecosystem/grpc-gateway/runtime" api "github.com/kubeflow/pipelines/backend/api/go_client" + "github.com/kubeflow/pipelines/backend/src/apiserver/common" "github.com/kubeflow/pipelines/backend/src/apiserver/resource" "github.com/kubeflow/pipelines/backend/src/apiserver/server" + "github.com/spf13/viper" "google.golang.org/grpc" "google.golang.org/grpc/reflection" ) @@ -71,13 +71,22 @@ func main() { clientManager.Close() } +// A custom http request header matcher to pass on the user identity +// Reference: https://github.com/grpc-ecosystem/grpc-gateway/blob/master/docs/_docs/customizingyourgateway.md#mapping-from-http-request-headers-to-grpc-client-metadata +func grpcCustomMatcher(key string) (string, bool) { + if strings.EqualFold(key, common.GetKubeflowUserIDHeader()) { + return strings.ToLower(key), true + } + return strings.ToLower(key), false +} + func startRpcServer(resourceManager *resource.ResourceManager) { glog.Info("Starting RPC server") listener, err := net.Listen("tcp", *rpcPortFlag) if err != nil { glog.Fatalf("Failed to start RPC server: %v", err) } - s := grpc.NewServer(grpc.UnaryInterceptor(apiServerInterceptor)) + s := grpc.NewServer(grpc.UnaryInterceptor(apiServerInterceptor), grpc.MaxRecvMsgSize(math.MaxInt32)) api.RegisterPipelineServiceServer(s, server.NewPipelineServer(resourceManager)) api.RegisterExperimentServiceServer(s, server.NewExperimentServer(resourceManager)) api.RegisterRunServiceServer(s, server.NewRunServer(resourceManager)) @@ -90,6 +99,7 @@ func startRpcServer(resourceManager *resource.ResourceManager) { common.GetStringConfig(visualizationServiceHost), common.GetStringConfig(visualizationServicePort), )) + api.RegisterAuthServiceServer(s, server.NewAuthServer(resourceManager)) // Register reflection service on gRPC server. reflection.Register(s) @@ -107,13 +117,14 @@ func startHttpProxy(resourceManager *resource.ResourceManager) { defer cancel() // Create gRPC HTTP MUX and register services. 
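	// Reviewer note, a sketch rather than part of this change: WithIncomingHeaderMatcher
	// (used on the next line) lets grpc-gateway decide, header by header, whether an incoming
	// HTTP header is forwarded to the gRPC backend as metadata. Since grpcCustomMatcher returns
	// true only for the configured Kubeflow user ID header, that header alone survives the
	// proxy hop; a handler could then read it roughly as follows, assuming the standard
	// google.golang.org/grpc/metadata package:
	//   md, _ := metadata.FromIncomingContext(ctx)
	//   user := md.Get(strings.ToLower(common.GetKubeflowUserIDHeader()))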
- mux := runtime.NewServeMux() + mux := runtime.NewServeMux(runtime.WithIncomingHeaderMatcher(grpcCustomMatcher)) registerHttpHandlerFromEndpoint(api.RegisterPipelineServiceHandlerFromEndpoint, "PipelineService", ctx, mux) registerHttpHandlerFromEndpoint(api.RegisterExperimentServiceHandlerFromEndpoint, "ExperimentService", ctx, mux) registerHttpHandlerFromEndpoint(api.RegisterJobServiceHandlerFromEndpoint, "JobService", ctx, mux) registerHttpHandlerFromEndpoint(api.RegisterRunServiceHandlerFromEndpoint, "RunService", ctx, mux) registerHttpHandlerFromEndpoint(api.RegisterReportServiceHandlerFromEndpoint, "ReportService", ctx, mux) registerHttpHandlerFromEndpoint(api.RegisterVisualizationServiceHandlerFromEndpoint, "Visualization", ctx, mux) + registerHttpHandlerFromEndpoint(api.RegisterAuthServiceHandlerFromEndpoint, "AuthService", ctx, mux) // Create a top level mux to include both pipeline upload server and gRPC servers. topMux := http.NewServeMux() @@ -123,8 +134,9 @@ func startHttpProxy(resourceManager *resource.ResourceManager) { // https://github.com/grpc-ecosystem/grpc-gateway/issues/410 pipelineUploadServer := server.NewPipelineUploadServer(resourceManager) topMux.HandleFunc("/apis/v1beta1/pipelines/upload", pipelineUploadServer.UploadPipeline) + topMux.HandleFunc("/apis/v1beta1/pipelines/upload_version", pipelineUploadServer.UploadPipelineVersion) topMux.HandleFunc("/apis/v1beta1/healthz", func(w http.ResponseWriter, r *http.Request) { - io.WriteString(w, `{"commit_sha":"`+common.GetStringConfig("COMMIT_SHA")+`"}`) + io.WriteString(w, `{"commit_sha":"`+common.GetStringConfigWithDefault("COMMIT_SHA", "unknown")+`", "tag_name":"`+common.GetStringConfigWithDefault("TAG_NAME", "unknown")+`"}`) }) topMux.Handle("/apis/", mux) @@ -219,4 +231,4 @@ func initConfig() { // Read in config again viper.ReadInConfig() }) -} \ No newline at end of file +} diff --git a/backend/src/apiserver/model/BUILD.bazel b/backend/src/apiserver/model/BUILD.bazel index e6a15d7878f..9e90fadcee5 100644 --- a/backend/src/apiserver/model/BUILD.bazel +++ b/backend/src/apiserver/model/BUILD.bazel @@ -23,16 +23,15 @@ go_test( name = "go_default_test", srcs = [ "pipeline_version_test.go", + "resource_reference_test.go", ], + embed = [":go_default_library"], importpath = "github.com/kubeflow/pipelines/backend/src/apiserver/model", visibility = ["//visibility:public"], - embed = [":go_default_library"], deps = [ "//backend/api:go_default_library", - "//backend/src/apiserver/filter:go_default_library", + "//backend/src/apiserver/common:go_default_library", "//backend/src/apiserver/list:go_default_library", - "@com_github_google_go_cmp//cmp:go_default_library", - "@com_github_google_go_cmp//cmp/cmpopts:go_default_library", "@com_github_masterminds_squirrel//:go_default_library", "@com_github_stretchr_testify//assert:go_default_library", ], diff --git a/backend/src/apiserver/model/experiment.go b/backend/src/apiserver/model/experiment.go index cdf725d5d97..40bdc27c6b8 100644 --- a/backend/src/apiserver/model/experiment.go +++ b/backend/src/apiserver/model/experiment.go @@ -2,9 +2,11 @@ package model type Experiment struct { UUID string `gorm:"column:UUID; not null; primary_key"` - Name string `gorm:"column:Name; not null; unique"` + Name string `gorm:"column:Name; not null; unique_index:idx_name_namespace"` Description string `gorm:"column:Description; not null"` CreatedAtInSec int64 `gorm:"column:CreatedAtInSec; not null"` + Namespace string `gorm:"column:Namespace; not null; unique_index:idx_name_namespace"` + 
StorageState string `gorm:"column:StorageState; not null;"` } func (e Experiment) GetValueOfPrimaryKey() string { @@ -26,10 +28,12 @@ func (e *Experiment) DefaultSortField() string { } var experimentAPIToModelFieldMap = map[string]string{ - "id": "UUID", - "name": "Name", - "created_at": "CreatedAtInSec", - "description": "Description", + "id": "UUID", + "name": "Name", + "created_at": "CreatedAtInSec", + "description": "Description", + "namespace": "Namespace", + "storage_state": "StorageState", } // APIToModelFieldMap returns a map from API names to field names for model diff --git a/backend/src/apiserver/model/job.go b/backend/src/apiserver/model/job.go index 43d10c51f8b..2ea467e83ef 100644 --- a/backend/src/apiserver/model/job.go +++ b/backend/src/apiserver/model/job.go @@ -21,8 +21,10 @@ type Job struct { DisplayName string `gorm:"column:DisplayName; not null;"` /* The name that user provides. Can contain special characters*/ Name string `gorm:"column:Name; not null;"` /* The name of the K8s resource. Follow regex '[a-z0-9]([-a-z0-9]*[a-z0-9])?'*/ Namespace string `gorm:"column:Namespace; not null;"` + ServiceAccount string `gorm:"column:ServiceAccount; not null;"` Description string `gorm:"column:Description; not null"` MaxConcurrency int64 `gorm:"column:MaxConcurrency;not null"` + NoCatchup bool `gorm:"column:NoCatchup; not null"` CreatedAtInSec int64 `gorm:"column:CreatedAtInSec; not null"` /* The time this record is stored in DB*/ UpdatedAtInSec int64 `gorm:"column:UpdatedAtInSec; not null"` Enabled bool `gorm:"column:Enabled; not null"` diff --git a/backend/src/apiserver/model/pipeline.go b/backend/src/apiserver/model/pipeline.go index 5baec4caefc..08100e49ce1 100644 --- a/backend/src/apiserver/model/pipeline.go +++ b/backend/src/apiserver/model/pipeline.go @@ -30,7 +30,7 @@ type Pipeline struct { UUID string `gorm:"column:UUID; not null; primary_key"` CreatedAtInSec int64 `gorm:"column:CreatedAtInSec; not null"` Name string `gorm:"column:Name; not null; unique"` - Description string `gorm:"column:Description; not null"` + Description string `gorm:"column:Description; not null; size:65535"` // Same as below, set size to large number so it will be stored as longtext // TODO(jingzhang36): remove Parameters when no code is accessing this // field. Should use PipelineVersion.Parameters instead. /* Set size to 65535 so it will be stored as longtext. https://dev.mysql.com/doc/refman/8.0/en/column-count-limit.html */ diff --git a/backend/src/apiserver/model/pipeline_version.go b/backend/src/apiserver/model/pipeline_version.go index 8ff5137badf..1cdb55a9197 100644 --- a/backend/src/apiserver/model/pipeline_version.go +++ b/backend/src/apiserver/model/pipeline_version.go @@ -31,14 +31,14 @@ const ( type PipelineVersion struct { UUID string `gorm:"column:UUID; not null; primary_key"` CreatedAtInSec int64 `gorm:"column:CreatedAtInSec; not null; index"` - Name string `gorm:"column:Name; not null; unique_index:idx_pipeline_version_uuid_name"` + Name string `gorm:"column:Name; not null; unique_index:idx_pipelineid_name"` // Set size to 65535 so it will be stored as longtext. // https://dev.mysql.com/doc/refman/8.0/en/column-count-limit.html Parameters string `gorm:"column:Parameters; not null; size:65535"` // PipelineVersion belongs to Pipeline. If a pipeline with a specific UUID // is deleted from Pipeline table, all this pipeline's versions will be // deleted from PipelineVersion table. 
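	// Note: the Name field above and the PipelineId field below share the tag
	// unique_index:idx_pipelineid_name, which GORM compiles into one composite unique
	// index, so a version name only needs to be unique among versions of the same
	// pipeline. The Experiment model change earlier in this diff uses the same pattern
	// (idx_name_namespace) to scope experiment names per namespace.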
- PipelineId string `gorm:"column:PipelineId; not null;index;"` + PipelineId string `gorm:"column:PipelineId; not null;index; unique_index:idx_pipelineid_name"` Status PipelineVersionStatus `gorm:"column:Status; not null"` // Code source url links to the pipeline version's definition in repo. CodeSourceUrl string `gorm:"column:CodeSourceUrl;"` diff --git a/backend/src/apiserver/model/resource_reference.go b/backend/src/apiserver/model/resource_reference.go index 6336e43571d..ca1331be0fa 100644 --- a/backend/src/apiserver/model/resource_reference.go +++ b/backend/src/apiserver/model/resource_reference.go @@ -14,7 +14,9 @@ package model -import "github.com/kubeflow/pipelines/backend/src/apiserver/common" +import ( + "github.com/kubeflow/pipelines/backend/src/apiserver/common" +) // Resource reference table models the relationship between resources in a loosely coupled way. type ResourceReference struct { @@ -39,3 +41,14 @@ type ResourceReference struct { // The json formatted blob of the resource reference. Payload string `gorm:"column:Payload; not null; size:65535 "` } + +func GetNamespaceFromModelResourceReferences(resourceRefs []*ResourceReference) string { + namespace := "" + for _, resourceRef := range resourceRefs { + if resourceRef.ReferenceType == common.Namespace { + namespace = resourceRef.ReferenceUUID + break + } + } + return namespace +} diff --git a/backend/src/apiserver/model/resource_reference_test.go b/backend/src/apiserver/model/resource_reference_test.go new file mode 100644 index 00000000000..ff58545064e --- /dev/null +++ b/backend/src/apiserver/model/resource_reference_test.go @@ -0,0 +1,51 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package model + +import ( + "github.com/kubeflow/pipelines/backend/src/apiserver/common" + "github.com/stretchr/testify/assert" + "testing" +) + +func TestGetNamespaceFromResourceReferencesModel(t *testing.T) { + references := []*ResourceReference{ + { + ReferenceType: common.Experiment, + ReferenceUUID: "123", + ReferenceName: "123", + Relationship: common.Creator, + }, + { + ReferenceType: common.Namespace, + ReferenceName: "ns", + ReferenceUUID: "ns", + Relationship: common.Creator, + }, + } + namespace := GetNamespaceFromModelResourceReferences(references) + assert.Equal(t, "ns", namespace) + + references = []*ResourceReference{ + { + ReferenceType: common.Experiment, + ReferenceUUID: "123", + ReferenceName: "123", + Relationship: common.Creator, + }, + } + namespace = GetNamespaceFromModelResourceReferences(references) + assert.Equal(t, "", namespace) +} diff --git a/backend/src/apiserver/model/run.go b/backend/src/apiserver/model/run.go index 8536878879d..5de92b20758 100644 --- a/backend/src/apiserver/model/run.go +++ b/backend/src/apiserver/model/run.go @@ -16,10 +16,12 @@ package model type Run struct { UUID string `gorm:"column:UUID; not null; primary_key"` + ExperimentUUID string `gorm:"column:ExperimentUUID; not null;"` DisplayName string `gorm:"column:DisplayName; not null;"` /* The name that user provides. Can contain special characters*/ Name string `gorm:"column:Name; not null;"` /* The name of the K8s resource. Follow regex '[a-z0-9]([-a-z0-9]*[a-z0-9])?'*/ StorageState string `gorm:"column:StorageState; not null;"` Namespace string `gorm:"column:Namespace; not null;"` + ServiceAccount string `gorm:"column:ServiceAccount; not null;"` Description string `gorm:"column:Description; not null;"` CreatedAtInSec int64 `gorm:"column:CreatedAtInSec; not null;"` ScheduledAtInSec int64 `gorm:"column:ScheduledAtInSec; default:0;"` diff --git a/backend/src/apiserver/resource/BUILD.bazel b/backend/src/apiserver/resource/BUILD.bazel index c477c49e6aa..576d567ff4e 100644 --- a/backend/src/apiserver/resource/BUILD.bazel +++ b/backend/src/apiserver/resource/BUILD.bazel @@ -5,16 +5,14 @@ go_library( srcs = [ "client_manager_fake.go", "model_converter.go", - "pod_fake.go", "resource_manager.go", "resource_manager_util.go", - "scheduled_workflow_fake.go", - "workflow_fake.go", ], importpath = "github.com/kubeflow/pipelines/backend/src/apiserver/resource", visibility = ["//visibility:public"], deps = [ "//backend/api:go_default_library", + "//backend/src/apiserver/client:go_default_library", "//backend/src/apiserver/common:go_default_library", "//backend/src/apiserver/list:go_default_library", "//backend/src/apiserver/model:go_default_library", @@ -28,14 +26,9 @@ go_library( "@com_github_cenkalti_backoff//:go_default_library", "@com_github_golang_glog//:go_default_library", "@com_github_pkg_errors//:go_default_library", - "@io_k8s_api//core/v1:go_default_library", - "@io_k8s_api//policy/v1beta1:go_default_library", "@io_k8s_apimachinery//pkg/api/errors:go_default_library", "@io_k8s_apimachinery//pkg/apis/meta/v1:go_default_library", "@io_k8s_apimachinery//pkg/types:go_default_library", - "@io_k8s_apimachinery//pkg/watch:go_default_library", - "@io_k8s_client_go//kubernetes/typed/core/v1:go_default_library", - "@io_k8s_client_go//rest:go_default_library", ], ) @@ -49,6 +42,7 @@ go_test( embed = [":go_default_library"], deps = [ "//backend/api:go_default_library", + "//backend/src/apiserver/client:go_default_library", "//backend/src/apiserver/common:go_default_library", 
"//backend/src/apiserver/model:go_default_library", "//backend/src/apiserver/storage:go_default_library", @@ -56,7 +50,9 @@ go_test( "//backend/src/crd/pkg/apis/scheduledworkflow/v1beta1:go_default_library", "@com_github_argoproj_argo//pkg/apis/workflow/v1alpha1:go_default_library", "@com_github_ghodss_yaml//:go_default_library", + "@com_github_google_go_cmp//cmp:go_default_library", "@com_github_pkg_errors//:go_default_library", + "@com_github_spf13_viper//:go_default_library", "@com_github_stretchr_testify//assert:go_default_library", "@io_bazel_rules_go//proto/wkt:timestamp_go_proto", "@io_k8s_apimachinery//pkg/apis/meta/v1:go_default_library", diff --git a/backend/src/apiserver/resource/client_manager_fake.go b/backend/src/apiserver/resource/client_manager_fake.go index a0e62ce9add..90b8fddb24b 100644 --- a/backend/src/apiserver/resource/client_manager_fake.go +++ b/backend/src/apiserver/resource/client_manager_fake.go @@ -15,37 +15,37 @@ package resource import ( - workflowclient "github.com/argoproj/argo/pkg/client/clientset/versioned/typed/workflow/v1alpha1" "github.com/golang/glog" + "github.com/kubeflow/pipelines/backend/src/apiserver/client" "github.com/kubeflow/pipelines/backend/src/apiserver/storage" "github.com/kubeflow/pipelines/backend/src/common/util" - scheduledworkflowclient "github.com/kubeflow/pipelines/backend/src/crd/pkg/client/clientset/versioned/typed/scheduledworkflow/v1beta1" - v1 "k8s.io/client-go/kubernetes/typed/core/v1" ) const ( DefaultFakeUUID = "123e4567-e89b-12d3-a456-426655440000" + FakeUUIDOne = "123e4567-e89b-12d3-a456-426655440001" ) type FakeClientManager struct { - db *storage.DB - experimentStore storage.ExperimentStoreInterface - pipelineStore storage.PipelineStoreInterface - jobStore storage.JobStoreInterface - runStore storage.RunStoreInterface - resourceReferenceStore storage.ResourceReferenceStoreInterface - dBStatusStore storage.DBStatusStoreInterface - defaultExperimentStore storage.DefaultExperimentStoreInterface - objectStore storage.ObjectStoreInterface - workflowClientFake *FakeWorkflowClient - scheduledWorkflowClientFake *FakeScheduledWorkflowClient - podClientFake v1.PodInterface - time util.TimeInterface - uuid util.UUIDGeneratorInterface + db *storage.DB + experimentStore storage.ExperimentStoreInterface + pipelineStore storage.PipelineStoreInterface + jobStore storage.JobStoreInterface + runStore storage.RunStoreInterface + resourceReferenceStore storage.ResourceReferenceStoreInterface + dBStatusStore storage.DBStatusStoreInterface + defaultExperimentStore storage.DefaultExperimentStoreInterface + objectStore storage.ObjectStoreInterface + ArgoClientFake *client.FakeArgoClient + swfClientFake *client.FakeSwfClient + k8sCoreClientFake *client.FakeKuberneteCoreClient + KfamClientFake client.KFAMClientInterface + time util.TimeInterface + uuid util.UUIDGeneratorInterface } func NewFakeClientManager(time util.TimeInterface, uuid util.UUIDGeneratorInterface) ( - *FakeClientManager, error) { + *FakeClientManager, error) { if time == nil { glog.Fatalf("The time parameter must not be null.") // Must never happen @@ -63,20 +63,21 @@ func NewFakeClientManager(time util.TimeInterface, uuid util.UUIDGeneratorInterf // TODO(neuromage): Pass in metadata.Store instance for tests as well. 
return &FakeClientManager{ - db: db, - experimentStore: storage.NewExperimentStore(db, time, uuid), - pipelineStore: storage.NewPipelineStore(db, time, uuid), - jobStore: storage.NewJobStore(db, time), - runStore: storage.NewRunStore(db, time), - workflowClientFake: NewWorkflowClientFake(), - resourceReferenceStore: storage.NewResourceReferenceStore(db), - dBStatusStore: storage.NewDBStatusStore(db), - defaultExperimentStore: storage.NewDefaultExperimentStore(db), - objectStore: storage.NewFakeObjectStore(), - scheduledWorkflowClientFake: NewScheduledWorkflowClientFake(), - podClientFake: FakePodClient{}, - time: time, - uuid: uuid, + db: db, + experimentStore: storage.NewExperimentStore(db, time, uuid), + pipelineStore: storage.NewPipelineStore(db, time, uuid), + jobStore: storage.NewJobStore(db, time), + runStore: storage.NewRunStore(db, time), + ArgoClientFake: client.NewFakeArgoClient(), + resourceReferenceStore: storage.NewResourceReferenceStore(db), + dBStatusStore: storage.NewDBStatusStore(db), + defaultExperimentStore: storage.NewDefaultExperimentStore(db), + objectStore: storage.NewFakeObjectStore(), + swfClientFake: client.NewFakeSwfClient(), + k8sCoreClientFake: client.NewFakeKuberneteCoresClient(), + KfamClientFake: client.NewFakeKFAMClientAuthorized(), + time: time, + uuid: uuid, }, nil } @@ -113,8 +114,8 @@ func (f *FakeClientManager) DB() *storage.DB { return f.db } -func (f *FakeClientManager) Workflow() workflowclient.WorkflowInterface { - return f.workflowClientFake +func (f *FakeClientManager) ArgoClient() client.ArgoClientInterface { + return f.ArgoClientFake } func (f *FakeClientManager) JobStore() storage.JobStoreInterface { @@ -137,14 +138,25 @@ func (f *FakeClientManager) DefaultExperimentStore() storage.DefaultExperimentSt return f.defaultExperimentStore } -func (f *FakeClientManager) ScheduledWorkflow() scheduledworkflowclient.ScheduledWorkflowInterface { - return f.scheduledWorkflowClientFake +func (f *FakeClientManager) SwfClient() client.SwfClientInterface { + return f.swfClientFake } -func (f *FakeClientManager) PodClient() v1.PodInterface { - return f.podClientFake +func (f *FakeClientManager) KubernetesCoreClient() client.KubernetesCoreInterface { + return f.k8sCoreClientFake +} + +func (f *FakeClientManager) KFAMClient() client.KFAMClientInterface { + return f.KfamClientFake } func (f *FakeClientManager) Close() error { return f.db.Close() } + +// Update the uuid used in this fake client manager +func (f *FakeClientManager) UpdateUUID(uuid util.UUIDGeneratorInterface) { + f.uuid = uuid + f.experimentStore = storage.NewExperimentStore(f.db, f.time, uuid) + f.pipelineStore = storage.NewPipelineStore(f.db, f.time, uuid) +} diff --git a/backend/src/apiserver/resource/model_converter.go b/backend/src/apiserver/resource/model_converter.go index 82f6d0b9e4e..6e2f9c7f6b0 100644 --- a/backend/src/apiserver/resource/model_converter.go +++ b/backend/src/apiserver/resource/model_converter.go @@ -22,8 +22,27 @@ import ( "github.com/kubeflow/pipelines/backend/src/apiserver/common" "github.com/kubeflow/pipelines/backend/src/apiserver/model" "github.com/kubeflow/pipelines/backend/src/common/util" + "github.com/pkg/errors" ) +func (r *ResourceManager) ToModelExperiment(apiExperiment *api.Experiment) (*model.Experiment, error) { + namespace := "" + resourceReferences := apiExperiment.GetResourceReferences() + if resourceReferences != nil { + if len(resourceReferences) != 1 || + resourceReferences[0].Key.Type != api.ResourceType_NAMESPACE || + 
resourceReferences[0].Relationship != api.Relationship_OWNER { + return nil, util.NewInternalServerError(errors.New("Invalid resource references for experiment"), "Unable to convert to model experiment.") + } + namespace = resourceReferences[0].Key.Id + } + return &model.Experiment{ + Name: apiExperiment.Name, + Description: apiExperiment.Description, + Namespace: namespace, + }, nil +} + func (r *ResourceManager) ToModelRunMetric(metric *api.RunMetric, runUUID string) *model.RunMetric { return &model.RunMetric{ RunUUID: runUUID, @@ -37,33 +56,40 @@ func (r *ResourceManager) ToModelRunMetric(metric *api.RunMetric, runUUID string // The input run might not contain workflowSpecManifest, but instead a pipeline ID. // The caller would retrieve workflowSpecManifest and pass in. func (r *ResourceManager) ToModelRunDetail(run *api.Run, runId string, workflow *util.Workflow, workflowSpecManifest string) (*model.RunDetail, error) { - params, err := toModelParameters(run.PipelineSpec.Parameters) + params, err := toModelParameters(run.GetPipelineSpec().GetParameters()) if err != nil { return nil, util.Wrap(err, "Unable to parse the parameter.") } - resourceReferences, err := r.toModelResourceReferences(runId, common.Run, run.ResourceReferences) + resourceReferences, err := r.toModelResourceReferences(runId, common.Run, run.GetResourceReferences()) if err != nil { return nil, util.Wrap(err, "Unable to convert resource references.") } var pipelineName string - if run.PipelineSpec.GetPipelineId() != "" { - pipelineName, err = r.getResourceName(common.Pipeline, run.PipelineSpec.GetPipelineId()) + if run.GetPipelineSpec().GetPipelineId() != "" { + pipelineName, err = r.getResourceName(common.Pipeline, run.GetPipelineSpec().GetPipelineId()) if err != nil { return nil, util.Wrap(err, "Error getting the pipeline name") } } + experimentUUID, err := r.getOwningExperimentUUID(run.ResourceReferences) + if err != nil { + return nil, util.Wrap(err, "Error getting the experiment UUID") + } + return &model.RunDetail{ Run: model.Run{ UUID: runId, + ExperimentUUID: experimentUUID, DisplayName: run.Name, Name: workflow.Name, Namespace: workflow.Namespace, + ServiceAccount: workflow.Spec.ServiceAccountName, Conditions: workflow.Condition(), Description: run.Description, ResourceReferences: resourceReferences, PipelineSpec: model.PipelineSpec{ - PipelineId: run.PipelineSpec.GetPipelineId(), + PipelineId: run.GetPipelineSpec().GetPipelineId(), PipelineName: pipelineName, WorkflowSpecManifest: workflowSpecManifest, Parameters: params, @@ -76,34 +102,40 @@ func (r *ResourceManager) ToModelRunDetail(run *api.Run, runId string, workflow } func (r *ResourceManager) ToModelJob(job *api.Job, swf *util.ScheduledWorkflow, workflowSpecManifest string) (*model.Job, error) { - params, err := toModelParameters(job.PipelineSpec.Parameters) + params, err := toModelParameters(job.GetPipelineSpec().GetParameters()) if err != nil { return nil, util.Wrap(err, "Error parsing the input job.") } - resourceReferences, err := r.toModelResourceReferences(string(swf.UID), common.Job, job.ResourceReferences) + resourceReferences, err := r.toModelResourceReferences(string(swf.UID), common.Job, job.GetResourceReferences()) if err != nil { return nil, util.Wrap(err, "Error to convert resource references.") } var pipelineName string - if job.PipelineSpec.GetPipelineId() != "" { - pipelineName, err = r.getResourceName(common.Pipeline, job.PipelineSpec.GetPipelineId()) + if job.GetPipelineSpec().GetPipelineId() != "" { + pipelineName, err = 
r.getResourceName(common.Pipeline, job.GetPipelineSpec().GetPipelineId()) if err != nil { return nil, util.Wrap(err, "Error getting the pipeline name") } } + serviceAccount := "" + if swf.Spec.Workflow != nil { + serviceAccount = swf.Spec.Workflow.Spec.ServiceAccountName + } return &model.Job{ UUID: string(swf.UID), DisplayName: job.Name, Name: swf.Name, Namespace: swf.Namespace, + ServiceAccount: serviceAccount, Description: job.Description, Conditions: swf.ConditionSummary(), Enabled: job.Enabled, Trigger: toModelTrigger(job.Trigger), MaxConcurrency: job.MaxConcurrency, + NoCatchup: job.NoCatchup, ResourceReferences: resourceReferences, PipelineSpec: model.PipelineSpec{ - PipelineId: job.PipelineSpec.GetPipelineId(), + PipelineId: job.GetPipelineSpec().GetPipelineId(), PipelineName: pipelineName, WorkflowSpecManifest: workflowSpecManifest, Parameters: params, @@ -199,6 +231,8 @@ func (r *ResourceManager) toModelResourceReferences( if err != nil { return nil, util.Wrap(err, "Failed to find the referred resource") } + + //TODO(gaoning777) further investigation: Is the plain namespace a good option? maybe uuid for distinctness even with namespace deletion/recreation. modelRef := &model.ResourceReference{ ResourceUUID: resourceId, ResourceType: resourceType, @@ -238,7 +272,30 @@ func (r *ResourceManager) getResourceName(resourceType common.ResourceType, reso return "", util.Wrap(err, "Referred run not found.") } return run.DisplayName, nil + case common.PipelineVersion: + version, err := r.GetPipelineVersion(resourceId) + if err != nil { + return "", util.Wrap(err, "Referred pipeline version not found.") + } + return version.Name, nil + case common.Namespace: + return resourceId, nil default: return "", util.NewInvalidInputError("Unsupported resource type: %s", string(resourceType)) } } + +func (r *ResourceManager) getOwningExperimentUUID(references []*api.ResourceReference) (string, error) { + var experimentUUID string + for _, ref := range references { + if ref.Key.Type == api.ResourceType_EXPERIMENT && ref.Relationship == api.Relationship_OWNER { + experimentUUID = ref.Key.Id + break + } + } + + if experimentUUID == "" { + return "", util.NewInternalServerError(nil, "Missing owning experiment UUID") + } + return experimentUUID, nil +} diff --git a/backend/src/apiserver/resource/model_converter_test.go b/backend/src/apiserver/resource/model_converter_test.go index a607ced0636..2824a8dec90 100644 --- a/backend/src/apiserver/resource/model_converter_test.go +++ b/backend/src/apiserver/resource/model_converter_test.go @@ -15,10 +15,12 @@ package resource import ( + "strings" "testing" "github.com/argoproj/argo/pkg/apis/workflow/v1alpha1" "github.com/golang/protobuf/ptypes/timestamp" + "github.com/google/go-cmp/cmp" api "github.com/kubeflow/pipelines/backend/api/go_client" "github.com/kubeflow/pipelines/backend/src/apiserver/common" "github.com/kubeflow/pipelines/backend/src/apiserver/model" @@ -33,6 +35,93 @@ func initResourceManager() (*FakeClientManager, *ResourceManager) { return store, NewResourceManager(store) } +func TestToModelExperiment(t *testing.T) { + store, manager := initResourceManager() + defer store.Close() + + tests := []struct { + name string + experiment *api.Experiment + wantError bool + errorMessage string + expectedModelExperiment *model.Experiment + }{ + { + "No resource references", + &api.Experiment{ + Name: "exp1", + Description: "This is an experiment", + }, + false, + "", + &model.Experiment{ + Name: "exp1", + Description: "This is an experiment", + Namespace: "", 
+ }, + }, + { + "Valid resource references", + &api.Experiment{ + Name: "exp1", + Description: "This is an experiment", + ResourceReferences: []*api.ResourceReference{ + &api.ResourceReference{ + Key: &api.ResourceKey{ + Type: api.ResourceType_NAMESPACE, + Id: "ns1", + }, + Relationship: api.Relationship_OWNER, + }, + }, + }, + false, + "", + &model.Experiment{ + Name: "exp1", + Description: "This is an experiment", + Namespace: "ns1", + }, + }, + { + "Invalid resource references", + &api.Experiment{ + Name: "exp1", + Description: "This is an experiment", + ResourceReferences: []*api.ResourceReference{ + &api.ResourceReference{ + Key: &api.ResourceKey{ + Type: api.ResourceType_EXPERIMENT, + Id: "invalid", + }, + Relationship: api.Relationship_OWNER, + }, + }, + }, + true, + "Invalid resource references for experiment", + nil, + }, + } + + for _, tc := range tests { + modelExperiment, err := manager.ToModelExperiment(tc.experiment) + if tc.wantError { + if err == nil { + t.Errorf("TestToModelExperiment(%v) expect error but got nil", tc.name) + } else if !strings.Contains(err.Error(), tc.errorMessage) { + t.Errorf("TestToModelExperiment(%v) expect error containing: %v, but got: %v", tc.name, tc.errorMessage, err) + } + } else { + if err != nil { + t.Errorf("TestToModelExperiment(%v) expect no error but got %v", tc.name, err) + } else if !cmp.Equal(tc.expectedModelExperiment, modelExperiment) { + t.Errorf("TestToModelExperiment(%v) expect (%+v) but got (%+v)", tc.name, tc.expectedModelExperiment, modelExperiment) + } + } + } +} + func TestToModelRunMetric(t *testing.T) { store, manager := initResourceManager() defer store.Close() @@ -81,11 +170,12 @@ func TestToModelRunDetail(t *testing.T) { expectedModelRunDetail := &model.RunDetail{ Run: model.Run{ - UUID: "123", - DisplayName: "name1", - Name: "workflow-name", - Conditions: "running", - Description: "this is a run", + UUID: "123", + ExperimentUUID: experiment.UUID, + DisplayName: "name1", + Name: "workflow-name", + Conditions: "running", + Description: "this is a run", PipelineSpec: model.PipelineSpec{ WorkflowSpecManifest: "workflow spec", Parameters: `[{"name":"param2","value":"world"}]`, @@ -114,6 +204,7 @@ func TestToModelJob(t *testing.T) { Name: "name1", Enabled: true, MaxConcurrency: 1, + NoCatchup: true, Trigger: &api.Trigger{ Trigger: &api.Trigger_CronSchedule{CronSchedule: &api.CronSchedule{ StartTime: ×tamp.Timestamp{Seconds: 1}, @@ -150,6 +241,7 @@ func TestToModelJob(t *testing.T) { }, }, MaxConcurrency: 1, + NoCatchup: true, PipelineSpec: model.PipelineSpec{ PipelineId: pipeline.UUID, PipelineName: pipeline.Name, @@ -198,6 +290,17 @@ func TestToModelResourceReferences_UnknownRefType(t *testing.T) { assert.Contains(t, err.Error(), "Failed to convert reference type") } +func TestToModelResourceReferences_NamespaceRef(t *testing.T) { + store, manager, _ := initWithJob(t) + defer store.Close() + + modelRefs, err := manager.toModelResourceReferences("r1", common.Run, []*api.ResourceReference{ + {Key: &api.ResourceKey{Type: api.ResourceType_NAMESPACE, Id: "e1"}, Relationship: api.Relationship_OWNER}, + }) + assert.Nil(t, err) + assert.Equal(t, 1, len(modelRefs)) +} + func TestToModelResourceReferences_UnknownRelationship(t *testing.T) { store, manager, _ := initWithJob(t) defer store.Close() diff --git a/backend/src/apiserver/resource/resource_manager.go b/backend/src/apiserver/resource/resource_manager.go index 57e617d1b8c..62a707a4bce 100644 --- a/backend/src/apiserver/resource/resource_manager.go +++ 
b/backend/src/apiserver/resource/resource_manager.go @@ -24,6 +24,7 @@ import ( "github.com/cenkalti/backoff" "github.com/golang/glog" api "github.com/kubeflow/pipelines/backend/api/go_client" + "github.com/kubeflow/pipelines/backend/src/apiserver/client" "github.com/kubeflow/pipelines/backend/src/apiserver/common" "github.com/kubeflow/pipelines/backend/src/apiserver/list" "github.com/kubeflow/pipelines/backend/src/apiserver/model" @@ -33,14 +34,15 @@ import ( scheduledworkflowclient "github.com/kubeflow/pipelines/backend/src/crd/pkg/client/clientset/versioned/typed/scheduledworkflow/v1beta1" "github.com/pkg/errors" v1 "k8s.io/apimachinery/pkg/apis/meta/v1" - corev1 "k8s.io/client-go/kubernetes/typed/core/v1" "k8s.io/apimachinery/pkg/types" ) const ( - defaultPipelineRunnerServiceAccountEnvVar = "DefaultPipelineRunnerServiceAccount" - defaultPipelineRunnerServiceAccount = "pipeline-runner" + defaultPipelineRunnerServiceAccount = "pipeline-runner" + HasDefaultBucketEnvVar = "HAS_DEFAULT_BUCKET" + ProjectIDEnvVar = "PROJECT_ID" + DefaultBucketNameEnvVar = "BUCKET_NAME" ) type ClientManagerInterface interface { @@ -52,52 +54,67 @@ type ClientManagerInterface interface { DBStatusStore() storage.DBStatusStoreInterface DefaultExperimentStore() storage.DefaultExperimentStoreInterface ObjectStore() storage.ObjectStoreInterface - Workflow() workflowclient.WorkflowInterface - ScheduledWorkflow() scheduledworkflowclient.ScheduledWorkflowInterface - PodClient() corev1.PodInterface + ArgoClient() client.ArgoClientInterface + SwfClient() client.SwfClientInterface + KubernetesCoreClient() client.KubernetesCoreInterface + KFAMClient() client.KFAMClientInterface Time() util.TimeInterface UUID() util.UUIDGeneratorInterface } type ResourceManager struct { - experimentStore storage.ExperimentStoreInterface - pipelineStore storage.PipelineStoreInterface - jobStore storage.JobStoreInterface - runStore storage.RunStoreInterface - resourceReferenceStore storage.ResourceReferenceStoreInterface - dBStatusStore storage.DBStatusStoreInterface - defaultExperimentStore storage.DefaultExperimentStoreInterface - objectStore storage.ObjectStoreInterface - workflowClient workflowclient.WorkflowInterface - scheduledWorkflowClient scheduledworkflowclient.ScheduledWorkflowInterface - podClient corev1.PodInterface - time util.TimeInterface - uuid util.UUIDGeneratorInterface + experimentStore storage.ExperimentStoreInterface + pipelineStore storage.PipelineStoreInterface + jobStore storage.JobStoreInterface + runStore storage.RunStoreInterface + resourceReferenceStore storage.ResourceReferenceStoreInterface + dBStatusStore storage.DBStatusStoreInterface + defaultExperimentStore storage.DefaultExperimentStoreInterface + objectStore storage.ObjectStoreInterface + argoClient client.ArgoClientInterface + swfClient client.SwfClientInterface + k8sCoreClient client.KubernetesCoreInterface + kfamClient client.KFAMClientInterface + time util.TimeInterface + uuid util.UUIDGeneratorInterface } func NewResourceManager(clientManager ClientManagerInterface) *ResourceManager { return &ResourceManager{ - experimentStore: clientManager.ExperimentStore(), - pipelineStore: clientManager.PipelineStore(), - jobStore: clientManager.JobStore(), - runStore: clientManager.RunStore(), - resourceReferenceStore: clientManager.ResourceReferenceStore(), - dBStatusStore: clientManager.DBStatusStore(), - defaultExperimentStore: clientManager.DefaultExperimentStore(), - objectStore: clientManager.ObjectStore(), - workflowClient: clientManager.Workflow(), - 
scheduledWorkflowClient: clientManager.ScheduledWorkflow(),
- podClient: clientManager.PodClient(),
- time: clientManager.Time(),
- uuid: clientManager.UUID(),
+ experimentStore: clientManager.ExperimentStore(),
+ pipelineStore: clientManager.PipelineStore(),
+ jobStore: clientManager.JobStore(),
+ runStore: clientManager.RunStore(),
+ resourceReferenceStore: clientManager.ResourceReferenceStore(),
+ dBStatusStore: clientManager.DBStatusStore(),
+ defaultExperimentStore: clientManager.DefaultExperimentStore(),
+ objectStore: clientManager.ObjectStore(),
+ argoClient: clientManager.ArgoClient(),
+ swfClient: clientManager.SwfClient(),
+ k8sCoreClient: clientManager.KubernetesCoreClient(),
+ kfamClient: clientManager.KFAMClient(),
+ time: clientManager.Time(),
+ uuid: clientManager.UUID(),
 }
 }
+func (r *ResourceManager) getWorkflowClient(namespace string) workflowclient.WorkflowInterface {
+ return r.argoClient.Workflow(namespace)
+}
+
+func (r *ResourceManager) getScheduledWorkflowClient(namespace string) scheduledworkflowclient.ScheduledWorkflowInterface {
+ return r.swfClient.ScheduledWorkflow(namespace)
+}
+
 func (r *ResourceManager) GetTime() util.TimeInterface {
 return r.time
 }
-func (r *ResourceManager) CreateExperiment(experiment *model.Experiment) (*model.Experiment, error) {
+func (r *ResourceManager) CreateExperiment(apiExperiment *api.Experiment) (*model.Experiment, error) {
+ experiment, err := r.ToModelExperiment(apiExperiment)
+ if err != nil {
+ return nil, util.Wrap(err, "Failed to convert experiment model")
+ }
 return r.experimentStore.CreateExperiment(experiment)
 }
@@ -105,9 +122,9 @@ func (r *ResourceManager) GetExperiment(experimentId string) (*model.Experiment,
 return r.experimentStore.GetExperiment(experimentId)
 }
-func (r *ResourceManager) ListExperiments(opts *list.Options) (
+func (r *ResourceManager) ListExperiments(filterContext *common.FilterContext, opts *list.Options) (
 experiments []*model.Experiment, total_size int, nextPageToken string, err error) {
- return r.experimentStore.ListExperiments(opts)
+ return r.experimentStore.ListExperiments(filterContext, opts)
 }
 func (r *ResourceManager) DeleteExperiment(experimentID string) error {
@@ -118,6 +135,52 @@ func (r *ResourceManager) DeleteExperiment(experimentID string) error {
 return r.experimentStore.DeleteExperiment(experimentID)
 }
+func (r *ResourceManager) ArchiveExperiment(experimentId string) error {
+ // To archive an experiment
+ // (1) update our persistent agent to disable CRDs of jobs in experiment
+ // (2) update database to
+ // (2.1) archive experiments
+ // (2.2) archive runs
+ // (2.3) disable jobs
+ opts, err := list.NewOptions(&model.Job{}, 50, "name", nil)
+ if err != nil {
+ return util.NewInternalServerError(err,
+ "Failed to create list jobs options when archiving experiment. ")
+ }
+ for {
+ jobs, _, newToken, err := r.jobStore.ListJobs(&common.FilterContext{
+ ReferenceKey: &common.ReferenceKey{Type: common.Experiment, ID: experimentId}}, opts)
+ if err != nil {
+ return util.NewInternalServerError(err,
+ "Failed to list jobs of to-be-archived experiment. expID: %v", experimentId)
+ }
+ for _, job := range jobs {
+ _, err = r.getScheduledWorkflowClient(job.Namespace).Patch(
+ job.Name,
+ types.MergePatchType,
+ []byte(fmt.Sprintf(`{"spec":{"enabled":%s}}`, strconv.FormatBool(false))))
+ if err != nil {
+ return util.NewInternalServerError(err,
+ "Failed to disable job CRD. 
jobID: %v", job.UUID) + } + } + if newToken == "" { + break + } else { + opts, err = list.NewOptionsFromToken(newToken, 50) + if err != nil { + return util.NewInternalServerError(err, + "Failed to create list jobs options from page token when archiving experiment. ") + } + } + } + return r.experimentStore.ArchiveExperiment(experimentId) +} + +func (r *ResourceManager) UnarchiveExperiment(experimentId string) error { + return r.experimentStore.UnarchiveExperiment(experimentId) +} + func (r *ResourceManager) ListPipelines(opts *list.Options) ( pipelines []*model.Pipeline, total_size int, nextPageToken string, err error) { return r.pipelineStore.ListPipelines(opts) @@ -148,7 +211,7 @@ func (r *ResourceManager) DeletePipeline(pipelineId string) error { // versions and hence multiple files, and we shall improve performance by // either using async deletion in order for this method to be non-blocking // or or exploring other performance optimization tools provided by gcs. - err = r.objectStore.DeleteFile(storage.CreatePipelinePath(fmt.Sprint(pipelineId))) + err = r.objectStore.DeleteFile(r.objectStore.GetPipelineKey(fmt.Sprint(pipelineId))) if err != nil { glog.Errorf("%v", errors.Wrapf(err, "Failed to delete pipeline file for pipeline %v", pipelineId)) return nil @@ -184,7 +247,7 @@ func (r *ResourceManager) CreatePipeline(name string, description string, pipeli // Store the pipeline file to a path dependent on pipeline version err = r.objectStore.AddFile(pipelineFile, - storage.CreatePipelinePath(fmt.Sprint(newPipeline.DefaultVersion.UUID))) + r.objectStore.GetPipelineKey(fmt.Sprint(newPipeline.DefaultVersion.UUID))) if err != nil { return nil, util.Wrap(err, "Create pipeline failed") } @@ -221,7 +284,7 @@ func (r *ResourceManager) GetPipelineTemplate(pipelineId string) ([]byte, error) return nil, util.Wrap(err, "Get pipeline template failed since no default version is defined") } - template, err := r.objectStore.GetFile(storage.CreatePipelinePath(fmt.Sprint(pipeline.DefaultVersion.UUID))) + template, err := r.objectStore.GetFile(r.objectStore.GetPipelineKey(fmt.Sprint(pipeline.DefaultVersion.UUID))) if err != nil { return nil, util.Wrap(err, "Get pipeline template failed") } @@ -230,10 +293,21 @@ func (r *ResourceManager) GetPipelineTemplate(pipelineId string) ([]byte, error) } func (r *ResourceManager) CreateRun(apiRun *api.Run) (*model.RunDetail, error) { - // Get workflow from pipeline spec, which might be pipeline ID or an argo workflow - workflowSpecManifestBytes, err := r.getWorkflowSpecBytes(apiRun.GetPipelineSpec()) + // Get workflow from either of the two places: + // (1) raw pipeline manifest in pipeline_spec + // (2) pipeline version in resource_references + // And the latter takes priority over the former + var workflowSpecManifestBytes []byte + err := ConvertPipelineIdToDefaultPipelineVersion(apiRun.PipelineSpec, &apiRun.ResourceReferences, r) if err != nil { - return nil, util.Wrap(err, "Failed to fetch workflow spec.") + return nil, util.Wrap(err, "Failed to find default version to create run with pipeline id.") + } + workflowSpecManifestBytes, err = r.getWorkflowSpecBytesFromPipelineVersion(apiRun.GetResourceReferences()) + if err != nil { + workflowSpecManifestBytes, err = r.getWorkflowSpecBytesFromPipelineSpec(apiRun.GetPipelineSpec()) + if err != nil { + return nil, util.Wrap(err, "Failed to fetch workflow spec.") + } } uuid, err := r.uuid.NewRandom() if err != nil { @@ -253,11 +327,27 @@ func (r *ResourceManager) CreateRun(apiRun *api.Run) (*model.RunDetail, error) { 
 		return nil, util.Wrap(err, "Failed to verify parameters.")
 	}
 
-	workflow.SetServiceAccount(r.getDefaultSA())
+	r.setDefaultServiceAccount(&workflow, apiRun.GetServiceAccount())
+
+	// Disable istio sidecar injection
+	workflow.SetAnnotationsToAllTemplates(util.AnnotationKeyIstioSidecarInject, util.AnnotationValueIstioSidecarInjectDisabled)
+	// Add a KFP-specific label for cache service filtering. The cache_enabled flag is a global switch for
+	// whether the cache server receives matching pods. Since the cache server only sees pods at the step
+	// level, the resource manager sets this global label on every single step/pod so the cache server can
+	// act on it (see the standalone sketch just below).
+	// TODO: Add a run_level flag with similar logic by reading the flag value from the create_run api.
+	workflow.SetLabelsToAllTemplates(util.LabelKeyCacheEnabled, common.IsCacheEnabled())
 	// Append provided parameters
 	workflow.OverrideParameters(parameters)
+
+	err = OverrideParameterWithSystemDefault(workflow, apiRun)
+	if err != nil {
+		return nil, err
+	}
+
 	// Add label to the workflow so it can be persisted by persistent agent later.
 	workflow.SetLabels(util.LabelKeyWorkflowRunId, runId)
+	// Add run name annotation to the workflow so that it can be logged by the Metadata Writer.
+	workflow.SetAnnotations(util.AnnotationKeyRunName, apiRun.Name)
 	// Replace {{workflow.uid}} with runId
 	err = workflow.ReplaceUID(runId)
 	if err != nil {
@@ -266,7 +356,6 @@ func (r *ResourceManager) CreateRun(apiRun *api.Run) (*model.RunDetail, error) {
 
 	// Marking auto-added artifacts as optional. Otherwise most older workflows will start failing after upgrade to Argo 2.3.
 	// TODO: Fix the components to explicitly declare the artifacts they really output.
-	// TODO: Change the compiler to stop auto-adding those two atrifacts to all tasks.
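The SetAnnotationsToAllTemplates/SetLabelsToAllTemplates calls above fan a single key/value pair out to every step template, so that per-pod agents (here, the cache server) can select the resulting pods. A rough sketch of that fan-out under assumed, simplified types (not the real Argo structs; the label key is also only an assumption):

package main

import "fmt"

// template is a simplified stand-in for an Argo step template and its pod metadata.
type template struct {
	name   string
	labels map[string]string
}

type workflow struct {
	templates []template
}

// setLabelToAllTemplates stamps the same label on every step so that
// step-level services can filter on the resulting pods.
func (w *workflow) setLabelToAllTemplates(key, value string) {
	for i := range w.templates {
		if w.templates[i].labels == nil {
			w.templates[i].labels = map[string]string{}
		}
		w.templates[i].labels[key] = value
	}
}

func main() {
	wf := &workflow{templates: []template{{name: "preprocess"}, {name: "train"}}}
	wf.setLabelToAllTemplates("pipelines.kubeflow.org/cache_enabled", "true")
	for _, t := range wf.templates {
		fmt.Printf("%s: %v\n", t.name, t.labels) // every step carries the label
	}
}
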
for templateIdx, template := range workflow.Workflow.Spec.Templates { for artIdx, artifact := range template.Outputs.Artifacts { if artifact.Name == "mlpipeline-ui-metadata" || artifact.Name == "mlpipeline-metrics" { @@ -275,19 +364,24 @@ func (r *ResourceManager) CreateRun(apiRun *api.Run) (*model.RunDetail, error) { } } - // Create argo workflow CRD resource - newWorkflow, err := r.workflowClient.Create(workflow.Get()) + // Add a reference to the default experiment if run does not already have a containing experiment + ref, err := r.getDefaultExperimentIfNoExperiment(apiRun.GetResourceReferences()) if err != nil { - return nil, util.NewInternalServerError(err, "Failed to create a workflow for (%s)", workflow.Name) + return nil, err + } + if ref != nil { + apiRun.ResourceReferences = append(apiRun.GetResourceReferences(), ref) } - // Add a reference to the default experiment if run does not already have a containing experiment - ref, err := r.getDefaultExperimentIfNoExperiment(apiRun.ResourceReferences) + namespace, err := r.getNamespaceFromExperiment(apiRun.GetResourceReferences()) if err != nil { return nil, err } - if ref != nil { - apiRun.ResourceReferences = append(apiRun.ResourceReferences, ref) + + // Create argo workflow CRD resource + newWorkflow, err := r.getWorkflowClient(namespace).Create(workflow.Get()) + if err != nil { + return nil, util.NewInternalServerError(err, "Failed to create a workflow for (%s)", workflow.Name) } // Store run metadata into database @@ -323,7 +417,11 @@ func (r *ResourceManager) DeleteRun(runID string) error { if err != nil { return util.Wrap(err, "Delete run failed") } - err = r.workflowClient.Delete(runDetail.Name, &v1.DeleteOptions{}) + namespace, err := r.GetNamespaceFromRunID(runID) + if err != nil { + return util.Wrap(err, "Delete run failed") + } + err = r.getWorkflowClient(namespace).Delete(runDetail.Name, &v1.DeleteOptions{}) if err != nil { // API won't need to delete the workflow CRD // once persistent agent sync the state to DB and set TTL for it. @@ -369,12 +467,17 @@ func (r *ResourceManager) TerminateRun(runId string) error { return util.Wrap(err, "Terminate run failed") } + namespace, err := r.GetNamespaceFromRunID(runId) + if err != nil { + return util.Wrap(err, "Terminate run failed") + } + err = r.runStore.TerminateRun(runId) if err != nil { return util.Wrap(err, "Terminate run failed") } - err = TerminateWorkflow(r.workflowClient, runDetail.Run.Name) + err = TerminateWorkflow(r.getWorkflowClient(namespace), runDetail.Run.Name) if err != nil { return util.NewInternalServerError(err, "Failed to terminate the run") } @@ -386,6 +489,10 @@ func (r *ResourceManager) RetryRun(runId string) error { if err != nil { return util.Wrap(err, "Retry run failed") } + namespace, err := r.GetNamespaceFromRunID(runId) + if err != nil { + return util.Wrap(err, "Retry run failed") + } if runDetail.WorkflowRuntimeManifest == "" { return util.NewBadRequestError(errors.New("workflow cannot be retried"), "Workflow must be Failed/Error to retry") @@ -400,16 +507,16 @@ func (r *ResourceManager) RetryRun(runId string) error { return util.Wrap(err, "Retry run failed.") } - if err = deletePods(r.podClient, podsToDelete); err != nil { + if err = deletePods(r.k8sCoreClient, podsToDelete, namespace); err != nil { return util.NewInternalServerError(err, "Retry run failed. 
Failed to clean up the failed pods from previous run.") } // First try to update workflow - updateError := r.updateWorkflow(newWorkflow) + updateError := r.updateWorkflow(newWorkflow, namespace) if updateError != nil { // Remove resource version newWorkflow.ResourceVersion = "" - newCreatedWorkflow, createError := r.workflowClient.Create(newWorkflow.Workflow) + newCreatedWorkflow, createError := r.getWorkflowClient(namespace).Create(newWorkflow.Workflow) if createError != nil { return util.NewInternalServerError(createError, "Retry run failed. Failed to create or update the run. Update Error: %s, Create Error: %s", @@ -424,15 +531,15 @@ func (r *ResourceManager) RetryRun(runId string) error { return nil } -func (r *ResourceManager) updateWorkflow(newWorkflow *util.Workflow) error { +func (r *ResourceManager) updateWorkflow(newWorkflow *util.Workflow, namespace string) error { // If fail to get the workflow, return error. - latestWorkflow, err := r.workflowClient.Get(newWorkflow.Name, v1.GetOptions{}) + latestWorkflow, err := r.getWorkflowClient(namespace).Get(newWorkflow.Name, v1.GetOptions{}) if err != nil { return err } // Update the workflow's resource version to latest. newWorkflow.ResourceVersion = latestWorkflow.ResourceVersion - _, err = r.workflowClient.Update(newWorkflow.Workflow) + _, err = r.getWorkflowClient(namespace).Update(newWorkflow.Workflow) return err } @@ -441,11 +548,23 @@ func (r *ResourceManager) GetJob(id string) (*model.Job, error) { } func (r *ResourceManager) CreateJob(apiJob *api.Job) (*model.Job, error) { - // Get workflow from pipeline spec, which might be pipeline ID or an argo workflow - workflowSpecManifestBytes, err := r.getWorkflowSpecBytes(apiJob.GetPipelineSpec()) + // Get workflow from either of the two places: + // (1) raw pipeline manifest in pipeline_spec + // (2) pipeline version in resource_references + // And the latter takes priority over the former + var workflowSpecManifestBytes []byte + err := ConvertPipelineIdToDefaultPipelineVersion(apiJob.PipelineSpec, &apiJob.ResourceReferences, r) if err != nil { - return nil, util.Wrap(err, "Failed to fetch workflow spec.") + return nil, util.Wrap(err, "Failed to find default version to create job with pipeline id.") } + workflowSpecManifestBytes, err = r.getWorkflowSpecBytesFromPipelineVersion(apiJob.GetResourceReferences()) + if err != nil { + workflowSpecManifestBytes, err = r.getWorkflowSpecBytesFromPipelineSpec(apiJob.GetPipelineSpec()) + if err != nil { + return nil, util.Wrap(err, "Failed to fetch workflow spec.") + } + } + var workflow util.Workflow err = json.Unmarshal(workflowSpecManifestBytes, &workflow) if err != nil { @@ -454,18 +573,20 @@ func (r *ResourceManager) CreateJob(apiJob *api.Job) (*model.Job, error) { } // Verify no additional parameter provided - err = workflow.VerifyParameters(toParametersMap(apiJob.PipelineSpec.Parameters)) + err = workflow.VerifyParameters(toParametersMap(apiJob.GetPipelineSpec().GetParameters())) if err != nil { return nil, util.Wrap(err, "Create job failed") } + + r.setDefaultServiceAccount(&workflow, apiJob.GetServiceAccount()) + + // Disable istio sidecar injection + workflow.SetAnnotationsToAllTemplates(util.AnnotationKeyIstioSidecarInject, util.AnnotationValueIstioSidecarInjectDisabled) + swfGeneratedName, err := toSWFCRDResourceGeneratedName(apiJob.Name) if err != nil { return nil, util.Wrap(err, "Create job failed") } - - // Set workflow to be run using default pipeline runner service account. 
- workflow.SetServiceAccount(r.getDefaultSA()) - scheduledWorkflow := &scheduledworkflow.ScheduledWorkflow{ ObjectMeta: v1.ObjectMeta{GenerateName: swfGeneratedName}, Spec: scheduledworkflow.ScheduledWorkflowSpec{ @@ -473,23 +594,40 @@ func (r *ResourceManager) CreateJob(apiJob *api.Job) (*model.Job, error) { MaxConcurrency: &apiJob.MaxConcurrency, Trigger: *toCRDTrigger(apiJob.Trigger), Workflow: &scheduledworkflow.WorkflowResource{ - Parameters: toCRDParameter(apiJob.PipelineSpec.Parameters), + Parameters: toCRDParameter(apiJob.GetPipelineSpec().GetParameters()), Spec: workflow.Spec, }, + NoCatchup: util.BoolPointer(apiJob.NoCatchup), }, } - newScheduledWorkflow, err := r.scheduledWorkflowClient.Create(scheduledWorkflow) - if err != nil { - return nil, util.NewInternalServerError(err, "Failed to create a scheduled workflow for (%s)", scheduledWorkflow.Name) + + // Marking auto-added artifacts as optional. Otherwise most older workflows will start failing after upgrade to Argo 2.3. + // TODO: Fix the components to explicitly declare the artifacts they really output. + for templateIdx, template := range scheduledWorkflow.Spec.Workflow.Spec.Templates { + for artIdx, artifact := range template.Outputs.Artifacts { + if artifact.Name == "mlpipeline-ui-metadata" || artifact.Name == "mlpipeline-metrics" { + scheduledWorkflow.Spec.Workflow.Spec.Templates[templateIdx].Outputs.Artifacts[artIdx].Optional = true + } + } } // Add a reference to the default experiment if run does not already have a containing experiment - ref, err := r.getDefaultExperimentIfNoExperiment(apiJob.ResourceReferences) + ref, err := r.getDefaultExperimentIfNoExperiment(apiJob.GetResourceReferences()) if err != nil { return nil, err } if ref != nil { - apiJob.ResourceReferences = append(apiJob.ResourceReferences, ref) + apiJob.ResourceReferences = append(apiJob.GetResourceReferences(), ref) + } + + namespace, err := r.getNamespaceFromExperiment(apiJob.GetResourceReferences()) + if err != nil { + return nil, err + } + + newScheduledWorkflow, err := r.getScheduledWorkflowClient(namespace).Create(scheduledWorkflow) + if err != nil { + return nil, util.NewInternalServerError(err, "Failed to create a scheduled workflow for (%s)", scheduledWorkflow.Name) } job, err := r.ToModelJob(apiJob, util.NewScheduledWorkflow(newScheduledWorkflow), string(workflowSpecManifestBytes)) @@ -508,7 +646,8 @@ func (r *ResourceManager) EnableJob(jobID string, enabled bool) error { if err != nil { return util.Wrap(err, "Enable/Disable job failed") } - _, err = r.scheduledWorkflowClient.Patch( + + _, err = r.getScheduledWorkflowClient(job.Namespace).Patch( job.Name, types.MergePatchType, []byte(fmt.Sprintf(`{"spec":{"enabled":%s}}`, strconv.FormatBool(enabled)))) @@ -532,7 +671,8 @@ func (r *ResourceManager) DeleteJob(jobID string) error { if err != nil { return util.Wrap(err, "Delete job failed") } - err = r.scheduledWorkflowClient.Delete(job.Name, &v1.DeleteOptions{}) + + err = r.getScheduledWorkflowClient(job.Namespace).Delete(job.Name, &v1.DeleteOptions{}) if err != nil { return util.NewInternalServerError(err, "Delete job CRD failed.") } @@ -550,10 +690,13 @@ func (r *ResourceManager) ReportWorkflowResource(workflow *util.Workflow) error } runId := workflow.ObjectMeta.Labels[util.LabelKeyWorkflowRunId] jobId := workflow.ScheduledWorkflowUUIDAsStringOrEmpty() + if len(workflow.Namespace) == 0 { + return util.NewInvalidInputError("Workflow missing namespace") + } if workflow.PersistedFinalState() { // If workflow's final state has being persisted, 
the workflow should be garbage collected. - err := r.workflowClient.Delete(workflow.Name, &v1.DeleteOptions{}) + err := r.getWorkflowClient(workflow.Namespace).Delete(workflow.Name, &v1.DeleteOptions{}) if err != nil { return util.NewInternalServerError(err, "Failed to delete the completed workflow for run %s", runId) } @@ -621,7 +764,7 @@ func (r *ResourceManager) ReportWorkflowResource(workflow *util.Workflow) error } if workflow.IsInFinalState() { - err := AddWorkflowLabel(r.workflowClient, workflow.Name, util.LabelKeyWorkflowPersistedFinalState, "true") + err := AddWorkflowLabel(r.getWorkflowClient(workflow.Namespace), workflow.Name, util.LabelKeyWorkflowPersistedFinalState, "true") if err != nil { return util.Wrap(err, "Failed to add PersistedFinalState label to workflow") } @@ -666,7 +809,8 @@ func (r *ResourceManager) checkJobExist(jobID string) (*model.Job, error) { if err != nil { return nil, util.Wrap(err, "Check job exist failed") } - scheduledWorkflow, err := r.scheduledWorkflowClient.Get(job.Name, v1.GetOptions{}) + + scheduledWorkflow, err := r.getScheduledWorkflowClient(job.Namespace).Get(job.Name, v1.GetOptions{}) if err != nil { return nil, util.NewInternalServerError(err, "Check job exist failed") } @@ -687,21 +831,32 @@ func (r *ResourceManager) checkRunExist(runID string) (*model.RunDetail, error) return runDetail, nil } -func (r *ResourceManager) getWorkflowSpecBytes(spec *api.PipelineSpec) ([]byte, error) { - if spec.GetPipelineId() != "" { - var workflow util.Workflow - err := r.objectStore.GetFromYamlFile(&workflow, storage.CreatePipelinePath(spec.GetPipelineId())) - if err != nil { - return nil, util.Wrap(err, "Get pipeline YAML failed.") - } - - return []byte(workflow.ToStringForStore()), nil - } else if spec.GetWorkflowManifest() != "" { +func (r *ResourceManager) getWorkflowSpecBytesFromPipelineSpec(spec *api.PipelineSpec) ([]byte, error) { + if spec.GetWorkflowManifest() != "" { return []byte(spec.GetWorkflowManifest()), nil } return nil, util.NewInvalidInputError("Please provide a valid pipeline spec") } +func (r *ResourceManager) getWorkflowSpecBytesFromPipelineVersion(references []*api.ResourceReference) ([]byte, error) { + var pipelineVersionId = "" + for _, reference := range references { + if reference.Key.Type == api.ResourceType_PIPELINE_VERSION && reference.Relationship == api.Relationship_CREATOR { + pipelineVersionId = reference.Key.Id + } + } + if len(pipelineVersionId) == 0 { + return nil, util.NewInvalidInputError("No pipeline version.") + } + var workflow util.Workflow + err := r.objectStore.GetFromYamlFile(&workflow, r.objectStore.GetPipelineKey(pipelineVersionId)) + if err != nil { + return nil, util.Wrap(err, "Get pipeline YAML failed.") + } + + return []byte(workflow.ToStringForStore()), nil +} + // Used to initialize the Experiment database with a default to be used for runs func (r *ResourceManager) CreateDefaultExperiment() (string, error) { // First check that we don't already have a default experiment ID in the DB. 
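The hunks below wire run and job creation to a default experiment: when a request carries no experiment reference, single-user deployments fall back to the shared default experiment, while multi-user mode rejects the request. A compact sketch of that decision, with hypothetical helper types standing in for the resource-reference machinery:

package main

import (
	"errors"
	"fmt"
)

// ref is an illustrative reference type; only what the decision needs.
type ref struct{ refType string }

// experimentRefOrDefault mirrors getDefaultExperimentIfNoExperiment: keep an
// explicit experiment reference, reject in multi-user mode, otherwise fall
// back to the shared default experiment.
func experimentRefOrDefault(refs []ref, multiUser bool, defaultExperimentID string) (string, error) {
	for _, r := range refs {
		if r.refType == "EXPERIMENT" {
			return "", nil // already owned by an experiment; nothing to add
		}
	}
	if multiUser {
		return "", errors.New("experiment is required in resource references")
	}
	return defaultExperimentID, nil
}

func main() {
	id, err := experimentRefOrDefault(nil, false, "default-exp-uuid")
	fmt.Println(id, err) // single-user fallback: "default-exp-uuid <nil>"
	_, err = experimentRefOrDefault(nil, true, "default-exp-uuid")
	fmt.Println(err) // multi-user mode requires an explicit experiment
}
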
@@ -716,7 +871,7 @@ func (r *ResourceManager) CreateDefaultExperiment() (string, error) {
 	}
 
 	// Create default experiment
-	defaultExperiment := &model.Experiment{
+	defaultExperiment := &api.Experiment{
 		Name:        "Default",
 		Description: "All runs created without specifying an experiment will be grouped here.",
 	}
@@ -744,7 +899,13 @@ func (r *ResourceManager) getDefaultExperimentIfNoExperiment(references []*api.R
 			return nil, nil
 		}
 	}
+	if common.IsMultiUserMode() {
+		return nil, util.NewInvalidInputError("Experiment is required in resource references.")
+	}
+	return r.getDefaultExperimentResourceReference(references)
+}
 
+func (r *ResourceManager) getDefaultExperimentResourceReference(references []*api.ResourceReference) (*api.ResourceReference, error) {
 	// Create reference to the default experiment
 	defaultExperimentId, err := r.GetDefaultExperimentId()
 	if err != nil {
@@ -812,5 +973,161 @@ func (r *ResourceManager) MarkSampleLoaded() error {
 }
 
 func (r *ResourceManager) getDefaultSA() string {
-	return common.GetStringConfigWithDefault(defaultPipelineRunnerServiceAccountEnvVar, defaultPipelineRunnerServiceAccount)
+	return common.GetStringConfigWithDefault(common.DefaultPipelineRunnerServiceAccount, defaultPipelineRunnerServiceAccount)
+}
+
+func (r *ResourceManager) CreatePipelineVersion(apiVersion *api.PipelineVersion, pipelineFile []byte) (*model.PipelineVersion, error) {
+	// Extract the parameters from the pipeline
+	params, err := util.GetParameters(pipelineFile)
+	if err != nil {
+		return nil, util.Wrap(err, "Create pipeline version failed")
+	}
+
+	// Extract pipeline id
+	var pipelineId = ""
+	for _, resourceReference := range apiVersion.ResourceReferences {
+		if resourceReference.Key.Type == api.ResourceType_PIPELINE && resourceReference.Relationship == api.Relationship_OWNER {
+			pipelineId = resourceReference.Key.Id
+		}
+	}
+	if len(pipelineId) == 0 {
+		return nil, util.NewInvalidInputError("Create pipeline version failed due to missing pipeline id")
+	}
+
+	// Construct model.PipelineVersion
+	version := &model.PipelineVersion{
+		Name:          apiVersion.Name,
+		PipelineId:    pipelineId,
+		Status:        model.PipelineVersionCreating,
+		Parameters:    params,
+		CodeSourceUrl: apiVersion.CodeSourceUrl,
+	}
+	version, err = r.pipelineStore.CreatePipelineVersion(version)
+	if err != nil {
+		return nil, util.Wrap(err, "Create pipeline version failed")
+	}
+
+	// Store the pipeline file
+	err = r.objectStore.AddFile(pipelineFile, r.objectStore.GetPipelineKey(fmt.Sprint(version.UUID)))
+	if err != nil {
+		return nil, util.Wrap(err, "Create pipeline version failed")
+	}
+
+	// After the pipeline version is created in the DB and the pipeline file is
+	// saved in the minio server, set this pipeline version's status to ready.
+	version.Status = model.PipelineVersionReady
+	err = r.pipelineStore.UpdatePipelineVersionStatus(version.UUID, version.Status)
+	if err != nil {
+		return nil, util.Wrap(err, "Create pipeline version failed")
+	}
+
+	return version, nil
+}
+
+func (r *ResourceManager) GetPipelineVersion(versionId string) (*model.PipelineVersion, error) {
+	return r.pipelineStore.GetPipelineVersion(versionId)
+}
+
+func (r *ResourceManager) ListPipelineVersions(pipelineId string, opts *list.Options) (pipelines []*model.PipelineVersion, total_size int, nextPageToken string, err error) {
+	return r.pipelineStore.ListPipelineVersions(pipelineId, opts)
+}
+
+func (r *ResourceManager) DeletePipelineVersion(pipelineVersionId string) error {
+	_, err := r.pipelineStore.GetPipelineVersion(pipelineVersionId)
+	if err != nil {
+		return util.Wrap(err, "Delete pipeline version failed")
+	}
+
+	// Mark the pipeline version as deleting so it's not visible to the user.
+	err = r.pipelineStore.UpdatePipelineVersionStatus(pipelineVersionId, model.PipelineVersionDeleting)
+	if err != nil {
+		return util.Wrap(err, "Delete pipeline version failed")
+	}
+
+	err = r.objectStore.DeleteFile(r.objectStore.GetPipelineKey(fmt.Sprint(pipelineVersionId)))
+	if err != nil {
+		glog.Errorf("%v", errors.Wrapf(err, "Failed to delete pipeline file for pipeline version %v", pipelineVersionId))
+		return util.Wrap(err, "Delete pipeline version failed")
+	}
+	err = r.pipelineStore.DeletePipelineVersion(pipelineVersionId)
+	if err != nil {
+		glog.Errorf("%v", errors.Wrapf(err, "Failed to delete pipeline DB entry for pipeline %v", pipelineVersionId))
+		return util.Wrap(err, "Delete pipeline version failed")
+	}
+
+	return nil
+}
+
+func (r *ResourceManager) GetPipelineVersionTemplate(versionId string) ([]byte, error) {
+	// Verify the pipeline version exists
+	_, err := r.pipelineStore.GetPipelineVersion(versionId)
+	if err != nil {
+		return nil, util.Wrap(err, "Get pipeline version template failed")
+	}
+
+	template, err := r.objectStore.GetFile(r.objectStore.GetPipelineKey(fmt.Sprint(versionId)))
+	if err != nil {
+		return nil, util.Wrap(err, "Get pipeline version template failed")
+	}
+
+	return template, nil
+}
+
+func (r *ResourceManager) IsRequestAuthorized(userIdentity string, namespace string) (bool, error) {
+	return r.kfamClient.IsAuthorized(userIdentity, namespace)
+}
+
+func (r *ResourceManager) GetNamespaceFromExperimentID(experimentID string) (string, error) {
+	experiment, err := r.GetExperiment(experimentID)
+	if err != nil {
+		return "", util.Wrap(err, "Failed to get namespace from experiment ID.")
+	}
+	return experiment.Namespace, nil
+}
+
+func (r *ResourceManager) GetNamespaceFromRunID(runId string) (string, error) {
+	runDetail, err := r.GetRun(runId)
+	if err != nil {
+		return "", util.Wrap(err, "Failed to get namespace from run ID.")
+	}
+	return runDetail.Namespace, nil
+}
+
+func (r *ResourceManager) GetNamespaceFromJobID(jobId string) (string, error) {
+	job, err := r.GetJob(jobId)
+	if err != nil {
+		return "", util.Wrap(err, "Failed to get namespace from job ID.")
+	}
+	return job.Namespace, nil
+}
+
+func (r *ResourceManager) setDefaultServiceAccount(workflow *util.Workflow, serviceAccount string) {
+	if len(serviceAccount) > 0 {
+		workflow.SetServiceAccount(serviceAccount)
+		return
+	}
+	workflowServiceAccount := workflow.Spec.ServiceAccountName
+	if len(workflowServiceAccount) == 0 || workflowServiceAccount == defaultPipelineRunnerServiceAccount {
+		// To preserve SDK backward compatibility, the backend only replaces
+		// the service account 
when it is empty or equal to default value set by SDK. + workflow.SetServiceAccount(r.getDefaultSA()) + } +} + +func (r *ResourceManager) getNamespaceFromExperiment(references []*api.ResourceReference) (string, error) { + experimentID := common.GetExperimentIDFromAPIResourceReferences(references) + experiment, err := r.GetExperiment(experimentID) + if err != nil { + return "", util.NewInternalServerError(err, "Failed to get experiment.") + } + + namespace := experiment.Namespace + if len(namespace) == 0 { + if common.IsMultiUserMode() { + return "", util.NewInternalServerError(errors.New("Missing namespace"), "Experiment %v doesn't have a namespace.", experiment.Name) + } else { + namespace = common.GetPodNamespace() + } + } + return namespace, nil } diff --git a/backend/src/apiserver/resource/resource_manager_test.go b/backend/src/apiserver/resource/resource_manager_test.go index 5416770adc7..c687f636c9f 100644 --- a/backend/src/apiserver/resource/resource_manager_test.go +++ b/backend/src/apiserver/resource/resource_manager_test.go @@ -20,24 +20,32 @@ import ( "testing" "time" - "encoding/json" - "github.com/argoproj/argo/pkg/apis/workflow/v1alpha1" api "github.com/kubeflow/pipelines/backend/api/go_client" + "github.com/kubeflow/pipelines/backend/src/apiserver/client" "github.com/kubeflow/pipelines/backend/src/apiserver/common" "github.com/kubeflow/pipelines/backend/src/apiserver/model" "github.com/kubeflow/pipelines/backend/src/apiserver/storage" "github.com/kubeflow/pipelines/backend/src/common/util" swfapi "github.com/kubeflow/pipelines/backend/src/crd/pkg/apis/scheduledworkflow/v1beta1" "github.com/pkg/errors" + "github.com/spf13/viper" "github.com/stretchr/testify/assert" "google.golang.org/grpc/codes" v1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/types" ) +func initEnvVars() { + viper.Set(common.PodNamespace, "ns1") +} + type FakeBadObjectStore struct{} +func (m *FakeBadObjectStore) GetPipelineKey(pipelineID string) string { + return pipelineID +} + func (m *FakeBadObjectStore) AddFile(template []byte, filePath string) error { return util.NewInternalServerError(errors.New("Error"), "bad object store") } @@ -60,13 +68,14 @@ func (m *FakeBadObjectStore) GetFromYamlFile(o interface{}, filePath string) err var testWorkflow = util.NewWorkflow(&v1alpha1.Workflow{ TypeMeta: v1.TypeMeta{APIVersion: "argoproj.io/v1alpha1", Kind: "Workflow"}, - ObjectMeta: v1.ObjectMeta{Name: "workflow-name", UID: "workflow1"}, + ObjectMeta: v1.ObjectMeta{Name: "workflow-name", UID: "workflow1", Namespace: "ns1"}, Spec: v1alpha1.WorkflowSpec{Arguments: v1alpha1.Arguments{Parameters: []v1alpha1.Parameter{{Name: "param1"}}}}, Status: v1alpha1.WorkflowStatus{Phase: v1alpha1.NodeRunning}, }) // Util function to create an initial state with pipeline uploaded func initWithPipeline(t *testing.T) (*FakeClientManager, *ResourceManager, *model.Pipeline) { + initEnvVars() store := NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) manager := NewResourceManager(store) p, err := manager.CreatePipeline("p1", "", []byte(testWorkflow.ToStringForStore())) @@ -75,19 +84,21 @@ func initWithPipeline(t *testing.T) (*FakeClientManager, *ResourceManager, *mode } func initWithExperiment(t *testing.T) (*FakeClientManager, *ResourceManager, *model.Experiment) { + initEnvVars() store := NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) manager := NewResourceManager(store) - experiment := &model.Experiment{Name: "e1"} - experiment, err := manager.CreateExperiment(experiment) + apiExperiment := 
&api.Experiment{Name: "e1"} + experiment, err := manager.CreateExperiment(apiExperiment) assert.Nil(t, err) return store, manager, experiment } func initWithExperimentAndPipeline(t *testing.T) (*FakeClientManager, *ResourceManager, *model.Experiment, *model.Pipeline) { + initEnvVars() store := NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) manager := NewResourceManager(store) - experiment := &model.Experiment{Name: "e1"} - experiment, err := manager.CreateExperiment(experiment) + apiExperiment := &api.Experiment{Name: "e1"} + experiment, err := manager.CreateExperiment(apiExperiment) assert.Nil(t, err) pipeline, err := manager.CreatePipeline("p1", "", []byte(testWorkflow.ToStringForStore())) assert.Nil(t, err) @@ -136,6 +147,28 @@ func initWithOneTimeRun(t *testing.T) (*FakeClientManager, *ResourceManager, *mo return store, manager, runDetail } +func initWithPatchedRun(t *testing.T) (*FakeClientManager, *ResourceManager, *model.RunDetail) { + store, manager, exp := initWithExperiment(t) + apiRun := &api.Run{ + Name: "run1", + PipelineSpec: &api.PipelineSpec{ + WorkflowManifest: testWorkflow.ToStringForStore(), + Parameters: []*api.Parameter{ + {Name: "param1", Value: "{{kfp-default-bucket}}"}, + }, + }, + ResourceReferences: []*api.ResourceReference{ + { + Key: &api.ResourceKey{Type: api.ResourceType_EXPERIMENT, Id: exp.UUID}, + Relationship: api.Relationship_OWNER, + }, + }, + } + runDetail, err := manager.CreateRun(apiRun) + assert.Nil(t, err) + return store, manager, runDetail +} + func initWithOneTimeFailedRun(t *testing.T) (*FakeClientManager, *ResourceManager, *model.RunDetail) { store, manager, exp := initWithExperiment(t) apiRun := &api.Run{ @@ -159,7 +192,8 @@ func initWithOneTimeFailedRun(t *testing.T) (*FakeClientManager, *ResourceManage updatedWorkflow.SetLabels(util.LabelKeyWorkflowRunId, runDetail.UUID) updatedWorkflow.Status.Phase = v1alpha1.NodeFailed updatedWorkflow.Status.Nodes = map[string]v1alpha1.NodeStatus{"node1": {Name: "pod1", Type: v1alpha1.NodeTypePod, Phase: v1alpha1.NodeFailed}} - manager.ReportWorkflowResource(updatedWorkflow) + err = manager.ReportWorkflowResource(updatedWorkflow) + assert.Nil(t, err) return store, manager, runDetail } @@ -252,7 +286,7 @@ func TestGetPipelineTemplate_PipelineMetadataNotFound(t *testing.T) { store := NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) defer store.Close() template := []byte("workflow: foo") - store.ObjectStore().AddFile(template, storage.CreatePipelinePath(fmt.Sprint(1))) + store.objectStore.AddFile(template, store.objectStore.GetPipelineKey(fmt.Sprint(1))) manager := NewResourceManager(store) _, err := manager.GetPipelineTemplate("1") assert.Equal(t, codes.NotFound, err.(*util.UserError).ExternalStatusCode()) @@ -272,9 +306,30 @@ func TestGetPipelineTemplate_PipelineFileNotFound(t *testing.T) { func TestCreateRun_ThroughPipelineID(t *testing.T) { store, manager, p := initWithPipeline(t) defer store.Close() - experiment := &model.Experiment{Name: "e1"} - experiment, err := manager.CreateExperiment(experiment) + apiExperiment := &api.Experiment{Name: "e1"} + experiment, err := manager.CreateExperiment(apiExperiment) assert.Nil(t, err) + + // Create a new pipeline version with UUID being FakeUUID. 
+ pipelineStore, ok := store.pipelineStore.(*storage.PipelineStore) + assert.True(t, ok) + pipelineStore.SetUUIDGenerator(util.NewFakeUUIDGeneratorOrFatal(FakeUUIDOne, nil)) + version, err := manager.CreatePipelineVersion(&api.PipelineVersion{ + Name: "version_for_run", + ResourceReferences: []*api.ResourceReference{ + &api.ResourceReference{ + Key: &api.ResourceKey{ + Id: p.UUID, + Type: api.ResourceType_PIPELINE, + }, + Relationship: api.Relationship_OWNER, + }, + }, + }, []byte(testWorkflow.ToStringForStore())) + assert.Nil(t, err) + + // The pipeline specified via pipeline id will be converted to this + // pipeline's default version, which will be used to create run. apiRun := &api.Run{ Name: "run1", PipelineSpec: &api.PipelineSpec{ @@ -297,15 +352,19 @@ func TestCreateRun_ThroughPipelineID(t *testing.T) { expectedRuntimeWorkflow.Spec.Arguments.Parameters = []v1alpha1.Parameter{ {Name: "param1", Value: util.StringPointer("world")}} expectedRuntimeWorkflow.Labels = map[string]string{util.LabelKeyWorkflowRunId: "123e4567-e89b-12d3-a456-426655440000"} + expectedRuntimeWorkflow.Annotations = map[string]string{util.AnnotationKeyRunName: "run1"} expectedRuntimeWorkflow.Spec.ServiceAccountName = defaultPipelineRunnerServiceAccount expectedRunDetail := &model.RunDetail{ Run: model.Run{ UUID: "123e4567-e89b-12d3-a456-426655440000", + ExperimentUUID: experiment.UUID, DisplayName: "run1", Name: "workflow-name", + Namespace: "ns1", + ServiceAccount: "pipeline-runner", StorageState: api.Run_STORAGESTATE_AVAILABLE.String(), - CreatedAtInSec: 3, + CreatedAtInSec: 4, Conditions: "Running", PipelineSpec: model.PipelineSpec{ PipelineId: p.UUID, @@ -322,6 +381,14 @@ func TestCreateRun_ThroughPipelineID(t *testing.T) { ReferenceType: common.Experiment, Relationship: common.Owner, }, + { + ResourceUUID: "123e4567-e89b-12d3-a456-426655440000", + ResourceType: common.Run, + ReferenceUUID: version.UUID, + ReferenceName: version.Name, + ReferenceType: common.PipelineVersion, + Relationship: common.Creator, + }, }, }, PipelineRuntime: model.PipelineRuntime{ @@ -329,7 +396,7 @@ func TestCreateRun_ThroughPipelineID(t *testing.T) { }, } assert.Equal(t, expectedRunDetail, runDetail, "The CreateRun return has unexpected value.") - assert.Equal(t, 1, store.workflowClientFake.GetWorkflowCount(), "Workflow CRD is not created.") + assert.Equal(t, 1, store.ArgoClientFake.GetWorkflowCount(), "Workflow CRD is not created.") runDetail, err = manager.GetRun(runDetail.UUID) assert.Nil(t, err) assert.Equal(t, expectedRunDetail, runDetail, "CreateRun stored invalid data in database") @@ -337,16 +404,21 @@ func TestCreateRun_ThroughPipelineID(t *testing.T) { func TestCreateRun_ThroughWorkflowSpec(t *testing.T) { store, manager, runDetail := initWithOneTimeRun(t) + expectedExperimentUUID := runDetail.ExperimentUUID expectedRuntimeWorkflow := testWorkflow.DeepCopy() expectedRuntimeWorkflow.Spec.Arguments.Parameters = []v1alpha1.Parameter{ {Name: "param1", Value: util.StringPointer("world")}} expectedRuntimeWorkflow.Labels = map[string]string{util.LabelKeyWorkflowRunId: "123e4567-e89b-12d3-a456-426655440000"} + expectedRuntimeWorkflow.Annotations = map[string]string{util.AnnotationKeyRunName: "run1"} expectedRuntimeWorkflow.Spec.ServiceAccountName = defaultPipelineRunnerServiceAccount expectedRunDetail := &model.RunDetail{ Run: model.Run{ UUID: "123e4567-e89b-12d3-a456-426655440000", + ExperimentUUID: expectedExperimentUUID, DisplayName: "run1", Name: "workflow-name", + Namespace: "ns1", + ServiceAccount: "pipeline-runner", 
StorageState: api.Run_STORAGESTATE_AVAILABLE.String(), CreatedAtInSec: 2, Conditions: "Running", @@ -370,15 +442,164 @@ func TestCreateRun_ThroughWorkflowSpec(t *testing.T) { }, } assert.Equal(t, expectedRunDetail, runDetail, "The CreateRun return has unexpected value.") - assert.Equal(t, 1, store.workflowClientFake.GetWorkflowCount(), "Workflow CRD is not created.") + assert.Equal(t, 1, store.ArgoClientFake.GetWorkflowCount(), "Workflow CRD is not created.") + runDetail, err := manager.GetRun(runDetail.UUID) + assert.Nil(t, err) + assert.Equal(t, expectedRunDetail, runDetail, "CreateRun stored invalid data in database") +} + +func TestCreateRun_ThroughWorkflowSpecWithPatch(t *testing.T) { + viper.Set(HasDefaultBucketEnvVar, "true") + viper.Set(ProjectIDEnvVar, "test-project-id") + viper.Set(DefaultBucketNameEnvVar, "test-default-bucket") + store, manager, runDetail := initWithPatchedRun(t) + expectedExperimentUUID := runDetail.ExperimentUUID + expectedRuntimeWorkflow := testWorkflow.DeepCopy() + expectedRuntimeWorkflow.Spec.Arguments.Parameters = []v1alpha1.Parameter{ + {Name: "param1", Value: util.StringPointer("test-default-bucket")}} + expectedRuntimeWorkflow.Labels = map[string]string{util.LabelKeyWorkflowRunId: "123e4567-e89b-12d3-a456-426655440000"} + expectedRuntimeWorkflow.Annotations = map[string]string{util.AnnotationKeyRunName: "run1"} + expectedRuntimeWorkflow.Spec.ServiceAccountName = defaultPipelineRunnerServiceAccount + expectedRunDetail := &model.RunDetail{ + Run: model.Run{ + UUID: "123e4567-e89b-12d3-a456-426655440000", + ExperimentUUID: expectedExperimentUUID, + DisplayName: "run1", + Name: "workflow-name", + Namespace: "ns1", + ServiceAccount: "pipeline-runner", + StorageState: api.Run_STORAGESTATE_AVAILABLE.String(), + CreatedAtInSec: 2, + Conditions: "Running", + PipelineSpec: model.PipelineSpec{ + WorkflowSpecManifest: testWorkflow.ToStringForStore(), + Parameters: "[{\"name\":\"param1\",\"value\":\"test-default-bucket\"}]", + }, + ResourceReferences: []*model.ResourceReference{ + { + ResourceUUID: "123e4567-e89b-12d3-a456-426655440000", + ResourceType: common.Run, + ReferenceUUID: DefaultFakeUUID, + ReferenceName: "e1", + ReferenceType: common.Experiment, + Relationship: common.Owner, + }, + }, + }, + PipelineRuntime: model.PipelineRuntime{ + WorkflowRuntimeManifest: util.NewWorkflow(expectedRuntimeWorkflow).ToStringForStore(), + }, + } + assert.Equal(t, expectedRunDetail, runDetail, "The CreateRun return has unexpected value.") + assert.Equal(t, 1, store.ArgoClientFake.GetWorkflowCount(), "Workflow CRD is not created.") runDetail, err := manager.GetRun(runDetail.UUID) assert.Nil(t, err) assert.Equal(t, expectedRunDetail, runDetail, "CreateRun stored invalid data in database") } +func TestCreateRun_ThroughPipelineVersion(t *testing.T) { + // Create experiment, pipeline, and pipeline version. 
+ store, manager, experiment, pipeline := initWithExperimentAndPipeline(t) + defer store.Close() + pipelineStore, ok := store.pipelineStore.(*storage.PipelineStore) + assert.True(t, ok) + pipelineStore.SetUUIDGenerator(util.NewFakeUUIDGeneratorOrFatal(FakeUUIDOne, nil)) + version, err := manager.CreatePipelineVersion(&api.PipelineVersion{ + Name: "version_for_run", + ResourceReferences: []*api.ResourceReference{ + &api.ResourceReference{ + Key: &api.ResourceKey{ + Id: pipeline.UUID, + Type: api.ResourceType_PIPELINE, + }, + Relationship: api.Relationship_OWNER, + }, + }, + }, []byte(testWorkflow.ToStringForStore())) + assert.Nil(t, err) + + apiRun := &api.Run{ + Name: "run1", + PipelineSpec: &api.PipelineSpec{ + Parameters: []*api.Parameter{ + {Name: "param1", Value: "world"}, + }, + }, + ResourceReferences: []*api.ResourceReference{ + { + Key: &api.ResourceKey{Type: api.ResourceType_EXPERIMENT, Id: experiment.UUID}, + Relationship: api.Relationship_OWNER, + }, + { + Key: &api.ResourceKey{Type: api.ResourceType_PIPELINE_VERSION, Id: version.UUID}, + Relationship: api.Relationship_CREATOR, + }, + }, + ServiceAccount: "sa1", + } + runDetail, err := manager.CreateRun(apiRun) + assert.Nil(t, err) + + expectedRuntimeWorkflow := testWorkflow.DeepCopy() + expectedRuntimeWorkflow.Spec.Arguments.Parameters = []v1alpha1.Parameter{ + {Name: "param1", Value: util.StringPointer("world")}} + expectedRuntimeWorkflow.Labels = map[string]string{util.LabelKeyWorkflowRunId: "123e4567-e89b-12d3-a456-426655440000"} + expectedRuntimeWorkflow.Annotations = map[string]string{util.AnnotationKeyRunName: "run1"} + expectedRuntimeWorkflow.Spec.ServiceAccountName = "sa1" + + expectedRunDetail := &model.RunDetail{ + Run: model.Run{ + UUID: "123e4567-e89b-12d3-a456-426655440000", + ExperimentUUID: experiment.UUID, + DisplayName: "run1", + Name: "workflow-name", + Namespace: "ns1", + ServiceAccount: "sa1", + StorageState: api.Run_STORAGESTATE_AVAILABLE.String(), + CreatedAtInSec: 4, + Conditions: "Running", + PipelineSpec: model.PipelineSpec{ + WorkflowSpecManifest: testWorkflow.ToStringForStore(), + Parameters: "[{\"name\":\"param1\",\"value\":\"world\"}]", + }, + ResourceReferences: []*model.ResourceReference{ + { + ResourceUUID: "123e4567-e89b-12d3-a456-426655440000", + ResourceType: common.Run, + ReferenceUUID: experiment.UUID, + ReferenceName: "e1", + ReferenceType: common.Experiment, + Relationship: common.Owner, + }, + { + ResourceUUID: "123e4567-e89b-12d3-a456-426655440000", + ResourceType: common.Run, + ReferenceUUID: version.UUID, + ReferenceName: "version_for_run", + ReferenceType: common.PipelineVersion, + Relationship: common.Creator, + }, + }, + }, + PipelineRuntime: model.PipelineRuntime{ + WorkflowRuntimeManifest: util.NewWorkflow(expectedRuntimeWorkflow).ToStringForStore(), + }, + } + assert.Equal(t, expectedRunDetail, runDetail, "The CreateRun return has unexpected value.") + assert.Equal(t, 1, store.ArgoClientFake.GetWorkflowCount(), "Workflow CRD is not created.") + runDetail, err = manager.GetRun(runDetail.UUID) + assert.Nil(t, err) + assert.Equal(t, expectedRunDetail, runDetail, "CreateRun stored invalid data in database") +} + func TestCreateRun_NoExperiment(t *testing.T) { store := NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) + defer store.Close() manager := NewResourceManager(store) + experimentID, err := manager.CreateDefaultExperiment() + experiment, err := manager.GetExperiment(experimentID) + assert.Equal(t, experiment.Name, "Default") + apiRun := &api.Run{ Name: "No experiment", 
PipelineSpec: &api.PipelineSpec{ @@ -464,7 +685,7 @@ func TestCreateRun_CreateWorkflowError(t *testing.T) { store := NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) defer store.Close() manager := NewResourceManager(store) - manager.workflowClient = &FakeBadWorkflowClient{} + manager.argoClient = client.NewFakeArgoClientWithBadWorkflow() apiRun := &api.Run{ Name: "run1", PipelineSpec: &api.PipelineSpec{ @@ -522,7 +743,7 @@ func TestDeleteRun_CrdFailure(t *testing.T) { store, manager, runDetail := initWithOneTimeRun(t) defer store.Close() - manager.workflowClient = &FakeBadWorkflowClient{} + manager.argoClient = client.NewFakeArgoClientWithBadWorkflow() err := manager.DeleteRun(runDetail.UUID) //assert.Equal(t, codes.Internal, err.(*util.UserError).ExternalStatusCode()) //assert.Contains(t, err.Error(), "some error") @@ -588,7 +809,7 @@ func TestDeleteExperiment_CrdFailure(t *testing.T) { store, manager, experiment := initWithExperiment(t) defer store.Close() - manager.workflowClient = &FakeBadWorkflowClient{} + manager.argoClient = client.NewFakeArgoClientWithBadWorkflow() err := manager.DeleteExperiment(experiment.UUID) assert.Nil(t, err) } @@ -614,7 +835,7 @@ func TestTerminateRun(t *testing.T) { assert.Nil(t, err) assert.Equal(t, "Terminating", actualRunDetail.Conditions) - isTerminated, err := store.workflowClientFake.isTerminated(runDetail.Run.Name) + isTerminated, err := store.ArgoClientFake.IsTerminated(runDetail.Run.Name) assert.Nil(t, err) assert.True(t, isTerminated) } @@ -667,7 +888,7 @@ func TestRetryRun_FailedDeletePods(t *testing.T) { store, manager, runDetail := initWithOneTimeFailedRun(t) defer store.Close() - manager.podClient = FakeBadPodClient{} + manager.k8sCoreClient = client.NewFakeKubernetesCoreClientWithBadPodClient() err := manager.RetryRun(runDetail.UUID) assert.NotNil(t, err) assert.Contains(t, err.Error(), "failed to delete pod") @@ -677,7 +898,7 @@ func TestRetryRun_UpdateAndCreateFailed(t *testing.T) { store, manager, runDetail := initWithOneTimeFailedRun(t) defer store.Close() - manager.workflowClient = &FakeBadWorkflowClient{} + manager.argoClient = client.NewFakeArgoClientWithBadWorkflow() err := manager.RetryRun(runDetail.UUID) assert.NotNil(t, err) assert.Contains(t, err.Error(), "Failed to create or update the run") @@ -687,10 +908,11 @@ func TestCreateJob_ThroughWorkflowSpec(t *testing.T) { store, _, job := initWithJob(t) defer store.Close() expectedJob := &model.Job{ - UUID: "123", + UUID: "123e4567-e89b-12d3-a456-426655440000", DisplayName: "j1", Name: "j1", - Namespace: "default", + Namespace: "ns1", + ServiceAccount: "pipeline-runner", Enabled: true, CreatedAtInSec: 2, UpdatedAtInSec: 2, @@ -700,7 +922,7 @@ func TestCreateJob_ThroughWorkflowSpec(t *testing.T) { }, ResourceReferences: []*model.ResourceReference{ { - ResourceUUID: "123", + ResourceUUID: "123e4567-e89b-12d3-a456-426655440000", ResourceType: common.Job, ReferenceUUID: DefaultFakeUUID, ReferenceName: "e1", @@ -715,8 +937,8 @@ func TestCreateJob_ThroughWorkflowSpec(t *testing.T) { func TestCreateJob_ThroughPipelineID(t *testing.T) { store, manager, pipeline := initWithPipeline(t) defer store.Close() - experiment := &model.Experiment{Name: "e1"} - experiment, err := manager.CreateExperiment(experiment) + apiExperiment := &api.Experiment{Name: "e1"} + experiment, err := manager.CreateExperiment(apiExperiment) job := &api.Job{ Name: "j1", Enabled: true, @@ -733,15 +955,37 @@ func TestCreateJob_ThroughPipelineID(t *testing.T) { }, }, } + + // Create a new pipeline version with UUID 
being FakeUUID. + pipelineStore, ok := store.pipelineStore.(*storage.PipelineStore) + assert.True(t, ok) + pipelineStore.SetUUIDGenerator(util.NewFakeUUIDGeneratorOrFatal(FakeUUIDOne, nil)) + version, err := manager.CreatePipelineVersion(&api.PipelineVersion{ + Name: "version_for_run", + ResourceReferences: []*api.ResourceReference{ + &api.ResourceReference{ + Key: &api.ResourceKey{ + Id: pipeline.UUID, + Type: api.ResourceType_PIPELINE, + }, + Relationship: api.Relationship_OWNER, + }, + }, + }, []byte(testWorkflow.ToStringForStore())) + assert.Nil(t, err) + + // The pipeline specified via pipeline id will be converted to this + // pipeline's default version, which will be used to create run. newJob, err := manager.CreateJob(job) expectedJob := &model.Job{ - UUID: "123", + UUID: "123e4567-e89b-12d3-a456-426655440000", DisplayName: "j1", Name: "j1", - Namespace: "default", + Namespace: "ns1", + ServiceAccount: "pipeline-runner", Enabled: true, - CreatedAtInSec: 3, - UpdatedAtInSec: 3, + CreatedAtInSec: 4, + UpdatedAtInSec: 4, Conditions: "NO_STATUS", PipelineSpec: model.PipelineSpec{ PipelineId: pipeline.UUID, @@ -751,13 +995,99 @@ func TestCreateJob_ThroughPipelineID(t *testing.T) { }, ResourceReferences: []*model.ResourceReference{ { - ResourceUUID: "123", + ResourceUUID: "123e4567-e89b-12d3-a456-426655440000", ResourceType: common.Job, ReferenceUUID: experiment.UUID, ReferenceName: "e1", ReferenceType: common.Experiment, Relationship: common.Owner, }, + { + ResourceUUID: "123e4567-e89b-12d3-a456-426655440000", + ResourceType: common.Job, + ReferenceUUID: version.UUID, + ReferenceName: version.Name, + ReferenceType: common.PipelineVersion, + Relationship: common.Creator, + }, + }, + } + assert.Nil(t, err) + assert.Equal(t, expectedJob, newJob) +} + +func TestCreateJob_ThroughPipelineVersion(t *testing.T) { + // Create experiment, pipeline and pipeline version. 
+ store, manager, experiment, pipeline := initWithExperimentAndPipeline(t) + defer store.Close() + pipelineStore, ok := store.pipelineStore.(*storage.PipelineStore) + assert.True(t, ok) + pipelineStore.SetUUIDGenerator(util.NewFakeUUIDGeneratorOrFatal(FakeUUIDOne, nil)) + version, err := manager.CreatePipelineVersion(&api.PipelineVersion{ + Name: "version_for_job", + ResourceReferences: []*api.ResourceReference{ + &api.ResourceReference{ + Key: &api.ResourceKey{ + Id: pipeline.UUID, + Type: api.ResourceType_PIPELINE, + }, + Relationship: api.Relationship_OWNER, + }, + }, + }, []byte(testWorkflow.ToStringForStore())) + assert.Nil(t, err) + + job := &api.Job{ + Name: "j1", + Enabled: true, + PipelineSpec: &api.PipelineSpec{ + Parameters: []*api.Parameter{ + {Name: "param1", Value: "world"}, + }, + }, + ResourceReferences: []*api.ResourceReference{ + { + Key: &api.ResourceKey{Type: api.ResourceType_EXPERIMENT, Id: experiment.UUID}, + Relationship: api.Relationship_OWNER, + }, + { + Key: &api.ResourceKey{Type: api.ResourceType_PIPELINE_VERSION, Id: version.UUID}, + Relationship: api.Relationship_CREATOR, + }, + }, + } + newJob, err := manager.CreateJob(job) + expectedJob := &model.Job{ + UUID: "123e4567-e89b-12d3-a456-426655440000", + DisplayName: "j1", + Name: "j1", + Namespace: "ns1", + ServiceAccount: "pipeline-runner", + Enabled: true, + CreatedAtInSec: 4, + UpdatedAtInSec: 4, + Conditions: "NO_STATUS", + PipelineSpec: model.PipelineSpec{ + WorkflowSpecManifest: testWorkflow.ToStringForStore(), + Parameters: "[{\"name\":\"param1\",\"value\":\"world\"}]", + }, + ResourceReferences: []*model.ResourceReference{ + { + ResourceUUID: "123e4567-e89b-12d3-a456-426655440000", + ResourceType: common.Job, + ReferenceUUID: experiment.UUID, + ReferenceName: "e1", + ReferenceType: common.Experiment, + Relationship: common.Owner, + }, + { + ResourceUUID: "123e4567-e89b-12d3-a456-426655440000", + ResourceType: common.Job, + ReferenceUUID: version.UUID, + ReferenceName: "version_for_job", + ReferenceType: common.PipelineVersion, + Relationship: common.Creator, + }, }, } assert.Nil(t, err) @@ -822,7 +1152,7 @@ func TestCreateJob_ExtraInputParameterError(t *testing.T) { func TestCreateJob_FailedToCreateScheduleWorkflow(t *testing.T) { store, manager, p := initWithPipeline(t) defer store.Close() - manager.scheduledWorkflowClient = &FakeBadScheduledWorkflowClient{} + manager.swfClient = client.NewFakeSwfClientWithBadWorkflow() job := &api.Job{ Name: "pp1", Enabled: true, @@ -839,10 +1169,11 @@ func TestEnableJob(t *testing.T) { err := manager.EnableJob(job.UUID, false) job, err = manager.GetJob(job.UUID) expectedJob := &model.Job{ - UUID: "123", + UUID: "123e4567-e89b-12d3-a456-426655440000", DisplayName: "j1", Name: "j1", - Namespace: "default", + Namespace: "ns1", + ServiceAccount: "pipeline-runner", Enabled: false, CreatedAtInSec: 2, UpdatedAtInSec: 3, @@ -852,7 +1183,7 @@ func TestEnableJob(t *testing.T) { }, ResourceReferences: []*model.ResourceReference{ { - ResourceUUID: "123", + ResourceUUID: "123e4567-e89b-12d3-a456-426655440000", ResourceType: common.Job, ReferenceUUID: DefaultFakeUUID, ReferenceName: "e1", @@ -877,7 +1208,7 @@ func TestEnableJob_JobNotExist(t *testing.T) { func TestEnableJob_CrdFailure(t *testing.T) { store, manager, job := initWithJob(t) defer store.Close() - manager.scheduledWorkflowClient = &FakeBadScheduledWorkflowClient{} + manager.swfClient = client.NewFakeSwfClientWithBadWorkflow() err := manager.EnableJob(job.UUID, false) assert.Equal(t, codes.Internal, 
err.(*util.UserError).ExternalStatusCode()) assert.Contains(t, err.Error(), "Check job exist failed: some error") @@ -900,7 +1231,7 @@ func TestDeleteJob(t *testing.T) { _, err = manager.GetJob(job.UUID) assert.Equal(t, codes.NotFound, err.(*util.UserError).ExternalStatusCode()) - assert.Contains(t, err.Error(), "Job 123 not found") + assert.Contains(t, err.Error(), fmt.Sprintf("Job %v not found", job.UUID)) } func TestDeleteJob_JobNotExist(t *testing.T) { @@ -916,7 +1247,7 @@ func TestDeleteJob_CrdFailure(t *testing.T) { store, manager, job := initWithJob(t) defer store.Close() - manager.scheduledWorkflowClient = &FakeBadScheduledWorkflowClient{} + manager.swfClient = client.NewFakeSwfClientWithBadWorkflow() err := manager.DeleteJob(job.UUID) assert.Equal(t, codes.Internal, err.(*util.UserError).ExternalStatusCode()) assert.Contains(t, err.Error(), "Check job exist failed: some error") @@ -934,12 +1265,14 @@ func TestDeleteJob_DbFailure(t *testing.T) { func TestReportWorkflowResource_ScheduledWorkflowIDEmpty_Success(t *testing.T) { store, manager, run := initWithOneTimeRun(t) + expectedExperimentUUID := run.ExperimentUUID defer store.Close() // report workflow workflow := util.NewWorkflow(&v1alpha1.Workflow{ ObjectMeta: v1.ObjectMeta{ - UID: types.UID(run.UUID), - Labels: map[string]string{util.LabelKeyWorkflowRunId: run.UUID}, + UID: types.UID(run.UUID), + Labels: map[string]string{util.LabelKeyWorkflowRunId: run.UUID}, + Namespace: "ns1", }, Status: v1alpha1.WorkflowStatus{Phase: v1alpha1.NodeRunning}, }) @@ -949,8 +1282,11 @@ func TestReportWorkflowResource_ScheduledWorkflowIDEmpty_Success(t *testing.T) { assert.Nil(t, err) expectedRun := model.Run{ UUID: "123e4567-e89b-12d3-a456-426655440000", + ExperimentUUID: expectedExperimentUUID, DisplayName: "run1", Name: "workflow-name", + Namespace: "ns1", + ServiceAccount: "pipeline-runner", StorageState: api.Run_STORAGESTATE_AVAILABLE.String(), CreatedAtInSec: 2, Conditions: "Running", @@ -1012,14 +1348,6 @@ func TestReportWorkflowResource_ScheduledWorkflowIDNotEmpty_Success(t *testing.T WorkflowSpecManifest: workflow.GetWorkflowSpec().ToStringForStore(), }, ResourceReferences: []*model.ResourceReference{ - { - ResourceUUID: "WORKFLOW_1", - ResourceType: common.Run, - ReferenceUUID: job.UUID, - ReferenceName: job.Name, - ReferenceType: common.Job, - Relationship: common.Creator, - }, { ResourceUUID: "WORKFLOW_1", ResourceType: common.Run, @@ -1028,6 +1356,14 @@ func TestReportWorkflowResource_ScheduledWorkflowIDNotEmpty_Success(t *testing.T ReferenceType: common.Experiment, Relationship: common.Owner, }, + { + ResourceUUID: "WORKFLOW_1", + ResourceType: common.Run, + ReferenceUUID: job.UUID, + ReferenceName: job.Name, + ReferenceType: common.Job, + Relationship: common.Creator, + }, }, }, PipelineRuntime: model.PipelineRuntime{WorkflowRuntimeManifest: workflow.ToStringForStore()}, @@ -1085,14 +1421,6 @@ func TestReportWorkflowResource_ScheduledWorkflowIDNotEmpty_NoExperiment_Success WorkflowSpecManifest: workflow.GetWorkflowSpec().ToStringForStore(), }, ResourceReferences: []*model.ResourceReference{ - { - ResourceUUID: "WORKFLOW_1", - ResourceType: common.Run, - ReferenceUUID: newJob.UUID, - ReferenceName: newJob.Name, - ReferenceType: common.Job, - Relationship: common.Creator, - }, { ResourceUUID: "WORKFLOW_1", ResourceType: common.Run, @@ -1101,6 +1429,14 @@ func TestReportWorkflowResource_ScheduledWorkflowIDNotEmpty_NoExperiment_Success ReferenceType: common.Experiment, Relationship: common.Owner, }, + { + ResourceUUID: "WORKFLOW_1", + 
ResourceType: common.Run, + ReferenceUUID: newJob.UUID, + ReferenceName: newJob.Name, + ReferenceType: common.Job, + Relationship: common.Creator, + }, }, }, PipelineRuntime: model.PipelineRuntime{WorkflowRuntimeManifest: workflow.ToStringForStore()}, @@ -1111,20 +1447,22 @@ func TestReportWorkflowResource_ScheduledWorkflowIDNotEmpty_NoExperiment_Success func TestReportWorkflowResource_WorkflowCompleted(t *testing.T) { store, manager, run := initWithOneTimeRun(t) + namespace := "kubeflow" defer store.Close() // report workflow workflow := util.NewWorkflow(&v1alpha1.Workflow{ ObjectMeta: v1.ObjectMeta{ - Name: run.Name, - UID: types.UID(run.UUID), - Labels: map[string]string{util.LabelKeyWorkflowRunId: run.UUID}, + Name: run.Name, + Namespace: namespace, + UID: types.UID(run.UUID), + Labels: map[string]string{util.LabelKeyWorkflowRunId: run.UUID}, }, Status: v1alpha1.WorkflowStatus{Phase: v1alpha1.NodeFailed}, }) err := manager.ReportWorkflowResource(workflow) assert.Nil(t, err) - wf, err := store.workflowClientFake.Get(run.Run.Name, v1.GetOptions{}) + wf, err := store.ArgoClientFake.Workflow(namespace).Get(run.Run.Name, v1.GetOptions{}) assert.Nil(t, err) assert.Equal(t, wf.Labels[util.LabelKeyWorkflowPersistedFinalState], "true") } @@ -1135,9 +1473,10 @@ func TestReportWorkflowResource_WorkflowCompleted_FinalStatePersisted(t *testing // report workflow workflow := util.NewWorkflow(&v1alpha1.Workflow{ ObjectMeta: v1.ObjectMeta{ - Name: run.Name, - UID: types.UID(run.UUID), - Labels: map[string]string{util.LabelKeyWorkflowRunId: run.UUID, util.LabelKeyWorkflowPersistedFinalState: "true"}, + Name: run.Name, + Namespace: "ns1", + UID: types.UID(run.UUID), + Labels: map[string]string{util.LabelKeyWorkflowRunId: run.UUID, util.LabelKeyWorkflowPersistedFinalState: "true"}, }, Status: v1alpha1.WorkflowStatus{Phase: v1alpha1.NodeFailed}, }) @@ -1147,14 +1486,15 @@ func TestReportWorkflowResource_WorkflowCompleted_FinalStatePersisted(t *testing func TestReportWorkflowResource_WorkflowCompleted_FinalStatePersisted_DeleteFailed(t *testing.T) { store, manager, run := initWithOneTimeRun(t) - manager.workflowClient = &FakeBadWorkflowClient{} + manager.argoClient = client.NewFakeArgoClientWithBadWorkflow() defer store.Close() // report workflow workflow := util.NewWorkflow(&v1alpha1.Workflow{ ObjectMeta: v1.ObjectMeta{ - Name: run.Name, - UID: types.UID(run.UUID), - Labels: map[string]string{util.LabelKeyWorkflowRunId: run.UUID, util.LabelKeyWorkflowPersistedFinalState: "true"}, + Name: run.Name, + Namespace: "ns1", + UID: types.UID(run.UUID), + Labels: map[string]string{util.LabelKeyWorkflowRunId: run.UUID, util.LabelKeyWorkflowPersistedFinalState: "true"}, }, Status: v1alpha1.WorkflowStatus{Phase: v1alpha1.NodeFailed}, }) @@ -1181,12 +1521,13 @@ func TestReportScheduledWorkflowResource_Success(t *testing.T) { assert.Nil(t, err) expectedJob := &model.Job{ - Name: "MY_NAME", - DisplayName: "j1", - Namespace: "MY_NAMESPACE", - Enabled: false, - UUID: job.UUID, - Conditions: "NO_STATUS", + Name: "MY_NAME", + DisplayName: "j1", + Namespace: "MY_NAMESPACE", + ServiceAccount: "pipeline-runner", + Enabled: false, + UUID: job.UUID, + Conditions: "NO_STATUS", Trigger: model.Trigger{ CronSchedule: model.CronSchedule{ Cron: util.StringPointer(""), @@ -1252,38 +1593,6 @@ func TestReportScheduledWorkflowResource_Error(t *testing.T) { assert.Contains(t, err.(*util.UserError).String(), "database is closed") } -func TestGetWorkflowSpecBytes_ByPipelineID(t *testing.T) { - store, manager, pipeline := initWithPipeline(t) 
- defer store.Close() - spec := &api.PipelineSpec{ - PipelineId: pipeline.UUID, - Parameters: []*api.Parameter{ - {Name: "param1", Value: "world"}, - }, - } - workflowBytes, err := manager.getWorkflowSpecBytes(spec) - assert.Nil(t, err) - var actualWorkflow v1alpha1.Workflow - json.Unmarshal(workflowBytes, &actualWorkflow) - assert.Equal(t, testWorkflow.Get(), &actualWorkflow) -} - -func TestGetWorkflowSpecBytes_ByPipelineID_NotExist(t *testing.T) { - store := NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) - defer store.Close() - manager := NewResourceManager(store) - - spec := &api.PipelineSpec{ - PipelineId: "1", - Parameters: []*api.Parameter{ - {Name: "param1", Value: "world"}, - }, - } - _, err := manager.getWorkflowSpecBytes(spec) - assert.NotNil(t, err) - assert.Contains(t, err.Error(), "not found") -} - func TestGetWorkflowSpecBytes_ByWorkflowManifest(t *testing.T) { store := NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) defer store.Close() @@ -1295,7 +1604,7 @@ func TestGetWorkflowSpecBytes_ByWorkflowManifest(t *testing.T) { {Name: "param1", Value: "world"}, }, } - workflowBytes, err := manager.getWorkflowSpecBytes(spec) + workflowBytes, err := manager.getWorkflowSpecBytesFromPipelineSpec(spec) assert.Nil(t, err) assert.Equal(t, []byte("some manifest"), workflowBytes) } @@ -1310,7 +1619,7 @@ func TestGetWorkflowSpecBytes_MissingSpec(t *testing.T) { {Name: "param1", Value: "world"}, }, } - _, err := manager.getWorkflowSpecBytes(spec) + _, err := manager.getWorkflowSpecBytesFromPipelineSpec(spec) assert.NotNil(t, err) assert.Contains(t, err.Error(), "Please provide a valid pipeline spec") } @@ -1759,3 +2068,311 @@ spec: valueFrom: path: /output.txt` ) + +func TestCreatePipelineVersion(t *testing.T) { + store := NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) + manager := NewResourceManager(store) + + // Create a pipeline before versions. + _, err := manager.CreatePipeline("p", "", []byte(testWorkflow.ToStringForStore())) + assert.Nil(t, err) + + // Create a version under the above pipeline. + pipelineStore, ok := store.pipelineStore.(*storage.PipelineStore) + assert.True(t, ok) + pipelineStore.SetUUIDGenerator(util.NewFakeUUIDGeneratorOrFatal(FakeUUIDOne, nil)) + version, err := manager.CreatePipelineVersion( + &api.PipelineVersion{ + Name: "p_v", + ResourceReferences: []*api.ResourceReference{ + &api.ResourceReference{ + Key: &api.ResourceKey{ + Id: DefaultFakeUUID, + Type: api.ResourceType_PIPELINE, + }, + Relationship: api.Relationship_OWNER, + }, + }, + }, + []byte(testWorkflow.ToStringForStore())) + assert.Nil(t, err) + + defer store.Close() + pipelineVersionExpected := &model.PipelineVersion{ + UUID: FakeUUIDOne, + CreatedAtInSec: 2, + Name: "p_v", + Parameters: "[{\"name\":\"param1\"}]", + Status: model.PipelineVersionReady, + PipelineId: DefaultFakeUUID, + } + assert.Equal(t, pipelineVersionExpected, version) +} + +func TestCreatePipelineVersion_ComplexPipelineVersion(t *testing.T) { + store := NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) + defer store.Close() + manager := NewResourceManager(store) + + // Create a pipeline. + createdPipeline, err := manager.CreatePipeline("pipeline", "", []byte(strings.TrimSpace(complexPipeline))) + assert.Nil(t, err) + + // Create a version under the above pipeline. 
+ pipelineStore, ok := store.pipelineStore.(*storage.PipelineStore) + assert.True(t, ok) + pipelineStore.SetUUIDGenerator(util.NewFakeUUIDGeneratorOrFatal(FakeUUIDOne, nil)) + version, err := manager.CreatePipelineVersion( + &api.PipelineVersion{ + Name: "pipeline_version", + ResourceReferences: []*api.ResourceReference{ + &api.ResourceReference{ + Key: &api.ResourceKey{ + Id: DefaultFakeUUID, + Type: api.ResourceType_PIPELINE, + }, + Relationship: api.Relationship_OWNER, + }, + }, + }, + []byte(strings.TrimSpace(complexPipeline))) + assert.Nil(t, err) + + _, err = manager.GetPipeline(createdPipeline.UUID) + assert.Nil(t, err) + + _, err = manager.GetPipelineVersion(version.UUID) + assert.Nil(t, err) +} + +func TestCreatePipelineVersion_CreatePipelineVersionFileError(t *testing.T) { + store := NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) + defer store.Close() + manager := NewResourceManager(store) + + // Create a pipeline. + _, err := manager.CreatePipeline("pipeline", "", []byte(strings.TrimSpace(complexPipeline))) + assert.Nil(t, err) + + // Switch to a bad object store + manager.objectStore = &FakeBadObjectStore{} + + // Create a version under the above pipeline. + pipelineStore, ok := store.pipelineStore.(*storage.PipelineStore) + assert.True(t, ok) + pipelineStore.SetUUIDGenerator(util.NewFakeUUIDGeneratorOrFatal(FakeUUIDOne, nil)) + _, err = manager.CreatePipelineVersion( + &api.PipelineVersion{ + Name: "pipeline_version", + ResourceReferences: []*api.ResourceReference{ + &api.ResourceReference{ + Key: &api.ResourceKey{ + Id: DefaultFakeUUID, + Type: api.ResourceType_PIPELINE, + }, + Relationship: api.Relationship_OWNER, + }, + }, + }, + []byte("apiVersion: argoproj.io/v1alpha1\nkind: Workflow")) + assert.Equal(t, codes.Internal, err.(*util.UserError).ExternalStatusCode()) + assert.Contains(t, err.Error(), "bad object store") + + // Verify the pipeline version in DB is in status PipelineVersionCreating. + version, err := manager.pipelineStore.GetPipelineVersionWithStatus(FakeUUIDOne, model.PipelineVersionCreating) + assert.Nil(t, err) + assert.NotNil(t, version) +} + +func TestCreatePipelineVersion_GetParametersError(t *testing.T) { + store := NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) + defer store.Close() + manager := NewResourceManager(store) + + // Create a pipeline. + _, err := manager.CreatePipeline("pipeline", "", []byte(testWorkflow.ToStringForStore())) + assert.Nil(t, err) + + // Create a version under the above pipeline. + pipelineStore, ok := store.pipelineStore.(*storage.PipelineStore) + assert.True(t, ok) + pipelineStore.SetUUIDGenerator(util.NewFakeUUIDGeneratorOrFatal(FakeUUIDOne, nil)) + _, err = manager.CreatePipelineVersion( + &api.PipelineVersion{ + Name: "pipeline_version", + ResourceReferences: []*api.ResourceReference{ + &api.ResourceReference{ + Key: &api.ResourceKey{ + Id: DefaultFakeUUID, + Type: api.ResourceType_PIPELINE, + }, + Relationship: api.Relationship_OWNER, + }, + }, + }, + []byte("I am invalid yaml")) + assert.Equal(t, codes.InvalidArgument, err.(*util.UserError).ExternalStatusCode()) + assert.Contains(t, err.Error(), "Failed to parse the parameter") +} + +func TestCreatePipelineVersion_StorePipelineVersionMetadataError(t *testing.T) { + store := NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) + defer store.Close() + manager := NewResourceManager(store) + + // Create a pipeline. 
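+	// A minimal two-line Workflow manifest is enough here; this test exercises the closed-DB failure path rather than manifest parsing.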
+ _, err := manager.CreatePipeline( + "pipeline", + "", + []byte("apiVersion: argoproj.io/v1alpha1\nkind: Workflow")) + assert.Nil(t, err) + + // Close db. + store.DB().Close() + + // Create a version under the above pipeline, resulting in error because of + // closed db. + pipelineStore, ok := store.pipelineStore.(*storage.PipelineStore) + assert.True(t, ok) + pipelineStore.SetUUIDGenerator(util.NewFakeUUIDGeneratorOrFatal( + FakeUUIDOne, nil)) + _, err = manager.CreatePipelineVersion( + &api.PipelineVersion{ + Name: "pipeline_version", + ResourceReferences: []*api.ResourceReference{ + &api.ResourceReference{ + Key: &api.ResourceKey{ + Id: DefaultFakeUUID, + Type: api.ResourceType_PIPELINE, + }, + Relationship: api.Relationship_OWNER, + }, + }, + }, + []byte("apiVersion: argoproj.io/v1alpha1\nkind: Workflow")) + assert.Equal(t, codes.Internal, err.(*util.UserError).ExternalStatusCode()) + assert.Contains(t, err.Error(), "database is closed") +} + +func TestDeletePipelineVersion(t *testing.T) { + store := NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) + defer store.Close() + manager := NewResourceManager(store) + + // Create a pipeline. + _, err := manager.CreatePipeline("pipeline", "", []byte("apiVersion: argoproj.io/v1alpha1\nkind: Workflow")) + assert.Nil(t, err) + + // Create a version under the above pipeline. + pipelineStore, ok := store.pipelineStore.(*storage.PipelineStore) + assert.True(t, ok) + pipelineStore.SetUUIDGenerator(util.NewFakeUUIDGeneratorOrFatal(FakeUUIDOne, nil)) + _, err = manager.CreatePipelineVersion( + &api.PipelineVersion{ + Name: "pipeline_version", + ResourceReferences: []*api.ResourceReference{ + &api.ResourceReference{ + Key: &api.ResourceKey{ + Id: DefaultFakeUUID, + Type: api.ResourceType_PIPELINE, + }, + Relationship: api.Relationship_OWNER, + }, + }, + }, + []byte("apiVersion: argoproj.io/v1alpha1\nkind: Workflow")) + + // Delete the above pipeline_version. + err = manager.DeletePipelineVersion(FakeUUIDOne) + assert.Nil(t, err) + + // Verify the version doesn't exist. + _, err = manager.GetPipelineVersion(FakeUUIDOne) + assert.Equal(t, codes.NotFound, err.(*util.UserError).ExternalStatusCode()) +} + +func TestDeletePipelineVersion_FileError(t *testing.T) { + store := NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) + defer store.Close() + manager := NewResourceManager(store) + + // Create a pipeline. + _, err := manager.CreatePipeline("pipeline", "", []byte("apiVersion: argoproj.io/v1alpha1\nkind: Workflow")) + assert.Nil(t, err) + + // Create a version under the above pipeline. + pipelineStore, ok := store.pipelineStore.(*storage.PipelineStore) + assert.True(t, ok) + pipelineStore.SetUUIDGenerator(util.NewFakeUUIDGeneratorOrFatal(FakeUUIDOne, nil)) + _, err = manager.CreatePipelineVersion( + &api.PipelineVersion{ + Name: "pipeline_version", + ResourceReferences: []*api.ResourceReference{ + &api.ResourceReference{ + Key: &api.ResourceKey{ + Id: DefaultFakeUUID, + Type: api.ResourceType_PIPELINE, + }, + Relationship: api.Relationship_OWNER, + }, + }, + }, + []byte("apiVersion: argoproj.io/v1alpha1\nkind: Workflow")) + + // Switch to a bad object store + manager.objectStore = &FakeBadObjectStore{} + + // Delete the above pipeline_version. + err = manager.DeletePipelineVersion(FakeUUIDOne) + assert.NotNil(t, err) + + // Verify the version in deleting status. 
+ version, err := manager.pipelineStore.GetPipelineVersionWithStatus(FakeUUIDOne, model.PipelineVersionDeleting) + assert.Nil(t, err) + assert.NotNil(t, version) +} + +func TestCreateDefaultExperiment(t *testing.T) { + store := NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) + defer store.Close() + manager := NewResourceManager(store) + + experimentID, err := manager.CreateDefaultExperiment() + assert.Nil(t, err) + experiment, err := manager.GetExperiment(experimentID) + assert.Nil(t, err) + + expectedExperiment := &model.Experiment{ + UUID: DefaultFakeUUID, + CreatedAtInSec: 1, + Name: "Default", + Description: "All runs created without specifying an experiment will be grouped here.", + Namespace: "", + StorageState: "STORAGESTATE_AVAILABLE", + } + assert.Equal(t, expectedExperiment, experiment) +} + +func TestCreateDefaultExperiment_MultiUser(t *testing.T) { + viper.Set(common.MultiUserMode, "true") + defer viper.Set(common.MultiUserMode, "false") + + store := NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) + defer store.Close() + manager := NewResourceManager(store) + + experimentID, err := manager.CreateDefaultExperiment() + assert.Nil(t, err) + experiment, err := manager.GetExperiment(experimentID) + assert.Nil(t, err) + + expectedExperiment := &model.Experiment{ + UUID: DefaultFakeUUID, + CreatedAtInSec: 1, + Name: "Default", + Description: "All runs created without specifying an experiment will be grouped here.", + Namespace: "", + StorageState: "STORAGESTATE_AVAILABLE", + } + assert.Equal(t, expectedExperiment, experiment) +} diff --git a/backend/src/apiserver/resource/resource_manager_util.go b/backend/src/apiserver/resource/resource_manager_util.go index 406e6b1cb9f..5019596cd4e 100644 --- a/backend/src/apiserver/resource/resource_manager_util.go +++ b/backend/src/apiserver/resource/resource_manager_util.go @@ -16,18 +16,22 @@ package resource import ( "errors" + "fmt" + "regexp" + "strings" + "time" + wfv1 "github.com/argoproj/argo/pkg/apis/workflow/v1alpha1" "github.com/argoproj/argo/workflow/common" api "github.com/kubeflow/pipelines/backend/api/go_client" + "github.com/kubeflow/pipelines/backend/src/apiserver/client" + servercommon "github.com/kubeflow/pipelines/backend/src/apiserver/common" + "github.com/kubeflow/pipelines/backend/src/apiserver/model" "github.com/kubeflow/pipelines/backend/src/common/util" scheduledworkflow "github.com/kubeflow/pipelines/backend/src/crd/pkg/apis/scheduledworkflow/v1beta1" apierr "k8s.io/apimachinery/pkg/api/errors" - "k8s.io/apimachinery/pkg/apis/meta/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" - corev1 "k8s.io/client-go/kubernetes/typed/core/v1" - "regexp" - "strings" - "time" + v1 "k8s.io/apimachinery/pkg/apis/meta/v1" ) func toCRDTrigger(apiTrigger *api.Trigger) *scheduledworkflow.Trigger { @@ -171,12 +175,93 @@ func formulateRetryWorkflow(wf *util.Workflow) (*util.Workflow, []string, error) return util.NewWorkflow(newWF), podsToDelete, nil } -func deletePods(podClient corev1.PodInterface, podsToDelete []string) error { +func deletePods(k8sCoreClient client.KubernetesCoreInterface, podsToDelete []string, namespace string) error { for _, podId := range podsToDelete { - err := podClient.Delete(podId, &metav1.DeleteOptions{}) + err := k8sCoreClient.PodClient(namespace).Delete(podId, &metav1.DeleteOptions{}) if err != nil && !apierr.IsNotFound(err) { return util.NewInternalServerError(err, "Failed to delete pods.") } } return nil } + +// Mutate default values of specified pipeline spec. 
+// Args:
+//	text: the pipeline file, or a part of it, as a string.
+func PatchPipelineDefaultParameter(text string) (string, error) {
+	defaultBucket := servercommon.GetStringConfig(DefaultBucketNameEnvVar)
+	projectId := servercommon.GetStringConfig(ProjectIDEnvVar)
+	toPatch := map[string]string{
+		"{{kfp-default-bucket}}": defaultBucket,
+		"{{kfp-project-id}}":     projectId,
+	}
+	for key, value := range toPatch {
+		text = strings.Replace(text, key, value, -1)
+	}
+	return text, nil
+}
+
+// Patch the system-specified default parameters if available.
+func OverrideParameterWithSystemDefault(workflow util.Workflow, apiRun *api.Run) error {
+	// Patch the default values into the workflow spec.
+	if servercommon.GetBoolConfigWithDefault(HasDefaultBucketEnvVar, false) {
+		patchedSlice := make([]wfv1.Parameter, 0)
+		for _, currentParam := range workflow.Spec.Arguments.Parameters {
+			if currentParam.Value != nil {
+				desiredValue, err := PatchPipelineDefaultParameter(*currentParam.Value)
+				if err != nil {
+					return fmt.Errorf("failed to patch default value to pipeline. Error: %v", err)
+				}
+				patchedSlice = append(patchedSlice, wfv1.Parameter{
+					Name:  currentParam.Name,
+					Value: util.StringPointer(desiredValue),
+				})
+			} else if currentParam.Default != nil {
+				desiredValue, err := PatchPipelineDefaultParameter(*currentParam.Default)
+				if err != nil {
+					return fmt.Errorf("failed to patch default value to pipeline. Error: %v", err)
+				}
+				patchedSlice = append(patchedSlice, wfv1.Parameter{
+					Name:  currentParam.Name,
+					Value: util.StringPointer(desiredValue),
+				})
+			}
+		}
+		workflow.Spec.Arguments.Parameters = patchedSlice
+
+		// Patch the default values into apiRun as well.
+		for _, param := range apiRun.PipelineSpec.Parameters {
+			var err error
+			param.Value, err = PatchPipelineDefaultParameter(param.Value)
+			if err != nil {
+				return fmt.Errorf("failed to patch default value to pipeline. Error: %v", err)
+			}
+		}
+	}
+	return nil
+}
+
+// Convert PipelineId in PipelineSpec to the pipeline's default pipeline version.
+// This supports the legacy way of creating a run from a pipeline id; the
+// standard way to create a run is to specify a pipeline version.
+func ConvertPipelineIdToDefaultPipelineVersion(pipelineSpec *api.PipelineSpec, resourceReferences *[]*api.ResourceReference, r *ResourceManager) error {
+	if pipelineSpec.GetPipelineId() == "" {
+		return nil
+	}
+	// If there is already a pipeline version in resource references, don't convert the pipeline id.
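+	// Only a PIPELINE_VERSION reference with the CREATOR relationship counts as an explicit version choice.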
+ for _, reference := range *resourceReferences { + if reference.Key.Type == api.ResourceType_PIPELINE_VERSION && reference.Relationship == api.Relationship_CREATOR { + return nil + } + } + pipeline, err := r.pipelineStore.GetPipelineWithStatus(pipelineSpec.GetPipelineId(), model.PipelineReady) + if err != nil { + return util.Wrap(err, "Failed to find the specified pipeline") + } + // Add default pipeline version to resource references + *resourceReferences = append(*resourceReferences, &api.ResourceReference{ + Key: &api.ResourceKey{Type: api.ResourceType_PIPELINE_VERSION, Id: pipeline.DefaultVersionId}, + Relationship: api.Relationship_CREATOR, + }) + return nil +} diff --git a/backend/src/apiserver/resource/resource_manager_util_test.go b/backend/src/apiserver/resource/resource_manager_util_test.go index 8d94f2ac337..335b4667ac4 100644 --- a/backend/src/apiserver/resource/resource_manager_util_test.go +++ b/backend/src/apiserver/resource/resource_manager_util_test.go @@ -15,16 +15,18 @@ package resource import ( - "github.com/ghodss/yaml" - "github.com/kubeflow/pipelines/backend/src/common/util" "testing" "time" + "github.com/ghodss/yaml" + "github.com/kubeflow/pipelines/backend/src/apiserver/storage" + "github.com/kubeflow/pipelines/backend/src/common/util" + v1 "k8s.io/apimachinery/pkg/apis/meta/v1" + "github.com/golang/protobuf/ptypes/timestamp" api "github.com/kubeflow/pipelines/backend/api/go_client" scheduledworkflow "github.com/kubeflow/pipelines/backend/src/crd/pkg/apis/scheduledworkflow/v1beta1" "github.com/stretchr/testify/assert" - "k8s.io/apimachinery/pkg/apis/meta/v1" ) func TestToSwfCRDResourceGeneratedName_SpecialCharsAndSpace(t *testing.T) { @@ -235,16 +237,16 @@ status: ` var workflow util.Workflow - err := yaml.Unmarshal([]byte( wf), &workflow) + err := yaml.Unmarshal([]byte(wf), &workflow) assert.Nil(t, err) newWf, nodes, err := formulateRetryWorkflow(&workflow) newWfString, err := yaml.Marshal(newWf) assert.Nil(t, err) - assert.Equal(t, []string{"resubmit-hl9ft-3879090716"},nodes) + assert.Equal(t, []string{"resubmit-hl9ft-3879090716"}, nodes) expectedNewWfString := - `apiVersion: argoproj.io/v1alpha1 + `apiVersion: argoproj.io/v1alpha1 kind: Workflow metadata: creationTimestamp: "2019-08-02T07:15:14Z" @@ -319,7 +321,125 @@ status: startedAt: "2019-08-02T07:15:14Z" ` - assert.Equal(t, expectedNewWfString,string(newWfString)) + assert.Equal(t, expectedNewWfString, string(newWfString)) +} + +func TestConvertPipelineIdToDefaultPipelineVersion(t *testing.T) { + store, manager, experiment, pipeline := initWithExperimentAndPipeline(t) + defer store.Close() + // Create a new pipeline version with UUID being FakeUUID. + pipelineStore, ok := store.pipelineStore.(*storage.PipelineStore) + assert.True(t, ok) + pipelineStore.SetUUIDGenerator(util.NewFakeUUIDGeneratorOrFatal(FakeUUIDOne, nil)) + _, err := manager.CreatePipelineVersion(&api.PipelineVersion{ + Name: "version_for_run", + ResourceReferences: []*api.ResourceReference{ + &api.ResourceReference{ + Key: &api.ResourceKey{ + Id: pipeline.UUID, + Type: api.ResourceType_PIPELINE, + }, + Relationship: api.Relationship_OWNER, + }, + }, + }, []byte(testWorkflow.ToStringForStore())) + assert.Nil(t, err) + + // Create a run of the latest pipeline version, but by specifying the pipeline id. 
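+	// The conversion should append a CREATOR reference to the pipeline's default (latest) version, FakeUUIDOne.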
+ apiRun := &api.Run{ + Name: "run1", + PipelineSpec: &api.PipelineSpec{ + PipelineId: pipeline.UUID, + }, + ResourceReferences: []*api.ResourceReference{ + { + Key: &api.ResourceKey{Type: api.ResourceType_EXPERIMENT, Id: experiment.UUID}, + Relationship: api.Relationship_OWNER, + }, + }, + } + expectedApiRun := &api.Run{ + Name: "run1", + PipelineSpec: &api.PipelineSpec{ + PipelineId: pipeline.UUID, + }, + ResourceReferences: []*api.ResourceReference{ + { + Key: &api.ResourceKey{Type: api.ResourceType_EXPERIMENT, Id: experiment.UUID}, + Relationship: api.Relationship_OWNER, + }, + { + Key: &api.ResourceKey{Type: api.ResourceType_PIPELINE_VERSION, Id: FakeUUIDOne}, + Relationship: api.Relationship_CREATOR, + }, + }, + } + err = ConvertPipelineIdToDefaultPipelineVersion(apiRun.PipelineSpec, &apiRun.ResourceReferences, manager) + assert.Nil(t, err) + assert.Equal(t, expectedApiRun, apiRun) } +// No conversion if a pipeline version already exists in resource references. +func TestConvertPipelineIdToDefaultPipelineVersion_NoOp(t *testing.T) { + store, manager, experiment, pipeline := initWithExperimentAndPipeline(t) + defer store.Close() + // Create a new pipeline version with UUID being FakeUUID. + oldVersionId := pipeline.DefaultVersionId + pipelineStore, ok := store.pipelineStore.(*storage.PipelineStore) + assert.True(t, ok) + pipelineStore.SetUUIDGenerator(util.NewFakeUUIDGeneratorOrFatal(FakeUUIDOne, nil)) + _, err := manager.CreatePipelineVersion(&api.PipelineVersion{ + Name: "version_for_run", + ResourceReferences: []*api.ResourceReference{ + &api.ResourceReference{ + Key: &api.ResourceKey{ + Id: pipeline.UUID, + Type: api.ResourceType_PIPELINE, + }, + Relationship: api.Relationship_OWNER, + }, + }, + }, []byte(testWorkflow.ToStringForStore())) + assert.Nil(t, err) + // FakeUUID is the new default version's id. + assert.NotEqual(t, oldVersionId, FakeUUIDOne) + + // Create a run by specifying both the old pipeline version and the pipeline. + // As a result, the old version will be used and the pipeline id will be ignored. 
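+	// expectedApiRun below is identical to apiRun: the conversion must leave the references untouched.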
+ apiRun := &api.Run{ + Name: "run1", + PipelineSpec: &api.PipelineSpec{ + PipelineId: pipeline.UUID, + }, + ResourceReferences: []*api.ResourceReference{ + { + Key: &api.ResourceKey{Type: api.ResourceType_EXPERIMENT, Id: experiment.UUID}, + Relationship: api.Relationship_OWNER, + }, + { + Key: &api.ResourceKey{Type: api.ResourceType_PIPELINE_VERSION, Id: oldVersionId}, + Relationship: api.Relationship_CREATOR, + }, + }, + } + expectedApiRun := &api.Run{ + Name: "run1", + PipelineSpec: &api.PipelineSpec{ + PipelineId: pipeline.UUID, + }, + ResourceReferences: []*api.ResourceReference{ + { + Key: &api.ResourceKey{Type: api.ResourceType_EXPERIMENT, Id: experiment.UUID}, + Relationship: api.Relationship_OWNER, + }, + { + Key: &api.ResourceKey{Type: api.ResourceType_PIPELINE_VERSION, Id: oldVersionId}, + Relationship: api.Relationship_CREATOR, + }, + }, + } + err = ConvertPipelineIdToDefaultPipelineVersion(apiRun.PipelineSpec, &apiRun.ResourceReferences, manager) + assert.Nil(t, err) + assert.Equal(t, expectedApiRun, apiRun) +} diff --git a/backend/src/apiserver/server/BUILD.bazel b/backend/src/apiserver/server/BUILD.bazel index 3ffdcea7483..b26a2794b30 100644 --- a/backend/src/apiserver/server/BUILD.bazel +++ b/backend/src/apiserver/server/BUILD.bazel @@ -4,6 +4,7 @@ go_library( name = "go_default_library", srcs = [ "api_converter.go", + "auth_server.go", "experiment_server.go", "job_server.go", "list_request_util.go", @@ -20,6 +21,7 @@ go_library( visibility = ["//visibility:public"], deps = [ "//backend/api:go_default_library", + "//backend/src/apiserver/client:go_default_library", "//backend/src/apiserver/common:go_default_library", "//backend/src/apiserver/list:go_default_library", "//backend/src/apiserver/model:go_default_library", @@ -27,15 +29,17 @@ go_library( "//backend/src/common/util:go_default_library", "//backend/src/crd/pkg/apis/scheduledworkflow/v1beta1:go_default_library", "@com_github_argoproj_argo//pkg/apis/workflow/v1alpha1:go_default_library", - "@com_github_cenkalti_backoff//:go_default_library", "@com_github_golang_glog//:go_default_library", "@com_github_golang_protobuf//jsonpb:go_default_library_gen", + "@com_github_pkg_errors//:go_default_library", "@com_github_robfig_cron//:go_default_library", + "@com_github_spf13_viper//:go_default_library", "@com_github_stretchr_testify//assert:go_default_library", "@io_bazel_rules_go//proto/wkt:empty_go_proto", "@io_bazel_rules_go//proto/wkt:timestamp_go_proto", "@io_k8s_apimachinery//pkg/apis/meta/v1:go_default_library", "@org_golang_google_grpc//codes:go_default_library", + "@org_golang_google_grpc//metadata:go_default_library", ], ) @@ -43,6 +47,7 @@ go_test( name = "go_default_test", srcs = [ "api_converter_test.go", + "auth_server_test.go", "experiment_server_test.go", "job_server_test.go", "list_request_util_test.go", @@ -58,19 +63,21 @@ go_test( embed = [":go_default_library"], deps = [ "//backend/api:go_default_library", + "//backend/src/apiserver/client:go_default_library", "//backend/src/apiserver/common:go_default_library", "//backend/src/apiserver/list:go_default_library", "//backend/src/apiserver/model:go_default_library", "//backend/src/apiserver/resource:go_default_library", - "//backend/src/apiserver/storage:go_default_library", "//backend/src/common/util:go_default_library", "//backend/src/crd/pkg/apis/scheduledworkflow/v1beta1:go_default_library", "@com_github_argoproj_argo//pkg/apis/workflow/v1alpha1:go_default_library", "@com_github_google_go_cmp//cmp:go_default_library", + 
"@com_github_spf13_viper//:go_default_library", "@com_github_stretchr_testify//assert:go_default_library", "@io_bazel_rules_go//proto/wkt:timestamp_go_proto", "@io_k8s_apimachinery//pkg/apis/meta/v1:go_default_library", "@io_k8s_apimachinery//pkg/types:go_default_library", "@org_golang_google_grpc//codes:go_default_library", + "@org_golang_google_grpc//metadata:go_default_library", ], ) diff --git a/backend/src/apiserver/server/api_converter.go b/backend/src/apiserver/server/api_converter.go index 20da9ee97d3..9a4bde45796 100644 --- a/backend/src/apiserver/server/api_converter.go +++ b/backend/src/apiserver/server/api_converter.go @@ -26,11 +26,25 @@ import ( ) func ToApiExperiment(experiment *model.Experiment) *api.Experiment { + resourceReferences := []*api.ResourceReference(nil) + if common.IsMultiUserMode() { + resourceReferences = []*api.ResourceReference{ + &api.ResourceReference{ + Key: &api.ResourceKey{ + Type: api.ResourceType_NAMESPACE, + Id: experiment.Namespace, + }, + Relationship: api.Relationship_OWNER, + }, + } + } return &api.Experiment{ - Id: experiment.UUID, - Name: experiment.Name, - Description: experiment.Description, - CreatedAt: ×tamp.Timestamp{Seconds: experiment.CreatedAtInSec}, + Id: experiment.UUID, + Name: experiment.Name, + Description: experiment.Description, + CreatedAt: ×tamp.Timestamp{Seconds: experiment.CreatedAtInSec}, + ResourceReferences: resourceReferences, + StorageState: api.Experiment_StorageState(api.Experiment_StorageState_value[experiment.StorageState]), } } @@ -42,13 +56,6 @@ func ToApiExperiments(experiments []*model.Experiment) []*api.Experiment { return apiExperiments } -func ToModelExperiment(experiment *api.Experiment) *model.Experiment { - return &model.Experiment{ - Name: experiment.Name, - Description: experiment.Description, - } -} - func ToApiPipeline(pipeline *model.Pipeline) *api.Pipeline { params, err := toApiParameters(pipeline.Parameters) if err != nil { @@ -58,22 +65,21 @@ func ToApiPipeline(pipeline *model.Pipeline) *api.Pipeline { } } - // TODO(jingzhang36): uncomment when exposing versions to API. 
- // defaultVersion, err := ToApiPipelineVersion(pipeline.DefaultVersion) - // if err != nil { - // return &api.Pipeline{ - // Id: pipeline.UUID, - // Error: err.Error(), - // } - // } + defaultVersion, err := ToApiPipelineVersion(pipeline.DefaultVersion) + if err != nil { + return &api.Pipeline{ + Id: pipeline.UUID, + Error: err.Error(), + } + } return &api.Pipeline{ - Id: pipeline.UUID, - CreatedAt: ×tamp.Timestamp{Seconds: pipeline.CreatedAtInSec}, - Name: pipeline.Name, - Description: pipeline.Description, - Parameters: params, - // DefaultVersion: defaultVersion, + Id: pipeline.UUID, + CreatedAt: ×tamp.Timestamp{Seconds: pipeline.CreatedAtInSec}, + Name: pipeline.Name, + Description: pipeline.Description, + Parameters: params, + DefaultVersion: defaultVersion, } } @@ -160,15 +166,16 @@ func toApiRun(run *model.Run) *api.Run { } } return &api.Run{ - CreatedAt: ×tamp.Timestamp{Seconds: run.CreatedAtInSec}, - Id: run.UUID, - Metrics: metrics, - Name: run.DisplayName, - StorageState: api.Run_StorageState(api.Run_StorageState_value[run.StorageState]), - Description: run.Description, - ScheduledAt: ×tamp.Timestamp{Seconds: run.ScheduledAtInSec}, - FinishedAt: ×tamp.Timestamp{Seconds: run.FinishedAtInSec}, - Status: run.Conditions, + CreatedAt: ×tamp.Timestamp{Seconds: run.CreatedAtInSec}, + Id: run.UUID, + Metrics: metrics, + Name: run.DisplayName, + ServiceAccount: run.ServiceAccount, + StorageState: api.Run_StorageState(api.Run_StorageState_value[run.StorageState]), + Description: run.Description, + ScheduledAt: ×tamp.Timestamp{Seconds: run.ScheduledAtInSec}, + FinishedAt: ×tamp.Timestamp{Seconds: run.FinishedAtInSec}, + Status: run.Conditions, PipelineSpec: &api.PipelineSpec{ PipelineId: run.PipelineId, PipelineName: run.PipelineName, @@ -209,12 +216,14 @@ func ToApiJob(job *model.Job) *api.Job { return &api.Job{ Id: job.UUID, Name: job.DisplayName, + ServiceAccount: job.ServiceAccount, Description: job.Description, Enabled: job.Enabled, CreatedAt: ×tamp.Timestamp{Seconds: job.CreatedAtInSec}, UpdatedAt: ×tamp.Timestamp{Seconds: job.UpdatedAtInSec}, Status: job.Conditions, MaxConcurrency: job.MaxConcurrency, + NoCatchup: job.NoCatchup, Trigger: toApiTrigger(job.Trigger), PipelineSpec: &api.PipelineSpec{ PipelineId: job.PipelineId, @@ -267,6 +276,10 @@ func toApiResourceType(modelType common.ResourceType) api.ResourceType { return api.ResourceType_EXPERIMENT case common.Job: return api.ResourceType_JOB + case common.PipelineVersion: + return api.ResourceType_PIPELINE_VERSION + case common.Namespace: + return api.ResourceType_NAMESPACE default: return api.ResourceType_UNKNOWN_RESOURCE_TYPE } @@ -298,7 +311,7 @@ func toApiTrigger(trigger model.Trigger) *api.Trigger { return &api.Trigger{Trigger: &api.Trigger_CronSchedule{CronSchedule: &cronSchedule}} } - if trigger.IntervalSecond != nil { + if trigger.IntervalSecond != nil && *trigger.IntervalSecond != 0 { var periodicSchedule api.PeriodicSchedule periodicSchedule.IntervalSecond = *trigger.IntervalSecond if trigger.PeriodicScheduleStartTimeInSec != nil { diff --git a/backend/src/apiserver/server/api_converter_test.go b/backend/src/apiserver/server/api_converter_test.go index e123a2ccf9e..8603a355237 100644 --- a/backend/src/apiserver/server/api_converter_test.go +++ b/backend/src/apiserver/server/api_converter_test.go @@ -30,36 +30,34 @@ func TestToApiPipeline(t *testing.T) { UUID: "pipeline1", CreatedAtInSec: 1, Parameters: "[]", - // TODO(jingzhang36): uncomment when exposing versions to API. 
- // DefaultVersion: &model.PipelineVersion{ - // UUID: "pipelineversion1", - // CreatedAtInSec: 1, - // Parameters: "[]", - // PipelineId: "pipeline1", - // CodeSourceUrl: "http://repo/22222", - // }, + DefaultVersion: &model.PipelineVersion{ + UUID: "pipelineversion1", + CreatedAtInSec: 1, + Parameters: "[]", + PipelineId: "pipeline1", + CodeSourceUrl: "http://repo/22222", + }, } apiPipeline := ToApiPipeline(modelPipeline) expectedApiPipeline := &api.Pipeline{ Id: "pipeline1", CreatedAt: ×tamp.Timestamp{Seconds: 1}, Parameters: []*api.Parameter{}, - // TODO(jingzhang36): uncomment when exposing versions to API. - // DefaultVersion: &api.PipelineVersion{ - // Id: "pipelineversion1", - // CreatedAt: ×tamp.Timestamp{Seconds: 1}, - // Parameters: []*api.Parameter{}, - // CodeSourceUrl: "http://repo/22222", - // ResourceReferences: []*api.ResourceReference{ - // &api.ResourceReference{ - // Key: &api.ResourceKey{ - // Id: "pipeline1", - // Type: api.ResourceType_PIPELINE, - // }, - // Relationship: api.Relationship_OWNER, - // }, - // }, - // }, + DefaultVersion: &api.PipelineVersion{ + Id: "pipelineversion1", + CreatedAt: ×tamp.Timestamp{Seconds: 1}, + Parameters: []*api.Parameter{}, + CodeSourceUrl: "http://repo/22222", + ResourceReferences: []*api.ResourceReference{ + &api.ResourceReference{ + Key: &api.ResourceKey{ + Id: "pipeline1", + Type: api.ResourceType_PIPELINE, + }, + Relationship: api.Relationship_OWNER, + }, + }, + }, } assert.Equal(t, expectedApiPipeline, apiPipeline) } @@ -69,8 +67,7 @@ func TestToApiPipeline_ErrorParsingField(t *testing.T) { UUID: "pipeline1", CreatedAtInSec: 1, Parameters: "[invalid parameter", - // TODO(jingzhang36): uncomment when exposing versions to API. - // DefaultVersion: &model.PipelineVersion{}, + DefaultVersion: &model.PipelineVersion{}, } apiPipeline := ToApiPipeline(modelPipeline) expectedApiPipeline := &api.Pipeline{ @@ -412,6 +409,7 @@ func TestToApiJobs(t *testing.T) { }, }, MaxConcurrency: 2, + NoCatchup: true, PipelineSpec: model.PipelineSpec{ PipelineId: "2", PipelineName: "p2", @@ -447,6 +445,7 @@ func TestToApiJobs(t *testing.T) { CreatedAt: ×tamp.Timestamp{Seconds: 2}, UpdatedAt: ×tamp.Timestamp{Seconds: 2}, MaxConcurrency: 2, + NoCatchup: true, Trigger: &api.Trigger{ Trigger: &api.Trigger_CronSchedule{CronSchedule: &api.CronSchedule{ StartTime: ×tamp.Timestamp{Seconds: 2}, @@ -512,12 +511,51 @@ func TestToApiResourceReferences(t *testing.T) { ReferenceName: "e1", ReferenceType: common.Experiment, Relationship: common.Owner}, {ResourceUUID: "run1", ResourceType: common.Run, ReferenceUUID: "job1", ReferenceName: "j1", ReferenceType: common.Job, Relationship: common.Owner}, + {ResourceUUID: "run1", ResourceType: common.Run, ReferenceUUID: "pipelineversion1", + ReferenceName: "k1", ReferenceType: common.PipelineVersion, Relationship: common.Owner}, } expectedApiResourceReferences := []*api.ResourceReference{ {Key: &api.ResourceKey{Type: api.ResourceType_EXPERIMENT, Id: "experiment1"}, Name: "e1", Relationship: api.Relationship_OWNER}, {Key: &api.ResourceKey{Type: api.ResourceType_JOB, Id: "job1"}, Name: "j1", Relationship: api.Relationship_OWNER}, + {Key: &api.ResourceKey{Type: api.ResourceType_PIPELINE_VERSION, Id: "pipelineversion1"}, + Name: "k1", Relationship: api.Relationship_OWNER}, } assert.Equal(t, expectedApiResourceReferences, toApiResourceReferences(resourceReferences)) } + +func TestToApiExperiments(t *testing.T) { + exp1 := &model.Experiment{ + UUID: "exp1", + CreatedAtInSec: 1, + Name: "experiment1", + Description: "My name is 
experiment1",
+		StorageState:   "STORAGESTATE_AVAILABLE",
+	}
+	exp2 := &model.Experiment{
+		UUID:           "exp2",
+		CreatedAtInSec: 2,
+		Name:           "experiment2",
+		Description:    "My name is experiment2",
+		StorageState:   "STORAGESTATE_ARCHIVED",
+	}
+	apiExps := ToApiExperiments([]*model.Experiment{exp1, exp2})
+	expectedApiExps := []*api.Experiment{
+		{
+			Id:           "exp1",
+			Name:         "experiment1",
+			Description:  "My name is experiment1",
+			CreatedAt:    &timestamp.Timestamp{Seconds: 1},
+			StorageState: api.Experiment_StorageState(api.Experiment_StorageState_value["STORAGESTATE_AVAILABLE"]),
+		},
+		{
+			Id:           "exp2",
+			Name:         "experiment2",
+			Description:  "My name is experiment2",
+			CreatedAt:    &timestamp.Timestamp{Seconds: 2},
+			StorageState: api.Experiment_StorageState(api.Experiment_StorageState_value["STORAGESTATE_ARCHIVED"]),
+		},
+	}
+	assert.Equal(t, expectedApiExps, apiExps)
+}
diff --git a/backend/src/apiserver/server/auth_server.go b/backend/src/apiserver/server/auth_server.go
new file mode 100644
index 00000000000..0cc6750c6f1
--- /dev/null
+++ b/backend/src/apiserver/server/auth_server.go
@@ -0,0 +1,52 @@
+package server
+
+import (
+	"context"
+
+	"github.com/golang/protobuf/ptypes/empty"
+	api "github.com/kubeflow/pipelines/backend/api/go_client"
+	"github.com/kubeflow/pipelines/backend/src/apiserver/resource"
+	"github.com/kubeflow/pipelines/backend/src/common/util"
+)
+
+type AuthServer struct {
+	resourceManager *resource.ResourceManager
+}
+
+func (s *AuthServer) Authorize(ctx context.Context, request *api.AuthorizeRequest) (
+	*empty.Empty, error) {
+	err := ValidateAuthorizeRequest(request)
+	if err != nil {
+		return nil, util.Wrap(err, "Authorize request is not valid")
+	}
+
+	// TODO: when KFP changes authorization implementation to have more
+	// granularity, we need to start using resources and verb info in the
+	// request.
+	err = CanAccessNamespace(s.resourceManager, ctx, request.Namespace)
+	if err != nil {
+		return nil, util.Wrap(err, "Failed to authorize the request")
+	}
+
+	return &empty.Empty{}, nil
+}
+
+func ValidateAuthorizeRequest(request *api.AuthorizeRequest) error {
+	if request == nil {
+		return util.NewInvalidInputError("request object is empty.")
+	}
+	if len(request.Namespace) == 0 {
+		return util.NewInvalidInputError("Namespace is empty. Please specify a valid namespace.")
+	}
+	if request.Resources == api.AuthorizeRequest_UNASSIGNED_RESOURCES {
+		return util.NewInvalidInputError("Resources not specified. Please specify valid resources.")
+	}
+	if request.Verb == api.AuthorizeRequest_UNASSIGNED_VERB {
+		return util.NewInvalidInputError("Verb not specified. 
Please specify a valid verb.") + } + return nil +} + +func NewAuthServer(resourceManager *resource.ResourceManager) *AuthServer { + return &AuthServer{resourceManager: resourceManager} +} diff --git a/backend/src/apiserver/server/auth_server_test.go b/backend/src/apiserver/server/auth_server_test.go new file mode 100644 index 00000000000..b6797d28b95 --- /dev/null +++ b/backend/src/apiserver/server/auth_server_test.go @@ -0,0 +1,98 @@ +package server + +import ( + "context" + "testing" + + api "github.com/kubeflow/pipelines/backend/api/go_client" + "github.com/kubeflow/pipelines/backend/src/apiserver/client" + "github.com/kubeflow/pipelines/backend/src/apiserver/common" + "github.com/spf13/viper" + "github.com/stretchr/testify/assert" + "google.golang.org/grpc/metadata" +) + +func TestAuthorizeRequest_SingleUserMode(t *testing.T) { + clients, manager, _ := initWithExperiment(t) + defer clients.Close() + authServer := AuthServer{resourceManager: manager} + clients.KfamClientFake = client.NewFakeKFAMClientUnauthorized() + + md := metadata.New(map[string]string{}) + ctx := metadata.NewIncomingContext(context.Background(), md) + + request := &api.AuthorizeRequest{ + Namespace: "ns1", + Resources: api.AuthorizeRequest_VIEWERS, + Verb: api.AuthorizeRequest_GET, + } + + _, err := authServer.Authorize(ctx, request) + // Authz is completely skipped without checking anything. + assert.Nil(t, err) +} + +func TestAuthorizeRequest_InvalidRequest(t *testing.T) { + viper.Set(common.MultiUserMode, "true") + defer viper.Set(common.MultiUserMode, "false") + + clients, manager, _ := initWithExperiment(t) + defer clients.Close() + authServer := AuthServer{resourceManager: manager} + + md := metadata.New(map[string]string{}) + ctx := metadata.NewIncomingContext(context.Background(), md) + + request := &api.AuthorizeRequest{ + Namespace: "", + Resources: api.AuthorizeRequest_UNASSIGNED_RESOURCES, + Verb: api.AuthorizeRequest_UNASSIGNED_VERB, + } + + _, err := authServer.Authorize(ctx, request) + assert.Error(t, err) + assert.EqualError(t, err, "Authorize request is not valid: Invalid input error: Namespace is empty. 
Please specify a valid namespace.") +} + +func TestAuthorizeRequest_Authorized(t *testing.T) { + viper.Set(common.MultiUserMode, "true") + defer viper.Set(common.MultiUserMode, "false") + + clients, manager, _ := initWithExperiment(t) + defer clients.Close() + authServer := AuthServer{resourceManager: manager} + + md := metadata.New(map[string]string{common.GoogleIAPUserIdentityHeader: "accounts.google.com:user@google.com"}) + ctx := metadata.NewIncomingContext(context.Background(), md) + + request := &api.AuthorizeRequest{ + Namespace: "ns1", + Resources: api.AuthorizeRequest_VIEWERS, + Verb: api.AuthorizeRequest_GET, + } + + _, err := authServer.Authorize(ctx, request) + assert.Nil(t, err) +} + +func TestAuthorizeRequest_Unauthorized(t *testing.T) { + viper.Set(common.MultiUserMode, "true") + defer viper.Set(common.MultiUserMode, "false") + + clients, manager, _ := initWithExperiment_KFAM_Unauthorized(t) + defer clients.Close() + authServer := AuthServer{resourceManager: manager} + + md := metadata.New(map[string]string{common.GoogleIAPUserIdentityHeader: "accounts.google.com:user@google.com"}) + ctx := metadata.NewIncomingContext(context.Background(), md) + + request := &api.AuthorizeRequest{ + Namespace: "ns1", + Resources: api.AuthorizeRequest_VIEWERS, + Verb: api.AuthorizeRequest_GET, + } + + _, err := authServer.Authorize(ctx, request) + assert.Error(t, err) + assert.EqualError(t, err, "Failed to authorize the request: Failed to authorize namespace: BadRequestError: Unauthorized access for user@google.com to namespace ns1: Unauthorized access for user@google.com to namespace ns1") +} diff --git a/backend/src/apiserver/server/experiment_server.go b/backend/src/apiserver/server/experiment_server.go index 61ac9bfd49a..2401fbbf9ef 100644 --- a/backend/src/apiserver/server/experiment_server.go +++ b/backend/src/apiserver/server/experiment_server.go @@ -5,9 +5,11 @@ import ( "github.com/golang/protobuf/ptypes/empty" api "github.com/kubeflow/pipelines/backend/api/go_client" + "github.com/kubeflow/pipelines/backend/src/apiserver/common" "github.com/kubeflow/pipelines/backend/src/apiserver/model" "github.com/kubeflow/pipelines/backend/src/apiserver/resource" "github.com/kubeflow/pipelines/backend/src/common/util" + "github.com/pkg/errors" ) type ExperimentServer struct { @@ -20,7 +22,13 @@ func (s *ExperimentServer) CreateExperiment(ctx context.Context, request *api.Cr if err != nil { return nil, util.Wrap(err, "Validate experiment request failed.") } - newExperiment, err := s.resourceManager.CreateExperiment(ToModelExperiment(request.Experiment)) + + err = CanAccessNamespaceInResourceReferences(s.resourceManager, ctx, request.Experiment.ResourceReferences) + if err != nil { + return nil, util.Wrap(err, "Failed to authorize the request.") + } + + newExperiment, err := s.resourceManager.CreateExperiment(request.Experiment) if err != nil { return nil, util.Wrap(err, "Create experiment failed.") } @@ -29,6 +37,11 @@ func (s *ExperimentServer) CreateExperiment(ctx context.Context, request *api.Cr func (s *ExperimentServer) GetExperiment(ctx context.Context, request *api.GetExperimentRequest) ( *api.Experiment, error) { + err := s.canAccessExperiment(ctx, request.Id) + if err != nil { + return nil, util.Wrap(err, "Failed to authorize the request.") + } + experiment, err := s.resourceManager.GetExperiment(request.Id) if err != nil { return nil, util.Wrap(err, "Get experiment failed.") @@ -44,7 +57,35 @@ func (s *ExperimentServer) ListExperiment(ctx context.Context, request *api.List return nil, 
util.Wrap(err, "Failed to create list options")
 	}
 
-	experiments, total_size, nextPageToken, err := s.resourceManager.ListExperiments(opts)
+	filterContext, err := ValidateFilter(request.ResourceReferenceKey)
+	if err != nil {
+		return nil, util.Wrap(err, "Validating filter failed.")
+	}
+
+	refKey := filterContext.ReferenceKey
+	if common.IsMultiUserMode() {
+		if refKey == nil || refKey.Type != common.Namespace {
+			return nil, util.NewInvalidInputError("Invalid resource references for experiment. ListExperiment requires filtering by namespace.")
+		}
+		namespace := refKey.ID
+		if len(namespace) == 0 {
+			return nil, util.NewInvalidInputError("Invalid resource references for experiment. Namespace is empty.")
+		}
+		err = isAuthorized(s.resourceManager, ctx, namespace)
+		if err != nil {
+			return nil, util.Wrap(err, "Failed to authorize with API resource references")
+		}
+	} else {
+		if refKey != nil && refKey.Type == common.Namespace && len(refKey.ID) > 0 {
+			return nil, util.NewInvalidInputError("In single-user mode, ListExperiment cannot filter by namespace.")
+		}
+		// In single-user mode, apply the filter with an empty namespace for backward compatibility.
+		filterContext = &common.FilterContext{
+			ReferenceKey: &common.ReferenceKey{Type: common.Namespace, ID: ""},
+		}
+	}
+
+	experiments, total_size, nextPageToken, err := s.resourceManager.ListExperiments(filterContext, opts)
 	if err != nil {
 		return nil, util.Wrap(err, "List experiments failed.")
 	}
@@ -56,7 +97,12 @@ func (s *ExperimentServer) ListExperiment(ctx context.Context, request *api.List
 }
 
 func (s *ExperimentServer) DeleteExperiment(ctx context.Context, request *api.DeleteExperimentRequest) (*empty.Empty, error) {
-	err := s.resourceManager.DeleteExperiment(request.Id)
+	err := s.canAccessExperiment(ctx, request.Id)
+	if err != nil {
+		return nil, util.Wrap(err, "Failed to authorize the request.")
+	}
+
+	err = s.resourceManager.DeleteExperiment(request.Id)
 	if err != nil {
 		return nil, err
 	}
@@ -67,9 +113,70 @@ func ValidateCreateExperimentRequest(request *api.CreateExperimentRequest) error
 	if request.Experiment == nil || request.Experiment.Name == "" {
 		return util.NewInvalidInputError("Experiment name is empty. Please specify a valid experiment name.")
 	}
+
+	resourceReferences := request.Experiment.GetResourceReferences()
+	if common.IsMultiUserMode() {
+		if len(resourceReferences) != 1 ||
+			resourceReferences[0].Key.Type != api.ResourceType_NAMESPACE ||
+			resourceReferences[0].Relationship != api.Relationship_OWNER {
+			return util.NewInvalidInputError(
+				"Invalid resource references for experiment. Expect one namespace type with owner relationship. Got: %v", resourceReferences)
+		}
+		namespace := common.GetNamespaceFromAPIResourceReferences(request.Experiment.ResourceReferences)
+		if len(namespace) == 0 {
+			return util.NewInvalidInputError("Invalid resource references for experiment. Namespace is empty.")
+		}
+	} else if len(resourceReferences) > 0 {
+		return util.NewInvalidInputError("In single-user mode, CreateExperimentRequest shouldn't contain resource references.")
+	}
 	return nil
 }
 
+// TODO(chensun): consider refactoring the code to get rid of double-query of experiment.
+func (s *ExperimentServer) canAccessExperiment(ctx context.Context, experimentID string) error {
+	if !common.IsMultiUserMode() {
+		// Skip authorization if not multi-user mode.
+ return nil + } + namespace, err := s.resourceManager.GetNamespaceFromExperimentID(experimentID) + if err != nil { + return util.Wrap(err, "Failed to authorize with the experiment ID.") + } + if len(namespace) == 0 { + return util.NewInternalServerError(errors.New("Empty namespace"), "The experiment doesn't have a valid namespace.") + } + + err = isAuthorized(s.resourceManager, ctx, namespace) + if err != nil { + return util.Wrap(err, "Failed to authorize with API resource references") + } + return nil +} + +func (s *ExperimentServer) ArchiveExperiment(ctx context.Context, request *api.ArchiveExperimentRequest) (*empty.Empty, error) { + err := s.canAccessExperiment(ctx, request.Id) + if err != nil { + return nil, util.Wrap(err, "Failed to authorize the requests.") + } + err = s.resourceManager.ArchiveExperiment(request.Id) + if err != nil { + return nil, err + } + return &empty.Empty{}, nil +} + +func (s *ExperimentServer) UnarchiveExperiment(ctx context.Context, request *api.UnarchiveExperimentRequest) (*empty.Empty, error) { + err := s.canAccessExperiment(ctx, request.Id) + if err != nil { + return nil, util.Wrap(err, "Failed to authorize the requests.") + } + err = s.resourceManager.UnarchiveExperiment(request.Id) + if err != nil { + return nil, err + } + return &empty.Empty{}, nil +} + func NewExperimentServer(resourceManager *resource.ResourceManager) *ExperimentServer { return &ExperimentServer{resourceManager: resourceManager} } diff --git a/backend/src/apiserver/server/experiment_server_test.go b/backend/src/apiserver/server/experiment_server_test.go index 28cb34221d8..0b9592bcab8 100644 --- a/backend/src/apiserver/server/experiment_server_test.go +++ b/backend/src/apiserver/server/experiment_server_test.go @@ -1,13 +1,20 @@ package server import ( + "context" + "strings" "testing" "github.com/golang/protobuf/ptypes/timestamp" + "github.com/google/go-cmp/cmp" api "github.com/kubeflow/pipelines/backend/api/go_client" + "github.com/kubeflow/pipelines/backend/src/apiserver/client" + "github.com/kubeflow/pipelines/backend/src/apiserver/common" "github.com/kubeflow/pipelines/backend/src/apiserver/resource" "github.com/kubeflow/pipelines/backend/src/common/util" + "github.com/spf13/viper" "github.com/stretchr/testify/assert" + "google.golang.org/grpc/metadata" ) func TestCreateExperiment(t *testing.T) { @@ -19,10 +26,11 @@ func TestCreateExperiment(t *testing.T) { result, err := server.CreateExperiment(nil, &api.CreateExperimentRequest{Experiment: experiment}) assert.Nil(t, err) expectedExperiment := &api.Experiment{ - Id: resource.DefaultFakeUUID, - Name: "ex1", - Description: "first experiment", - CreatedAt: ×tamp.Timestamp{Seconds: 1}, + Id: resource.DefaultFakeUUID, + Name: "ex1", + Description: "first experiment", + CreatedAt: ×tamp.Timestamp{Seconds: 1}, + StorageState: api.Experiment_STORAGESTATE_AVAILABLE, } assert.Equal(t, expectedExperiment, result) } @@ -38,6 +46,89 @@ func TestCreateExperiment_Failed(t *testing.T) { assert.Contains(t, err.Error(), "Create experiment failed.") } +func TestCreateExperiment_SingleUser_NamespaceNotAllowed(t *testing.T) { + clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) + resourceManager := resource.NewResourceManager(clientManager) + server := ExperimentServer{resourceManager: resourceManager} + resourceReferences := []*api.ResourceReference{ + { + Key: &api.ResourceKey{Type: api.ResourceType_NAMESPACE, Id: "ns1"}, + Relationship: api.Relationship_OWNER, + }, + } + experiment := &api.Experiment{ + Name: "exp1", 
+ Description: "first experiment", + ResourceReferences: resourceReferences, + } + + _, err := server.CreateExperiment(nil, &api.CreateExperimentRequest{Experiment: experiment}) + assert.NotNil(t, err) + assert.Contains(t, err.Error(), "In single-user mode, CreateExperimentRequest shouldn't contain resource references.") +} + +func TestCreateExperiment_Unauthorized(t *testing.T) { + viper.Set(common.MultiUserMode, "true") + defer viper.Set(common.MultiUserMode, "false") + + md := metadata.New(map[string]string{common.GoogleIAPUserIdentityHeader: common.GoogleIAPUserIdentityPrefix + "user@google.com"}) + ctx := metadata.NewIncomingContext(context.Background(), md) + + clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) + clientManager.KfamClientFake = client.NewFakeKFAMClientUnauthorized() + resourceManager := resource.NewResourceManager(clientManager) + defer clientManager.Close() + + server := ExperimentServer{resourceManager: resourceManager} + experiment := &api.Experiment{ + Name: "exp1", + Description: "first experiment", + ResourceReferences: []*api.ResourceReference{ + { + Key: &api.ResourceKey{Type: api.ResourceType_NAMESPACE, Id: "ns1"}, + Relationship: api.Relationship_OWNER, + }, + }} + + _, err := server.CreateExperiment(ctx, &api.CreateExperimentRequest{Experiment: experiment}) + assert.NotNil(t, err) + assert.Contains(t, err.Error(), "Unauthorized access") +} + +func TestCreateExperiment_Multiuser(t *testing.T) { + viper.Set(common.MultiUserMode, "true") + defer viper.Set(common.MultiUserMode, "false") + md := metadata.New(map[string]string{common.GoogleIAPUserIdentityHeader: common.GoogleIAPUserIdentityPrefix + "user@google.com"}) + ctx := metadata.NewIncomingContext(context.Background(), md) + + clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) + resourceManager := resource.NewResourceManager(clientManager) + server := ExperimentServer{resourceManager: resourceManager} + resourceReferences := []*api.ResourceReference{ + { + Key: &api.ResourceKey{Type: api.ResourceType_NAMESPACE, Id: "ns1"}, + Relationship: api.Relationship_OWNER, + }, + } + experiment := &api.Experiment{ + Name: "exp1", + Description: "first experiment", + ResourceReferences: resourceReferences, + } + + result, err := server.CreateExperiment(ctx, &api.CreateExperimentRequest{Experiment: experiment}) + assert.Nil(t, err) + expectedExperiment := &api.Experiment{ + Id: resource.DefaultFakeUUID, + Name: "exp1", + Description: "first experiment", + CreatedAt: ×tamp.Timestamp{Seconds: 1}, + ResourceReferences: resourceReferences, + StorageState: api.Experiment_STORAGESTATE_AVAILABLE, + } + assert.Equal(t, expectedExperiment, result) +} + func TestGetExperiment(t *testing.T) { clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) resourceManager := resource.NewResourceManager(clientManager) @@ -48,10 +139,11 @@ func TestGetExperiment(t *testing.T) { assert.Nil(t, err) result, err := server.GetExperiment(nil, &api.GetExperimentRequest{Id: createResult.Id}) expectedExperiment := &api.Experiment{ - Id: createResult.Id, - Name: "ex1", - Description: "first experiment", - CreatedAt: ×tamp.Timestamp{Seconds: 1}, + Id: createResult.Id, + Name: "ex1", + Description: "first experiment", + CreatedAt: ×tamp.Timestamp{Seconds: 1}, + StorageState: api.Experiment_STORAGESTATE_AVAILABLE, } assert.Equal(t, expectedExperiment, result) } @@ -70,6 +162,58 @@ func TestGetExperiment_Failed(t *testing.T) { assert.Contains(t, err.Error(), "Get 
experiment failed.") } +func TestGetExperiment_Unauthorized(t *testing.T) { + viper.Set(common.MultiUserMode, "true") + defer viper.Set(common.MultiUserMode, "false") + + md := metadata.New(map[string]string{common.GoogleIAPUserIdentityHeader: common.GoogleIAPUserIdentityPrefix + "user@google.com"}) + ctx := metadata.NewIncomingContext(context.Background(), md) + + clients, manager, _ := initWithExperiment_KFAM_Unauthorized(t) + defer clients.Close() + + server := ExperimentServer{manager} + + _, err := server.GetExperiment(ctx, &api.GetExperimentRequest{Id: resource.DefaultFakeUUID}) + assert.NotNil(t, err) + assert.Contains(t, err.Error(), "Unauthorized access") +} + +func TestGetExperiment_Multiuser(t *testing.T) { + viper.Set(common.MultiUserMode, "true") + defer viper.Set(common.MultiUserMode, "false") + md := metadata.New(map[string]string{common.GoogleIAPUserIdentityHeader: common.GoogleIAPUserIdentityPrefix + "user@google.com"}) + ctx := metadata.NewIncomingContext(context.Background(), md) + + clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) + resourceManager := resource.NewResourceManager(clientManager) + server := ExperimentServer{resourceManager: resourceManager} + resourceReferences := []*api.ResourceReference{ + { + Key: &api.ResourceKey{Type: api.ResourceType_NAMESPACE, Id: "ns1"}, + Relationship: api.Relationship_OWNER, + }, + } + experiment := &api.Experiment{ + Name: "exp1", + Description: "first experiment", + ResourceReferences: resourceReferences, + } + + createResult, err := server.CreateExperiment(ctx, &api.CreateExperimentRequest{Experiment: experiment}) + assert.Nil(t, err) + result, err := server.GetExperiment(ctx, &api.GetExperimentRequest{Id: createResult.Id}) + expectedExperiment := &api.Experiment{ + Id: createResult.Id, + Name: "exp1", + Description: "first experiment", + CreatedAt: ×tamp.Timestamp{Seconds: 1}, + ResourceReferences: resourceReferences, + StorageState: api.Experiment_STORAGESTATE_AVAILABLE, + } + assert.Equal(t, expectedExperiment, result) +} + func TestListExperiment(t *testing.T) { clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) resourceManager := resource.NewResourceManager(clientManager) @@ -80,10 +224,11 @@ func TestListExperiment(t *testing.T) { assert.Nil(t, err) result, err := server.ListExperiment(nil, &api.ListExperimentsRequest{}) expectedExperiment := []*api.Experiment{{ - Id: createResult.Id, - Name: "ex1", - Description: "first experiment", - CreatedAt: ×tamp.Timestamp{Seconds: 1}, + Id: createResult.Id, + Name: "ex1", + Description: "first experiment", + CreatedAt: ×tamp.Timestamp{Seconds: 1}, + StorageState: api.Experiment_STORAGESTATE_AVAILABLE, }} assert.Equal(t, expectedExperiment, result.Experiments) } @@ -102,9 +247,362 @@ func TestListExperiment_Failed(t *testing.T) { assert.Contains(t, err.Error(), "List experiments failed.") } -func TestValidateCreateExperimentRequest_EmptyName(t *testing.T) { - experiment := &api.Experiment{Description: "first experiment"} - err := ValidateCreateExperimentRequest(&api.CreateExperimentRequest{Experiment: experiment}) +func TestListExperiment_SingleUser_NamespaceNotAllowed(t *testing.T) { + clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) + resourceManager := resource.NewResourceManager(clientManager) + server := ExperimentServer{resourceManager: resourceManager} + experiment := &api.Experiment{Name: "ex1", Description: "first experiment"} + + _, err := server.CreateExperiment(nil, 
&api.CreateExperimentRequest{Experiment: experiment}) + assert.Nil(t, err) + _, err = server.ListExperiment(nil, &api.ListExperimentsRequest{ + ResourceReferenceKey: &api.ResourceKey{ + Type: api.ResourceType_NAMESPACE, + Id: "ns1", + }, + }) + assert.NotNil(t, err) + assert.Contains(t, err.Error(), "In single-user mode, ListExperiment cannot filter by namespace.") +} + +func TestListExperiment_Unauthorized(t *testing.T) { + viper.Set(common.MultiUserMode, "true") + defer viper.Set(common.MultiUserMode, "false") + + md := metadata.New(map[string]string{common.GoogleIAPUserIdentityHeader: common.GoogleIAPUserIdentityPrefix + "user@google.com"}) + ctx := metadata.NewIncomingContext(context.Background(), md) + + clients, manager, _ := initWithExperiment_KFAM_Unauthorized(t) + defer clients.Close() + + server := ExperimentServer{manager} + + _, err := server.ListExperiment(ctx, &api.ListExperimentsRequest{ + ResourceReferenceKey: &api.ResourceKey{ + Type: api.ResourceType_NAMESPACE, + Id: "ns1", + }, + }) assert.NotNil(t, err) - assert.Contains(t, err.Error(), "name is empty") + assert.Contains(t, err.Error(), "Unauthorized access") +} + +func TestListExperiment_Multiuser(t *testing.T) { + viper.Set(common.MultiUserMode, "true") + defer viper.Set(common.MultiUserMode, "false") + + md := metadata.New(map[string]string{common.GoogleIAPUserIdentityHeader: common.GoogleIAPUserIdentityPrefix + "user@google.com"}) + ctx := metadata.NewIncomingContext(context.Background(), md) + + clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) + resourceManager := resource.NewResourceManager(clientManager) + server := ExperimentServer{resourceManager: resourceManager} + resourceReferences := []*api.ResourceReference{ + { + Key: &api.ResourceKey{Type: api.ResourceType_NAMESPACE, Id: "ns1"}, + Relationship: api.Relationship_OWNER, + }, + } + experiment := &api.Experiment{ + Name: "exp1", + Description: "first experiment", + ResourceReferences: resourceReferences, + } + + createResult, err := server.CreateExperiment(ctx, &api.CreateExperimentRequest{Experiment: experiment}) + assert.Nil(t, err) + + tests := []struct { + name string + request *api.ListExperimentsRequest + wantError bool + errorMessage string + expectedExperiments []*api.Experiment + }{ + { + "Valid", + &api.ListExperimentsRequest{ + ResourceReferenceKey: &api.ResourceKey{ + Type: api.ResourceType_NAMESPACE, + Id: "ns1", + }, + }, + false, + "", + []*api.Experiment{{ + Id: createResult.Id, + Name: "exp1", + Description: "first experiment", + CreatedAt: ×tamp.Timestamp{Seconds: 1}, + ResourceReferences: resourceReferences, + StorageState: api.Experiment_STORAGESTATE_AVAILABLE, + }}, + }, + { + "Valid but empty result", + &api.ListExperimentsRequest{ + ResourceReferenceKey: &api.ResourceKey{ + Type: api.ResourceType_NAMESPACE, + Id: "ns2", + }, + }, + false, + "", + []*api.Experiment{}, + }, + { + "Missing resource reference key", + &api.ListExperimentsRequest{}, + true, + "Invalid resource references for experiment.", + nil, + }, + { + "Invalid resource reference key type", + &api.ListExperimentsRequest{ + ResourceReferenceKey: &api.ResourceKey{ + Type: api.ResourceType_EXPERIMENT, + Id: "fake_id", + }, + }, + true, + "Invalid resource references for experiment.", + nil, + }, + { + "Empty namespace", + &api.ListExperimentsRequest{ + ResourceReferenceKey: &api.ResourceKey{ + Type: api.ResourceType_NAMESPACE, + Id: "", + }, + }, + true, + "Invalid resource references for experiment. 
Namespace is empty.",
+			nil,
+		},
+	}
+
+	for _, tc := range tests {
+		response, err := server.ListExperiment(ctx, tc.request)
+		if tc.wantError {
+			if err == nil {
+				t.Errorf("TestListExperiment_Multiuser(%v) expect error but got nil", tc.name)
+			} else if !strings.Contains(err.Error(), tc.errorMessage) {
+				t.Errorf("TestListExperiment_Multiuser(%v) expect error containing: %v, but got: %v", tc.name, tc.errorMessage, err)
+			}
+		} else {
+			if err != nil {
+				t.Errorf("TestListExperiment_Multiuser(%v) expect no error but got %v", tc.name, err)
+			} else if !cmp.Equal(tc.expectedExperiments, response.Experiments) {
+				t.Errorf("TestListExperiment_Multiuser(%v) expect (%+v) but got (%+v)", tc.name, tc.expectedExperiments, response.Experiments)
+			}
+		}
+	}
+}
+
+func TestValidateCreateExperimentRequest(t *testing.T) {
+	tests := []struct {
+		name         string
+		experiment   *api.Experiment
+		wantError    bool
+		errorMessage string
+	}{
+		{
+			"Valid",
+			&api.Experiment{Name: "exp1", Description: "first experiment"},
+			false,
+			"",
+		},
+		{
+			"Empty name",
+			&api.Experiment{Description: "first experiment"},
+			true,
+			"name is empty",
+		},
+	}
+
+	for _, tc := range tests {
+		err := ValidateCreateExperimentRequest(&api.CreateExperimentRequest{Experiment: tc.experiment})
+		if !tc.wantError && err != nil {
+			t.Errorf("TestValidateCreateExperimentRequest(%v) expect no error but got %v", tc.name, err)
+		}
+		if tc.wantError {
+			if err == nil {
+				t.Errorf("TestValidateCreateExperimentRequest(%v) expect error but got nil", tc.name)
+			} else if !strings.Contains(err.Error(), tc.errorMessage) {
+				t.Errorf("TestValidateCreateExperimentRequest(%v) expect error containing: %v, but got: %v", tc.name, tc.errorMessage, err)
+			}
+		}
+	}
+}
+
+func TestValidateCreateExperimentRequest_Multiuser(t *testing.T) {
+	viper.Set(common.MultiUserMode, "true")
+	defer viper.Set(common.MultiUserMode, "false")
+	tests := []struct {
+		name         string
+		experiment   *api.Experiment
+		wantError    bool
+		errorMessage string
+	}{
+		{
+			"Valid",
+			&api.Experiment{
+				Name:        "exp1",
+				Description: "first experiment",
+				ResourceReferences: []*api.ResourceReference{
+					{
+						Key:          &api.ResourceKey{Type: api.ResourceType_NAMESPACE, Id: "ns1"},
+						Relationship: api.Relationship_OWNER,
+					},
+				},
+			},
+			false,
+			"",
+		},
+		{
+			"Missing namespace",
+			&api.Experiment{
+				Name:        "exp1",
+				Description: "first experiment",
+			},
+			true,
+			"Invalid resource references for experiment.",
+		},
+		{
+			"Empty namespace",
+			&api.Experiment{
+				Name:        "exp1",
+				Description: "first experiment",
+				ResourceReferences: []*api.ResourceReference{
+					{
+						Key:          &api.ResourceKey{Type: api.ResourceType_NAMESPACE, Id: ""},
+						Relationship: api.Relationship_OWNER,
+					},
+				},
+			},
+			true,
+			"Invalid resource references for experiment. 
Namespace is empty.", + }, + { + "Multiple namespace", + &api.Experiment{ + Name: "exp1", + Description: "first experiment", + ResourceReferences: []*api.ResourceReference{ + { + Key: &api.ResourceKey{Type: api.ResourceType_NAMESPACE, Id: "ns1"}, + Relationship: api.Relationship_OWNER, + }, + { + Key: &api.ResourceKey{Type: api.ResourceType_NAMESPACE, Id: "ns2"}, + Relationship: api.Relationship_OWNER, + }, + }, + }, + true, + "Invalid resource references for experiment.", + }, + { + "Invalid resource type", + &api.Experiment{ + Name: "exp1", + Description: "first experiment", + ResourceReferences: []*api.ResourceReference{ + { + Key: &api.ResourceKey{Type: api.ResourceType_EXPERIMENT, Id: "exp2"}, + Relationship: api.Relationship_OWNER, + }, + }, + }, + true, + "Invalid resource references for experiment.", + }, + } + + for _, tc := range tests { + err := ValidateCreateExperimentRequest(&api.CreateExperimentRequest{Experiment: tc.experiment}) + if !tc.wantError && err != nil { + t.Errorf("TestValidateCreateExperimentRequest(%v) expect no error but got %v", tc.name, err) + } + if tc.wantError { + if err == nil { + t.Errorf("TestValidateCreateExperimentRequest(%v) expect error but got nil", tc.name) + } else if !strings.Contains(err.Error(), tc.errorMessage) { + t.Errorf("TestValidateCreateExperimentRequest(%v) expect error containing: %v, but got: %v", tc.name, tc.errorMessage, err) + } + } + } +} + +func TestArchiveAndUnarchiveExperiment(t *testing.T) { + // Create experiment and runs/jobs under it. + clients, manager, experiment := initWithExperimentAndPipelineVersion(t) + defer clients.Close() + runServer := NewRunServer(manager) + run1 := &api.Run{ + Name: "run1", + ResourceReferences: validReferencesOfExperimentAndPipelineVersion, + } + err := runServer.validateCreateRunRequest(&api.CreateRunRequest{Run: run1}) + assert.Nil(t, err) + _, err = runServer.CreateRun(nil, &api.CreateRunRequest{Run: run1}) + assert.Nil(t, err) + clients.UpdateUUID(util.NewFakeUUIDGeneratorOrFatal(resource.FakeUUIDOne, nil)) + manager = resource.NewResourceManager(clients) + runServer = NewRunServer(manager) + run2 := &api.Run{ + Name: "run2", + ResourceReferences: validReferencesOfExperimentAndPipelineVersion, + } + err = runServer.validateCreateRunRequest(&api.CreateRunRequest{Run: run2}) + assert.Nil(t, err) + _, err = runServer.CreateRun(nil, &api.CreateRunRequest{Run: run2}) + assert.Nil(t, err) + clients.UpdateUUID(util.NewFakeUUIDGeneratorOrFatal(resource.DefaultFakeUUID, nil)) + manager = resource.NewResourceManager(clients) + jobServer := NewJobServer(manager) + job1 := &api.Job{ + Name: "name1", + Enabled: true, + MaxConcurrency: 1, + Trigger: &api.Trigger{ + Trigger: &api.Trigger_CronSchedule{CronSchedule: &api.CronSchedule{ + StartTime: ×tamp.Timestamp{Seconds: 1}, + Cron: "1 * * * *", + }}}, + ResourceReferences: validReferencesOfExperimentAndPipelineVersion, + } + err = jobServer.validateCreateJobRequest(&api.CreateJobRequest{Job: job1}) + assert.Nil(t, err) + _, err = jobServer.CreateJob(nil, &api.CreateJobRequest{Job: job1}) + assert.Nil(t, err) + + // Archive the experiment and thus all runs under it. 
+ experimentServer := NewExperimentServer(manager)
+ _, err = experimentServer.ArchiveExperiment(nil, &api.ArchiveExperimentRequest{Id: experiment.UUID})
+ assert.Nil(t, err)
+ result, err := experimentServer.GetExperiment(nil, &api.GetExperimentRequest{Id: experiment.UUID})
+ assert.Equal(t, api.Experiment_STORAGESTATE_ARCHIVED, result.StorageState)
+ runs, err := runServer.ListRuns(nil, &api.ListRunsRequest{ResourceReferenceKey: &api.ResourceKey{Id: experiment.UUID, Type: api.ResourceType_EXPERIMENT}})
+ assert.Equal(t, 2, len(runs.Runs))
+ assert.Equal(t, api.Run_STORAGESTATE_ARCHIVED, runs.Runs[0].StorageState)
+ assert.Equal(t, api.Run_STORAGESTATE_ARCHIVED, runs.Runs[1].StorageState)
+ jobs, err := jobServer.ListJobs(nil, &api.ListJobsRequest{ResourceReferenceKey: &api.ResourceKey{Id: experiment.UUID, Type: api.ResourceType_EXPERIMENT}})
+ assert.Equal(t, 1, len(jobs.Jobs))
+ assert.Equal(t, false, jobs.Jobs[0].Enabled)
+
+ // Unarchive the experiment; runs under it stay archived and jobs stay disabled.
+ _, err = experimentServer.UnarchiveExperiment(nil, &api.UnarchiveExperimentRequest{Id: experiment.UUID})
+ assert.Nil(t, err)
+ result, err = experimentServer.GetExperiment(nil, &api.GetExperimentRequest{Id: experiment.UUID})
+ assert.Equal(t, api.Experiment_STORAGESTATE_AVAILABLE, result.StorageState)
+ runs, err = runServer.ListRuns(nil, &api.ListRunsRequest{ResourceReferenceKey: &api.ResourceKey{Id: experiment.UUID, Type: api.ResourceType_EXPERIMENT}})
+ assert.Equal(t, 2, len(runs.Runs))
+ assert.Equal(t, api.Run_STORAGESTATE_ARCHIVED, runs.Runs[0].StorageState)
+ assert.Equal(t, api.Run_STORAGESTATE_ARCHIVED, runs.Runs[1].StorageState)
+ jobs, err = jobServer.ListJobs(nil, &api.ListJobsRequest{ResourceReferenceKey: &api.ResourceKey{Id: experiment.UUID, Type: api.ResourceType_EXPERIMENT}})
+ assert.Equal(t, 1, len(jobs.Jobs))
+ assert.Equal(t, false, jobs.Jobs[0].Enabled)
 }
diff --git a/backend/src/apiserver/server/job_server.go b/backend/src/apiserver/server/job_server.go
index 40858ede950..d2c4f6cf0be 100644
--- a/backend/src/apiserver/server/job_server.go
+++ b/backend/src/apiserver/server/job_server.go
@@ -19,9 +19,11 @@ import (
 
 "github.com/golang/protobuf/ptypes/empty"
 api "github.com/kubeflow/pipelines/backend/api/go_client"
+ "github.com/kubeflow/pipelines/backend/src/apiserver/common"
 "github.com/kubeflow/pipelines/backend/src/apiserver/model"
 "github.com/kubeflow/pipelines/backend/src/apiserver/resource"
 "github.com/kubeflow/pipelines/backend/src/common/util"
+ "github.com/pkg/errors"
 "github.com/robfig/cron"
 )
 
@@ -32,8 +34,13 @@ type JobServer struct {
 func (s *JobServer) CreateJob(ctx context.Context, request *api.CreateJobRequest) (*api.Job, error) {
 err := s.validateCreateJobRequest(request)
 if err != nil {
- return nil, err
+ return nil, util.Wrap(err, "Validate create job request failed.")
+ }
+ err = CanAccessExperimentInResourceReferences(s.resourceManager, ctx, request.Job.ResourceReferences)
+ if err != nil {
+ return nil, util.Wrap(err, "Failed to authorize the request.")
 }
+
 newJob, err := s.resourceManager.CreateJob(request.Job)
 if err != nil {
 return nil, err
@@ -42,6 +49,11 @@
 }
 
 func (s *JobServer) GetJob(ctx context.Context, request *api.GetJobRequest) (*api.Job, error) {
+ err := s.canAccessJob(ctx, request.Id)
+ if err != nil {
+ return nil, util.Wrap(err, "Failed to authorize the request.")
+ }
+
 job, err := s.resourceManager.GetJob(request.Id)
 if err != nil {
 return nil, err
@@ -60,6 +72,35 @@ 
func (s *JobServer) ListJobs(ctx context.Context, request *api.ListJobsRequest)
 if err != nil {
 return nil, util.Wrap(err, "Validating filter failed.")
 }
+
+ if common.IsMultiUserMode() {
+ refKey := filterContext.ReferenceKey
+ if refKey == nil {
+ return nil, util.NewInvalidInputError("ListJobs must filter by resource reference in multi-user mode.")
+ }
+ if refKey.Type == common.Namespace {
+ namespace := refKey.ID
+ if len(namespace) == 0 {
+ return nil, util.NewInvalidInputError("Invalid resource references for ListJobs. Namespace is empty.")
+ }
+ err = isAuthorized(s.resourceManager, ctx, namespace)
+ if err != nil {
+ return nil, util.Wrap(err, "Failed to authorize with namespace resource reference.")
+ }
+ } else if refKey.Type == common.Experiment {
+ experimentID := refKey.ID
+ if len(experimentID) == 0 {
+ return nil, util.NewInvalidInputError("Invalid resource references for job. Experiment ID is empty.")
+ }
+ err = CanAccessExperiment(s.resourceManager, ctx, experimentID)
+ if err != nil {
+ return nil, util.Wrap(err, "Failed to authorize with experiment resource reference.")
+ }
+ } else {
+ return nil, util.NewInvalidInputError("Invalid resource references for ListJobs. Got %+v", request.ResourceReferenceKey)
+ }
+ }
+
 jobs, total_size, nextPageToken, err := s.resourceManager.ListJobs(filterContext, opts)
 if err != nil {
 return nil, util.Wrap(err, "Failed to list jobs.")
@@ -68,15 +109,30 @@ func (s *JobServer) ListJobs(ctx context.Context, request *api.ListJobsRequest)
 }
 
 func (s *JobServer) EnableJob(ctx context.Context, request *api.EnableJobRequest) (*empty.Empty, error) {
+ err := s.canAccessJob(ctx, request.Id)
+ if err != nil {
+ return nil, util.Wrap(err, "Failed to authorize the request.")
+ }
+
 return s.enableJob(request.Id, true)
 }
 
 func (s *JobServer) DisableJob(ctx context.Context, request *api.DisableJobRequest) (*empty.Empty, error) {
+ err := s.canAccessJob(ctx, request.Id)
+ if err != nil {
+ return nil, util.Wrap(err, "Failed to authorize the request.")
+ }
+
 return s.enableJob(request.Id, false)
 }
 
 func (s *JobServer) DeleteJob(ctx context.Context, request *api.DeleteJobRequest) (*empty.Empty, error) {
- err := s.resourceManager.DeleteJob(request.Id)
+ err := s.canAccessJob(ctx, request.Id)
+ if err != nil {
+ return nil, util.Wrap(err, "Failed to authorize the request.")
+ }
+
+ err = s.resourceManager.DeleteJob(request.Id)
 if err != nil {
 return nil, err
 }
@@ -87,7 +143,9 @@ func (s *JobServer) validateCreateJobRequest(request *api.CreateJobRequest) erro
 job := request.Job
 
 if err := ValidatePipelineSpec(s.resourceManager, job.PipelineSpec); err != nil {
- return util.Wrap(err, "The pipeline spec is invalid.")
+ if _, errResourceReference := CheckPipelineVersionReference(s.resourceManager, job.ResourceReferences); errResourceReference != nil {
+ return util.Wrap(err, "Neither pipeline spec nor pipeline version is valid. "+errResourceReference.Error())
+ }
 }
 
 if job.MaxConcurrency > 10 || job.MaxConcurrency < 1 {
@@ -117,6 +175,26 @@ func (s *JobServer) enableJob(id string, enabled bool) (*empty.Empty, error) {
 return &empty.Empty{}, nil
 }
 
+func (s *JobServer) canAccessJob(ctx context.Context, jobID string) error {
+ if !common.IsMultiUserMode() {
+ // Skip authorization if not multi-user mode. 
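+ // In single-user deployments every caller is implicitly trusted, so no namespace check is needed.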
+ return nil
+ }
+ namespace, err := s.resourceManager.GetNamespaceFromJobID(jobID)
+ if err != nil {
+ return util.Wrap(err, "Failed to authorize with the job ID.")
+ }
+ if len(namespace) == 0 {
+ return util.NewInternalServerError(errors.New("Empty namespace"), "The job doesn't have a valid namespace.")
+ }
+
+ err = isAuthorized(s.resourceManager, ctx, namespace)
+ if err != nil {
+ return util.Wrap(err, "Failed to authorize with API resource references")
+ }
+ return nil
+}
+
 func NewJobServer(resourceManager *resource.ResourceManager) *JobServer {
 return &JobServer{resourceManager: resourceManager}
 }
diff --git a/backend/src/apiserver/server/job_server_test.go b/backend/src/apiserver/server/job_server_test.go
index 125a1577518..f629a7d0318 100644
--- a/backend/src/apiserver/server/job_server_test.go
+++ b/backend/src/apiserver/server/job_server_test.go
@@ -1,22 +1,26 @@ package server
 
 import (
+ "context"
+ "strings"
 "testing"
 
 "github.com/golang/protobuf/ptypes/timestamp"
+ "github.com/google/go-cmp/cmp"
 api "github.com/kubeflow/pipelines/backend/api/go_client"
+ "github.com/kubeflow/pipelines/backend/src/apiserver/client"
+ "github.com/kubeflow/pipelines/backend/src/apiserver/common"
+ "github.com/kubeflow/pipelines/backend/src/apiserver/resource"
 "github.com/kubeflow/pipelines/backend/src/common/util"
+ "github.com/spf13/viper"
 "github.com/stretchr/testify/assert"
 "google.golang.org/grpc/codes"
+ "google.golang.org/grpc/metadata"
 )
 
-func TestValidateApiJob(t *testing.T) {
- clients, manager, experiment := initWithExperiment(t)
- defer clients.Close()
- server := NewJobServer(manager)
- apiJob := &api.Job{
- Id: "job1",
- Name: "name1",
+var (
+ commonApiJob = &api.Job{
+ Name: "job1",
 Enabled: true,
 MaxConcurrency: 1,
 Trigger: &api.Trigger{
@@ -29,9 +33,63 @@ func TestValidateApiJob(t *testing.T) {
 Parameters: []*api.Parameter{{Name: "param1", Value: "world"}},
 },
 ResourceReferences: []*api.ResourceReference{
- {Key: &api.ResourceKey{Type: api.ResourceType_EXPERIMENT, Id: experiment.UUID}, Relationship: api.Relationship_OWNER},
+ {
+ Key: &api.ResourceKey{Type: api.ResourceType_EXPERIMENT, Id: "123e4567-e89b-12d3-a456-426655440000"},
+ Relationship: api.Relationship_OWNER,
+ },
+ }
+
+ commonExpectedJob = &api.Job{
+ Id: "123e4567-e89b-12d3-a456-426655440000",
+ Name: "job1",
+ ServiceAccount: "pipeline-runner",
+ Enabled: true,
+ MaxConcurrency: 1,
+ Trigger: &api.Trigger{
+ Trigger: &api.Trigger_CronSchedule{CronSchedule: &api.CronSchedule{
+ StartTime: &timestamp.Timestamp{Seconds: 1},
+ Cron: "1 * * * *",
+ }}},
+ CreatedAt: &timestamp.Timestamp{Seconds: 2},
+ UpdatedAt: &timestamp.Timestamp{Seconds: 2},
+ Status: "NO_STATUS",
+ PipelineSpec: &api.PipelineSpec{
+ WorkflowManifest: testWorkflow.ToStringForStore(),
+ Parameters: []*api.Parameter{{Name: "param1", Value: "world"}},
+ },
+ ResourceReferences: []*api.ResourceReference{
+ {
+ Key: &api.ResourceKey{Type: api.ResourceType_EXPERIMENT, Id: "123e4567-e89b-12d3-a456-426655440000"},
+ Name: "exp1", Relationship: api.Relationship_OWNER,
+ },
 },
 }
+)
+
+func TestValidateApiJob(t *testing.T) {
+ clients, manager, _ := initWithExperiment(t)
+ defer clients.Close()
+ server := NewJobServer(manager)
+ err := server.validateCreateJobRequest(&api.CreateJobRequest{Job: commonApiJob})
+ assert.Nil(t, err)
+}
+
+func TestValidateApiJob_WithPipelineVersion(t *testing.T) {
+ clients, manager, _ := initWithExperimentAndPipelineVersion(t)
+ defer clients.Close()
+ server := NewJobServer(manager)
+ apiJob := &api.Job{
+ Name: "job1",
+ Enabled: true, 
+ MaxConcurrency: 1,
+ Trigger: &api.Trigger{
+ Trigger: &api.Trigger_CronSchedule{CronSchedule: &api.CronSchedule{
+ StartTime: &timestamp.Timestamp{Seconds: 1},
+ Cron: "1 * * * *",
+ }}},
+ ResourceReferences: validReferencesOfExperimentAndPipelineVersion,
+ }
 err := server.validateCreateJobRequest(&api.CreateJobRequest{Job: apiJob})
 assert.Nil(t, err)
 }
@@ -41,8 +99,7 @@ func TestValidateApiJob_ValidateNoExperimentResourceReferenceSucceeds(t *testing
 defer clients.Close()
 server := NewJobServer(manager)
 apiJob := &api.Job{
- Id: "job1",
- Name: "name1",
+ Name: "job1",
 Enabled: true,
 MaxConcurrency: 1,
 Trigger: &api.Trigger{
@@ -65,8 +122,7 @@ func TestValidateApiJob_ValidatePipelineSpecFailed(t *testing.T) {
 defer clients.Close()
 server := NewJobServer(manager)
 apiJob := &api.Job{
- Id: "job1",
- Name: "name1",
+ Name: "job1",
 Enabled: true,
 MaxConcurrency: 1,
 Trigger: &api.Trigger{
@@ -87,13 +143,32 @@
 assert.Contains(t, err.Error(), "Pipeline not_exist_pipeline not found")
 }
 
+func TestValidateApiJob_NoValidPipelineSpecOrPipelineVersion(t *testing.T) {
+ clients, manager, _ := initWithExperimentAndPipelineVersion(t)
+ defer clients.Close()
+ server := NewJobServer(manager)
+ apiJob := &api.Job{
+ Name: "job1",
+ Enabled: true,
+ MaxConcurrency: 1,
+ Trigger: &api.Trigger{
+ Trigger: &api.Trigger_CronSchedule{CronSchedule: &api.CronSchedule{
+ StartTime: &timestamp.Timestamp{Seconds: 1},
+ Cron: "1 * * * *",
+ }}},
+ ResourceReferences: validReference,
+ }
+ err := server.validateCreateJobRequest(&api.CreateJobRequest{Job: apiJob})
+ assert.Equal(t, codes.InvalidArgument, err.(*util.UserError).ExternalStatusCode())
+ assert.Contains(t, err.Error(), "Neither pipeline spec nor pipeline version is valid")
+}
+
 func TestValidateApiJob_InvalidCron(t *testing.T) {
 clients, manager, experiment := initWithExperiment(t)
 defer clients.Close()
 server := NewJobServer(manager)
 apiJob := &api.Job{
- Id: "job1",
- Name: "name1",
+ Name: "job1",
 Enabled: true,
 MaxConcurrency: 1,
 Trigger: &api.Trigger{
@@ -119,8 +194,7 @@ func TestValidateApiJob_MaxConcurrencyOutOfRange(t *testing.T) {
 defer clients.Close()
 server := NewJobServer(manager)
 apiJob := &api.Job{
- Id: "job1",
- Name: "name1",
+ Name: "job1",
 Enabled: true,
 MaxConcurrency: 0,
 Trigger: &api.Trigger{
@@ -146,8 +220,7 @@ func TestValidateApiJob_NegativeIntervalSecond(t *testing.T) {
 defer clients.Close()
 server := NewJobServer(manager)
 apiJob := &api.Job{
- Id: "job1",
- Name: "name1",
+ Name: "job1",
 Enabled: true,
 MaxConcurrency: 0,
 Trigger: &api.Trigger{
@@ -166,3 +239,276 @@ func TestValidateApiJob_NegativeIntervalSecond(t *testing.T) {
 assert.Equal(t, codes.InvalidArgument, err.(*util.UserError).ExternalStatusCode())
 assert.Contains(t, err.Error(), "The max concurrency of the job is out of range")
 }
+
+func TestCreateJob(t *testing.T) {
+ clients, manager, _ := initWithExperiment(t)
+ defer clients.Close()
+ server := NewJobServer(manager)
+ job, err := server.CreateJob(nil, &api.CreateJobRequest{Job: commonApiJob})
+ assert.Nil(t, err)
+ assert.Equal(t, commonExpectedJob, job)
+}
+
+func TestCreateJob_Unauthorized(t *testing.T) {
+ viper.Set(common.MultiUserMode, "true")
+ defer viper.Set(common.MultiUserMode, "false")
+
+ md := metadata.New(map[string]string{common.GoogleIAPUserIdentityHeader: common.GoogleIAPUserIdentityPrefix + "user@google.com"})
+ ctx := metadata.NewIncomingContext(context.Background(), md)
+
+ clients, manager, _ := initWithExperiment_KFAM_Unauthorized(t)
+ 
defer clients.Close() + server := NewJobServer(manager) + _, err := server.CreateJob(ctx, &api.CreateJobRequest{Job: commonApiJob}) + assert.NotNil(t, err) + assert.Contains(t, err.Error(), "Unauthorized access") +} + +func TestGetJob_Unauthorized(t *testing.T) { + viper.Set(common.MultiUserMode, "true") + defer viper.Set(common.MultiUserMode, "false") + + md := metadata.New(map[string]string{common.GoogleIAPUserIdentityHeader: common.GoogleIAPUserIdentityPrefix + "user@google.com"}) + ctx := metadata.NewIncomingContext(context.Background(), md) + + clients, manager, _ := initWithExperiment(t) + defer clients.Close() + server := NewJobServer(manager) + job, err := server.CreateJob(ctx, &api.CreateJobRequest{Job: commonApiJob}) + assert.Nil(t, err) + + clients.KfamClientFake = client.NewFakeKFAMClientUnauthorized() + manager = resource.NewResourceManager(clients) + server = NewJobServer(manager) + + _, err = server.GetJob(ctx, &api.GetJobRequest{Id: job.Id}) + assert.NotNil(t, err) + assert.Contains(t, err.Error(), "Unauthorized access") +} + +func TestGetJob_Multiuser(t *testing.T) { + viper.Set(common.MultiUserMode, "true") + defer viper.Set(common.MultiUserMode, "false") + + md := metadata.New(map[string]string{common.GoogleIAPUserIdentityHeader: common.GoogleIAPUserIdentityPrefix + "user@google.com"}) + ctx := metadata.NewIncomingContext(context.Background(), md) + + clients, manager, _ := initWithExperiment(t) + defer clients.Close() + server := NewJobServer(manager) + createdJob, err := server.CreateJob(ctx, &api.CreateJobRequest{Job: commonApiJob}) + assert.Nil(t, err) + + job, err := server.GetJob(ctx, &api.GetJobRequest{Id: createdJob.Id}) + assert.Nil(t, err) + assert.Equal(t, commonExpectedJob, job) +} + +func TestListJobs_Unauthorized(t *testing.T) { + viper.Set(common.MultiUserMode, "true") + defer viper.Set(common.MultiUserMode, "false") + + md := metadata.New(map[string]string{common.GoogleIAPUserIdentityHeader: common.GoogleIAPUserIdentityPrefix + "user@google.com"}) + ctx := metadata.NewIncomingContext(context.Background(), md) + + clients, manager, experiment := initWithExperiment_KFAM_Unauthorized(t) + defer clients.Close() + server := NewJobServer(manager) + _, err := server.ListJobs(ctx, &api.ListJobsRequest{ + ResourceReferenceKey: &api.ResourceKey{ + Type: api.ResourceType_EXPERIMENT, + Id: experiment.UUID, + }, + }) + assert.NotNil(t, err) + assert.Contains(t, err.Error(), "Unauthorized access") + + _, err = server.ListJobs(ctx, &api.ListJobsRequest{ + ResourceReferenceKey: &api.ResourceKey{ + Type: api.ResourceType_NAMESPACE, + Id: "ns1", + }, + }) + assert.NotNil(t, err) + assert.Contains(t, err.Error(), "Unauthorized access") +} + +func TestListJobs_Multiuser(t *testing.T) { + viper.Set(common.MultiUserMode, "true") + defer viper.Set(common.MultiUserMode, "false") + + md := metadata.New(map[string]string{common.GoogleIAPUserIdentityHeader: common.GoogleIAPUserIdentityPrefix + "user@google.com"}) + ctx := metadata.NewIncomingContext(context.Background(), md) + + clients, manager, _ := initWithExperiment(t) + defer clients.Close() + server := NewJobServer(manager) + _, err := server.CreateJob(ctx, &api.CreateJobRequest{Job: commonApiJob}) + assert.Nil(t, err) + + var expectedJobs []*api.Job + expectedJobs = append(expectedJobs, commonExpectedJob) + expectedJobsEmpty := []*api.Job{} + + tests := []struct { + name string + request *api.ListJobsRequest + wantError bool + errorMessage string + expectedJobs []*api.Job + }{ + { + "Valid - filter by experiment", + 
&api.ListJobsRequest{
+ ResourceReferenceKey: &api.ResourceKey{
+ Type: api.ResourceType_EXPERIMENT,
+ Id: "123e4567-e89b-12d3-a456-426655440000",
+ },
+ },
+ false,
+ "",
+ expectedJobs,
+ },
+ {
+ "Valid - filter by namespace",
+ &api.ListJobsRequest{
+ ResourceReferenceKey: &api.ResourceKey{
+ Type: api.ResourceType_NAMESPACE,
+ Id: "ns1",
+ },
+ },
+ false,
+ "",
+ expectedJobs,
+ },
+ {
+ "Valid - filter by namespace - no result",
+ &api.ListJobsRequest{
+ ResourceReferenceKey: &api.ResourceKey{
+ Type: api.ResourceType_NAMESPACE,
+ Id: "no-such-ns",
+ },
+ },
+ false,
+ "",
+ expectedJobsEmpty,
+ },
+ {
+ "Invalid - no filter",
+ &api.ListJobsRequest{},
+ true,
+ "ListJobs must filter by resource reference",
+ nil,
+ },
+ {
+ "Invalid - invalid filter type",
+ &api.ListJobsRequest{
+ ResourceReferenceKey: &api.ResourceKey{
+ Type: api.ResourceType_UNKNOWN_RESOURCE_TYPE,
+ Id: "unknown",
+ },
+ },
+ true,
+ "Unrecognized resource reference type",
+ nil,
+ },
+ }
+
+ for _, tc := range tests {
+ response, err := server.ListJobs(ctx, tc.request)
+
+ if tc.wantError {
+ if err == nil {
+ t.Errorf("TestListJobs_Multiuser(%v) expect error but got nil", tc.name)
+ } else if !strings.Contains(err.Error(), tc.errorMessage) {
+ t.Errorf("TestListJobs_Multiuser(%v) expect error containing: %v, but got: %v", tc.name, tc.errorMessage, err)
+ }
+ } else {
+ if err != nil {
+ t.Errorf("TestListJobs_Multiuser(%v) expect no error but got %v", tc.name, err)
+ } else if !cmp.Equal(tc.expectedJobs, response.Jobs) {
+ t.Errorf("TestListJobs_Multiuser(%v) expect (%+v) but got (%+v)", tc.name, tc.expectedJobs, response.Jobs)
+ }
+ }
+ }
+}
+
+func TestEnableJob_Unauthorized(t *testing.T) {
+ viper.Set(common.MultiUserMode, "true")
+ defer viper.Set(common.MultiUserMode, "false")
+
+ md := metadata.New(map[string]string{common.GoogleIAPUserIdentityHeader: common.GoogleIAPUserIdentityPrefix + "user@google.com"})
+ ctx := metadata.NewIncomingContext(context.Background(), md)
+
+ clients, manager, _ := initWithExperiment(t)
+ defer clients.Close()
+ server := NewJobServer(manager)
+ job, err := server.CreateJob(ctx, &api.CreateJobRequest{Job: commonApiJob})
+ assert.Nil(t, err)
+
+ clients.KfamClientFake = client.NewFakeKFAMClientUnauthorized()
+ manager = resource.NewResourceManager(clients)
+ server = NewJobServer(manager)
+
+ _, err = server.EnableJob(ctx, &api.EnableJobRequest{Id: job.Id})
+ assert.NotNil(t, err)
+ assert.Contains(t, err.Error(), "Unauthorized access")
+}
+
+func TestEnableJob_Multiuser(t *testing.T) {
+ viper.Set(common.MultiUserMode, "true")
+ defer viper.Set(common.MultiUserMode, "false")
+
+ md := metadata.New(map[string]string{common.GoogleIAPUserIdentityHeader: common.GoogleIAPUserIdentityPrefix + "user@google.com"})
+ ctx := metadata.NewIncomingContext(context.Background(), md)
+
+ clients, manager, _ := initWithExperiment(t)
+ defer clients.Close()
+ server := NewJobServer(manager)
+
+ job, err := server.CreateJob(ctx, &api.CreateJobRequest{Job: commonApiJob})
+ assert.Nil(t, err)
+
+ _, err = server.EnableJob(ctx, &api.EnableJobRequest{Id: job.Id})
+ assert.Nil(t, err)
+}
+
+func TestDisableJob_Unauthorized(t *testing.T) {
+ viper.Set(common.MultiUserMode, "true")
+ defer viper.Set(common.MultiUserMode, "false")
+
+ md := metadata.New(map[string]string{common.GoogleIAPUserIdentityHeader: common.GoogleIAPUserIdentityPrefix + "user@google.com"})
+ ctx := metadata.NewIncomingContext(context.Background(), md)
+
+ clients, manager, _ := initWithExperiment(t)
+ defer clients.Close()
+ 
server := NewJobServer(manager) + job, err := server.CreateJob(ctx, &api.CreateJobRequest{Job: commonApiJob}) + assert.Nil(t, err) + + clients.KfamClientFake = client.NewFakeKFAMClientUnauthorized() + manager = resource.NewResourceManager(clients) + server = NewJobServer(manager) + + _, err = server.DisableJob(ctx, &api.DisableJobRequest{Id: job.Id}) + assert.NotNil(t, err) + assert.Contains(t, err.Error(), "Unauthorized access") +} + +func TestDisableJob_Multiuser(t *testing.T) { + viper.Set(common.MultiUserMode, "true") + defer viper.Set(common.MultiUserMode, "false") + + md := metadata.New(map[string]string{common.GoogleIAPUserIdentityHeader: common.GoogleIAPUserIdentityPrefix + "user@google.com"}) + ctx := metadata.NewIncomingContext(context.Background(), md) + + clients, manager, _ := initWithExperiment(t) + defer clients.Close() + server := NewJobServer(manager) + + job, err := server.CreateJob(ctx, &api.CreateJobRequest{Job: commonApiJob}) + assert.Nil(t, err) + + _, err = server.DisableJob(ctx, &api.DisableJobRequest{Id: job.Id}) + assert.Nil(t, err) +} diff --git a/backend/src/apiserver/server/pipeline_server.go b/backend/src/apiserver/server/pipeline_server.go index 8c3641440e6..635b5e0b35a 100644 --- a/backend/src/apiserver/server/pipeline_server.go +++ b/backend/src/apiserver/server/pipeline_server.go @@ -118,3 +118,81 @@ func ValidateCreatePipelineRequest(request *api.CreatePipelineRequest) error { func NewPipelineServer(resourceManager *resource.ResourceManager) *PipelineServer { return &PipelineServer{resourceManager: resourceManager, httpClient: http.DefaultClient} } + +func (s *PipelineServer) CreatePipelineVersion(ctx context.Context, request *api.CreatePipelineVersionRequest) (*api.PipelineVersion, error) { + // Read pipeline file. + if request.Version == nil || request.Version.PackageUrl == nil || + len(request.Version.PackageUrl.PipelineUrl) == 0 { + return nil, util.NewInvalidInputError("Pipeline URL is empty. Please specify a valid URL.") + } + pipelineUrl := request.Version.PackageUrl.PipelineUrl + if _, err := url.ParseRequestURI(request.Version.PackageUrl.PipelineUrl); err != nil { + return nil, util.NewInvalidInputError("Invalid Pipeline URL %v. Please specify a valid URL", request.Version.PackageUrl.PipelineUrl) + } + resp, err := s.httpClient.Get(pipelineUrl) + if err != nil || resp.StatusCode != http.StatusOK { + return nil, util.NewInternalServerError(err, "Failed to download the pipeline from %v. 
Please double check the URL is valid and can be accessed by the pipeline system.", pipelineUrl) + } + pipelineFileName := path.Base(pipelineUrl) + pipelineFile, err := ReadPipelineFile(pipelineFileName, resp.Body, MaxFileLength) + if err != nil { + return nil, util.Wrap(err, "The URL is valid but pipeline system failed to read the file.") + } + + version, err := s.resourceManager.CreatePipelineVersion(request.Version, pipelineFile) + if err != nil { + return nil, util.Wrap(err, "Failed to create a version.") + } + return ToApiPipelineVersion(version) +} + +func (s *PipelineServer) GetPipelineVersion(ctx context.Context, request *api.GetPipelineVersionRequest) (*api.PipelineVersion, error) { + version, err := s.resourceManager.GetPipelineVersion(request.VersionId) + if err != nil { + return nil, util.Wrap(err, "Get pipeline version failed.") + } + return ToApiPipelineVersion(version) +} + +func (s *PipelineServer) ListPipelineVersions(ctx context.Context, request *api.ListPipelineVersionsRequest) (*api.ListPipelineVersionsResponse, error) { + opts, err := validatedListOptions( + &model.PipelineVersion{}, + request.PageToken, + int(request.PageSize), + request.SortBy, + request.Filter) + + if err != nil { + return nil, util.Wrap(err, "Failed to create list options") + } + + pipelineVersions, total_size, nextPageToken, err := + s.resourceManager.ListPipelineVersions(request.ResourceKey.Id, opts) + if err != nil { + return nil, util.Wrap(err, "List pipeline versions failed.") + } + apiPipelineVersions, _ := ToApiPipelineVersions(pipelineVersions) + + return &api.ListPipelineVersionsResponse{ + Versions: apiPipelineVersions, + NextPageToken: nextPageToken, + TotalSize: int32(total_size)}, nil +} + +func (s *PipelineServer) DeletePipelineVersion(ctx context.Context, request *api.DeletePipelineVersionRequest) (*empty.Empty, error) { + err := s.resourceManager.DeletePipelineVersion(request.VersionId) + if err != nil { + return nil, util.Wrap(err, "Delete pipeline versions failed.") + } + + return &empty.Empty{}, nil +} + +func (s *PipelineServer) GetPipelineVersionTemplate(ctx context.Context, request *api.GetPipelineVersionTemplateRequest) (*api.GetTemplateResponse, error) { + template, err := s.resourceManager.GetPipelineVersionTemplate(request.VersionId) + if err != nil { + return nil, util.Wrap(err, "Get pipeline template failed.") + } + + return &api.GetTemplateResponse{Template: string(template)}, nil +} diff --git a/backend/src/apiserver/server/pipeline_server_test.go b/backend/src/apiserver/server/pipeline_server_test.go index 30028446316..62679969f89 100644 --- a/backend/src/apiserver/server/pipeline_server_test.go +++ b/backend/src/apiserver/server/pipeline_server_test.go @@ -27,8 +27,8 @@ func TestCreatePipeline_YAML(t *testing.T) { pipelineServer := PipelineServer{resourceManager: resourceManager, httpClient: httpServer.Client()} pipeline, err := pipelineServer.CreatePipeline(context.Background(), &api.CreatePipelineRequest{ Pipeline: &api.Pipeline{ - Url:&api.Url{PipelineUrl: httpServer.URL + "/arguments-parameters.yaml"}, - Name:"argument-parameters", + Url: &api.Url{PipelineUrl: httpServer.URL + "/arguments-parameters.yaml"}, + Name: "argument-parameters", }}) assert.Nil(t, err) @@ -54,8 +54,8 @@ func TestCreatePipeline_Tarball(t *testing.T) { pipelineServer := PipelineServer{resourceManager: resourceManager, httpClient: httpServer.Client()} pipeline, err := pipelineServer.CreatePipeline(context.Background(), &api.CreatePipelineRequest{ Pipeline: &api.Pipeline{ - 
Url:&api.Url{PipelineUrl: httpServer.URL + "/arguments_tarball/arguments.tar.gz"},
- Name:"argument-parameters",
+ Url: &api.Url{PipelineUrl: httpServer.URL + "/arguments_tarball/arguments.tar.gz"},
+ Name: "argument-parameters",
 }})
 
 assert.Nil(t, err)
@@ -81,8 +81,8 @@ func TestCreatePipeline_InvalidYAML(t *testing.T) {
 pipelineServer := PipelineServer{resourceManager: resourceManager, httpClient: httpServer.Client()}
 _, err := pipelineServer.CreatePipeline(context.Background(), &api.CreatePipelineRequest{
 Pipeline: &api.Pipeline{
- Url:&api.Url{PipelineUrl: httpServer.URL + "/invalid-workflow.yaml"},
- Name:"argument-parameters",
+ Url: &api.Url{PipelineUrl: httpServer.URL + "/invalid-workflow.yaml"},
+ Name: "argument-parameters",
 }})
 
 assert.NotNil(t, err)
@@ -101,12 +101,145 @@ func TestCreatePipeline_InvalidURL(t *testing.T) {
 pipelineServer := PipelineServer{resourceManager: resourceManager, httpClient: httpServer.Client()}
 _, err := pipelineServer.CreatePipeline(context.Background(), &api.CreatePipelineRequest{
 Pipeline: &api.Pipeline{
- Url:&api.Url{PipelineUrl: httpServer.URL + "/invalid-workflow.yaml"},
- Name:"argument-parameters",
+ Url: &api.Url{PipelineUrl: httpServer.URL + "/invalid-workflow.yaml"},
+ Name: "argument-parameters",
 }})
 
 assert.Equal(t, codes.Internal, err.(*util.UserError).ExternalStatusCode())
 }
 
+func TestCreatePipelineVersion_YAML(t *testing.T) {
+ httpServer := getMockServer(t)
+ // Close the server when test finishes
+ defer httpServer.Close()
+
+ clientManager := resource.NewFakeClientManagerOrFatal(
+ util.NewFakeTimeForEpoch())
+ resourceManager := resource.NewResourceManager(clientManager)
+
+ pipelineServer := PipelineServer{
+ resourceManager: resourceManager, httpClient: httpServer.Client()}
+ pipelineVersion, err := pipelineServer.CreatePipelineVersion(
+ context.Background(), &api.CreatePipelineVersionRequest{
+ Version: &api.PipelineVersion{
+ PackageUrl: &api.Url{
+ PipelineUrl: httpServer.URL + "/arguments-parameters.yaml"},
+ Name: "argument-parameters",
+ ResourceReferences: []*api.ResourceReference{
+ &api.ResourceReference{
+ Key: &api.ResourceKey{
+ Id: "pipeline",
+ Type: api.ResourceType_PIPELINE,
+ },
+ Relationship: api.Relationship_OWNER,
+ }}}})
+
+ assert.Nil(t, err)
+ assert.NotNil(t, pipelineVersion)
+ assert.Equal(t, "argument-parameters", pipelineVersion.Name)
+ newPipelineVersion, err := resourceManager.GetPipelineVersion(
+ pipelineVersion.Id)
+ assert.Nil(t, err)
+ assert.NotNil(t, newPipelineVersion)
+ var params []api.Parameter
+ err = json.Unmarshal([]byte(newPipelineVersion.Parameters), &params)
+ assert.Nil(t, err)
+ assert.Equal(t, []api.Parameter{
+ {Name: "param1", Value: "hello"}, {Name: "param2"}}, params)
+}
+
+func TestCreatePipelineVersion_InvalidYAML(t *testing.T) {
+ httpServer := getMockServer(t)
+ // Close the server when test finishes
+ defer httpServer.Close()
+
+ clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch())
+ resourceManager := resource.NewResourceManager(clientManager)
+
+ pipelineServer := PipelineServer{resourceManager: resourceManager, httpClient: httpServer.Client()}
+ _, err := pipelineServer.CreatePipelineVersion(
+ context.Background(), &api.CreatePipelineVersionRequest{
+ Version: &api.PipelineVersion{
+ PackageUrl: &api.Url{
+ PipelineUrl: httpServer.URL + "/invalid-workflow.yaml"},
+ Name: "argument-parameters",
+ ResourceReferences: []*api.ResourceReference{
+ &api.ResourceReference{
+ Key: &api.ResourceKey{
+ Id: "pipeline",
+ Type: api.ResourceType_PIPELINE, 
},
+ Relationship: api.Relationship_OWNER,
+ }}}})
+
+ assert.NotNil(t, err)
+ assert.Equal(t, codes.InvalidArgument, err.(*util.UserError).ExternalStatusCode())
+ assert.Contains(t, err.Error(), "Unexpected resource type")
+}
+
+func TestCreatePipelineVersion_Tarball(t *testing.T) {
+ httpServer := getMockServer(t)
+ // Close the server when test finishes
+ defer httpServer.Close()
+
+ clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch())
+ resourceManager := resource.NewResourceManager(clientManager)
+
+ pipelineServer := PipelineServer{resourceManager: resourceManager, httpClient: httpServer.Client()}
+ pipelineVersion, err := pipelineServer.CreatePipelineVersion(
+ context.Background(), &api.CreatePipelineVersionRequest{
+ Version: &api.PipelineVersion{
+ PackageUrl: &api.Url{
+ PipelineUrl: httpServer.URL +
+ "/arguments_tarball/arguments.tar.gz"},
+ Name: "argument-parameters",
+ ResourceReferences: []*api.ResourceReference{
+ &api.ResourceReference{
+ Key: &api.ResourceKey{
+ Id: "pipeline",
+ Type: api.ResourceType_PIPELINE,
+ },
+ Relationship: api.Relationship_OWNER,
+ }}}})
+
+ assert.Nil(t, err)
+ assert.NotNil(t, pipelineVersion)
+ assert.Equal(t, "argument-parameters", pipelineVersion.Name)
+ newPipelineVersion, err := resourceManager.GetPipelineVersion(pipelineVersion.Id)
+ assert.Nil(t, err)
+ assert.NotNil(t, newPipelineVersion)
+ var params []api.Parameter
+ err = json.Unmarshal([]byte(newPipelineVersion.Parameters), &params)
+ assert.Nil(t, err)
+ assert.Equal(t, []api.Parameter{{Name: "param1", Value: "hello"}, {Name: "param2"}}, params)
+}
+
+func TestCreatePipelineVersion_InvalidURL(t *testing.T) {
+ // Use a bad mock server
+ httpServer := getBadMockServer()
+ // Close the server when test finishes
+ defer httpServer.Close()
+
+ clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch())
+ resourceManager := resource.NewResourceManager(clientManager)
+
+ pipelineServer := PipelineServer{resourceManager: resourceManager, httpClient: httpServer.Client()}
+ _, err := pipelineServer.CreatePipelineVersion(context.Background(), &api.CreatePipelineVersionRequest{
+ Version: &api.PipelineVersion{
+ PackageUrl: &api.Url{
+ PipelineUrl: httpServer.URL + "/invalid-workflow.yaml"},
+ Name: "argument-parameters",
+ ResourceReferences: []*api.ResourceReference{
+ &api.ResourceReference{
+ Key: &api.ResourceKey{
+ Id: "pipeline",
+ Type: api.ResourceType_PIPELINE,
+ },
+ Relationship: api.Relationship_OWNER,
+ }}}})
+
+ assert.Equal(t, codes.Internal, err.(*util.UserError).ExternalStatusCode())
+}
+
 func getMockServer(t *testing.T) *httptest.Server {
 httpServer := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {
 // Send response to be tested
diff --git a/backend/src/apiserver/server/pipeline_upload_server.go b/backend/src/apiserver/server/pipeline_upload_server.go
index 001da385279..98335d8bd40 100644
--- a/backend/src/apiserver/server/pipeline_upload_server.go
+++ b/backend/src/apiserver/server/pipeline_upload_server.go
@@ -18,18 +18,23 @@ import (
 "encoding/json"
 "fmt"
 "net/http"
- "time"
+ "net/url"
 
 "github.com/golang/glog"
+ "github.com/golang/protobuf/jsonpb"
 api "github.com/kubeflow/pipelines/backend/api/go_client"
 "github.com/kubeflow/pipelines/backend/src/apiserver/resource"
 "github.com/kubeflow/pipelines/backend/src/common/util"
+ "github.com/pkg/errors"
 )
 
 // These are valid conditions of a ScheduledWorkflow. 
const (
- FormFileKey = "uploadfile"
- NameQueryStringKey = "name"
+ FormFileKey = "uploadfile"
+ NameQueryStringKey = "name"
+ DescriptionQueryStringKey = "description"
+ // Pipeline Id in the query string specifies a pipeline when creating versions.
+ PipelineKey = "pipelineid"
 )
 
 type PipelineUploadServer struct {
@@ -46,7 +51,7 @@ func (s *PipelineUploadServer) UploadPipeline(w http.ResponseWriter, r *http.Req
 glog.Infof("Upload pipeline called")
 file, header, err := r.FormFile(FormFileKey)
 if err != nil {
- s.writeErrorToResponse(w, http.StatusBadRequest, util.Wrap(err, "Failed to read pipeline form file"))
+ s.writeErrorToResponse(w, http.StatusBadRequest, util.Wrap(err, "Failed to read pipeline from file"))
 return
 }
 defer file.Close()
@@ -63,30 +68,92 @@ func (s *PipelineUploadServer) UploadPipeline(w http.ResponseWriter, r *http.Req
 s.writeErrorToResponse(w, http.StatusBadRequest, util.Wrap(err, "Invalid pipeline name."))
 return
 }
- newPipeline, err := s.resourceManager.CreatePipeline(pipelineName, "", pipelineFile)
+ // We don't set a max length for pipeline description here, since in our DB the description type is longtext.
+ pipelineDescription, err := url.QueryUnescape(r.URL.Query().Get(DescriptionQueryStringKey))
 if err != nil {
- s.writeErrorToResponse(w, http.StatusInternalServerError, util.Wrap(err, "Error creating pipeline"))
+ s.writeErrorToResponse(w, http.StatusBadRequest, util.Wrap(err, "Error reading pipeline description."))
 return
 }
 
- apiPipeline := ToApiPipeline(newPipeline)
- createdAt := time.Unix(apiPipeline.CreatedAt.Seconds, int64(apiPipeline.CreatedAt.Nanos)).UTC().Format(time.RFC3339)
- apiPipeline.CreatedAt = nil
- // Create an anonymous struct to stream time conforming RFC3339 format "1970-01-01T00:00:01Z"
- // Otherwise it returns "created_at":{"seconds":1}
- pipeline := struct {
- api.Pipeline
- CreatedAtDateTime string `json:"created_at"`
- }{
- *apiPipeline,
- createdAt,
+ newPipeline, err := s.resourceManager.CreatePipeline(pipelineName, pipelineDescription, pipelineFile)
+ if err != nil {
+ s.writeErrorToResponse(w, http.StatusInternalServerError, util.Wrap(err, "Error creating pipeline"))
+ return
 }
- pipelineJson, err := json.Marshal(pipeline)
+
+ w.Header().Set("Content-Type", "application/json")
+ marshaler := &jsonpb.Marshaler{EnumsAsInts: false, OrigName: true}
+ err = marshaler.Marshal(w, ToApiPipeline(newPipeline))
 if err != nil {
 s.writeErrorToResponse(w, http.StatusInternalServerError, util.Wrap(err, "Error creating pipeline"))
 return
 }
+}
+
+// HTTP multipart endpoint for uploading pipeline version file.
+// https://www.w3.org/Protocols/rfc1341/7_2_Multipart.html
+// This endpoint is not exposed through grpc endpoint, since grpc-gateway can't convert the gRPC
+// endpoint to the HTTP endpoint.
+// See https://github.com/grpc-ecosystem/grpc-gateway/issues/500
+// Thus we create the HTTP endpoint directly and use swagger to auto-generate the HTTP client. 
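+// A typical client call looks like the following (host and pipeline id are
+// illustrative placeholders; the form field and query keys match the constants above):
+//   curl -F uploadfile=@pipeline.yaml \
+//     'http://<api-server>/apis/v1beta1/pipelines/upload_version?name=v2&pipelineid=<pipeline-id>'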
+func (s *PipelineUploadServer) UploadPipelineVersion(w http.ResponseWriter, r *http.Request) {
+ glog.Infof("Upload pipeline version called")
+ file, header, err := r.FormFile(FormFileKey)
+ if err != nil {
+ s.writeErrorToResponse(w, http.StatusBadRequest, util.Wrap(err, "Failed to read pipeline version from file"))
+ return
+ }
+ defer file.Close()
+
+ pipelineFile, err := ReadPipelineFile(header.Filename, file, MaxFileLength)
+ if err != nil {
+ s.writeErrorToResponse(w, http.StatusBadRequest, util.Wrap(err, "Error reading pipeline version file."))
+ return
+ }
+
+ versionNameQueryString := r.URL.Query().Get(NameQueryStringKey)
+ // If new version's name is not included in query string, use file name.
+ pipelineVersionName, err := GetPipelineName(versionNameQueryString, header.Filename)
+ if err != nil {
+ s.writeErrorToResponse(w, http.StatusBadRequest, util.Wrap(err, "Invalid pipeline version name."))
+ return
+ }
+
+ pipelineId := r.URL.Query().Get(PipelineKey)
+ if len(pipelineId) == 0 {
+ s.writeErrorToResponse(w, http.StatusBadRequest, errors.New("Please specify a pipeline id when creating versions."))
+ return
+ }
+
+ newPipelineVersion, err := s.resourceManager.CreatePipelineVersion(
+ &api.PipelineVersion{
+ Name: pipelineVersionName,
+ ResourceReferences: []*api.ResourceReference{
+ &api.ResourceReference{
+ Key: &api.ResourceKey{
+ Id: pipelineId,
+ Type: api.ResourceType_PIPELINE,
+ },
+ Relationship: api.Relationship_OWNER,
+ },
+ },
+ }, pipelineFile)
+ if err != nil {
+ s.writeErrorToResponse(w, http.StatusInternalServerError, util.Wrap(err, "Error creating pipeline version"))
+ return
+ }
 
 w.Header().Set("Content-Type", "application/json")
- w.Write(pipelineJson)
+ marshaler := &jsonpb.Marshaler{EnumsAsInts: false, OrigName: true}
+ createdPipelineVersion, err := ToApiPipelineVersion(newPipelineVersion)
+ if err != nil {
+ s.writeErrorToResponse(w, http.StatusInternalServerError, util.Wrap(err, "Error creating pipeline version"))
+ return
+ }
+ err = marshaler.Marshal(w, createdPipelineVersion)
+ if err != nil {
+ s.writeErrorToResponse(w, http.StatusInternalServerError, util.Wrap(err, "Error creating pipeline version"))
+ return
+ }
 }
 
 func (s *PipelineUploadServer) writeErrorToResponse(w http.ResponseWriter, code int, err error) {
diff --git a/backend/src/apiserver/server/pipeline_upload_server_test.go b/backend/src/apiserver/server/pipeline_upload_server_test.go
index 2cf127d5371..806ea0aec56 100644
--- a/backend/src/apiserver/server/pipeline_upload_server_test.go
+++ b/backend/src/apiserver/server/pipeline_upload_server_test.go
@@ -16,25 +16,28 @@ package server
 
 import (
 "bytes"
+ "encoding/json"
 "fmt"
+ "io"
 "mime/multipart"
 "net/http"
 "net/http/httptest"
 "net/url"
- "testing"
-
- "io"
- "os"
+ "os"
+ "testing"
 
 "github.com/kubeflow/pipelines/backend/src/apiserver/list"
 "github.com/kubeflow/pipelines/backend/src/apiserver/model"
 "github.com/kubeflow/pipelines/backend/src/apiserver/resource"
- "github.com/kubeflow/pipelines/backend/src/apiserver/storage"
 "github.com/kubeflow/pipelines/backend/src/common/util"
 "github.com/stretchr/testify/assert"
 )
 
+const (
+ fakeVersionUUID = "123e4567-e89b-12d3-a456-526655440000"
+ fakeVersionName = "a_fake_version_name"
+)
+
 func TestUploadPipeline_YAML(t *testing.T) {
 clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch())
 resourceManager := resource.NewResourceManager(clientManager)
@@ -51,11 +54,20 @@ func TestUploadPipeline_YAML(t *testing.T) {
 handler := http.HandlerFunc(server.UploadPipeline) 
handler.ServeHTTP(rr, req)
 assert.Equal(t, 200, rr.Code)
- // Verify time format is RFC3339
- assert.Contains(t, rr.Body.String(), `"created_at":"1970-01-01T00:00:01Z"`)
+ // Verify time format is RFC3339.
+ parsedResponse := struct {
+ CreatedAt string `json:"created_at"`
+ DefaultVersion struct {
+ CreatedAt string `json:"created_at"`
+ } `json:"default_version"`
+ }{}
+ json.Unmarshal(rr.Body.Bytes(), &parsedResponse)
+ assert.Equal(t, "1970-01-01T00:00:01Z", parsedResponse.CreatedAt)
+ assert.Equal(t, "1970-01-01T00:00:01Z", parsedResponse.DefaultVersion.CreatedAt)
 
 // Verify stored in object store
- template, err := clientManager.ObjectStore().GetFile(storage.CreatePipelinePath(resource.DefaultFakeUUID))
+ objStore := clientManager.ObjectStore()
+ template, err := objStore.GetFile(objStore.GetPipelineKey(resource.DefaultFakeUUID))
 assert.Nil(t, err)
 assert.NotNil(t, template)
 
@@ -84,6 +96,54 @@ func TestUploadPipeline_YAML(t *testing.T) {
 assert.Equal(t, str, "")
 assert.Equal(t, 1, total_size)
 assert.Equal(t, pkgsExpect, pkg)
+
+ // Upload a new version under this pipeline
+
+ // Set the fake uuid generator with a new uuid to avoid generating the same uuid as above.
+ clientManager.UpdateUUID(util.NewFakeUUIDGeneratorOrFatal(fakeVersionUUID, nil))
+ resourceManager = resource.NewResourceManager(clientManager)
+ server = PipelineUploadServer{resourceManager: resourceManager}
+ req, _ = http.NewRequest("POST", "/apis/v1beta1/pipelines/upload_version?name="+fakeVersionName+"&pipelineid="+resource.DefaultFakeUUID, bytes.NewReader(b.Bytes()))
+ req.Header.Set("Content-Type", w.FormDataContentType())
+ rr = httptest.NewRecorder()
+ handler = http.HandlerFunc(server.UploadPipelineVersion)
+ handler.ServeHTTP(rr, req)
+
+ assert.Equal(t, 200, rr.Code)
+ assert.Contains(t, rr.Body.String(), `"created_at":"1970-01-01T00:00:02Z"`)
+
+ // Verify stored in object store
+ objStore = clientManager.ObjectStore()
+ template, err = objStore.GetFile(objStore.GetPipelineKey(fakeVersionUUID))
+ assert.Nil(t, err)
+ assert.NotNil(t, template)
+ opts, err = list.NewOptions(&model.PipelineVersion{}, 2, "", nil)
+ assert.Nil(t, err)
+ // Verify metadata in db
+ versionsExpect := []*model.PipelineVersion{
+ {
+ UUID: resource.DefaultFakeUUID,
+ CreatedAtInSec: 1,
+ Name: "hello-world.yaml",
+ Parameters: "[]",
+ Status: model.PipelineVersionReady,
+ PipelineId: resource.DefaultFakeUUID,
+ },
+ {
+ UUID: fakeVersionUUID,
+ CreatedAtInSec: 2,
+ Name: fakeVersionName,
+ Parameters: "[]",
+ Status: model.PipelineVersionReady,
+ PipelineId: resource.DefaultFakeUUID,
+ },
+ }
+ // Expect 2 versions: one created by default when the pipeline was uploaded, and one we just created.
+ versions, total_size, str, err := clientManager.PipelineStore().ListPipelineVersions(resource.DefaultFakeUUID, opts)
+ assert.Nil(t, err)
+ assert.Equal(t, str, "")
+ assert.Equal(t, 2, total_size)
+ assert.Equal(t, versionsExpect, versions)
 }
 
 func TestUploadPipeline_Tarball(t *testing.T) {
@@ -107,7 +167,8 @@ func TestUploadPipeline_Tarball(t *testing.T) {
 assert.Contains(t, rr.Body.String(), `"created_at":"1970-01-01T00:00:01Z"`)
 
 // Verify stored in object store
- template, err := clientManager.ObjectStore().GetFile(storage.CreatePipelinePath(resource.DefaultFakeUUID))
+ objStore := clientManager.ObjectStore()
+ template, err := objStore.GetFile(objStore.GetPipelineKey(resource.DefaultFakeUUID))
 assert.Nil(t, err)
 assert.NotNil(t, template)
 
@@ -135,6 +196,60 @@ func TestUploadPipeline_Tarball(t *testing.T) {
 assert.Equal(t, str, "") 
assert.Equal(t, 1, total_size)
 assert.Equal(t, pkgsExpect, pkg)
+
+ // Upload a new version under this pipeline
+
+ // Set the fake uuid generator with a new uuid to avoid generating the same uuid as above.
+ clientManager.UpdateUUID(util.NewFakeUUIDGeneratorOrFatal(fakeVersionUUID, nil))
+ resourceManager = resource.NewResourceManager(clientManager)
+ server = PipelineUploadServer{resourceManager: resourceManager}
+ b = &bytes.Buffer{}
+ w = multipart.NewWriter(b)
+ part, _ = w.CreateFormFile("uploadfile", "arguments-version.tar.gz")
+ fileReader, _ = os.Open("test/arguments_tarball/arguments-version.tar.gz")
+ io.Copy(part, fileReader)
+ w.Close()
+ req, _ = http.NewRequest("POST", "/apis/v1beta1/pipelines/upload_version?pipelineid="+resource.DefaultFakeUUID, bytes.NewReader(b.Bytes()))
+ req.Header.Set("Content-Type", w.FormDataContentType())
+ rr = httptest.NewRecorder()
+ handler = http.HandlerFunc(server.UploadPipelineVersion)
+ handler.ServeHTTP(rr, req)
+
+ assert.Equal(t, 200, rr.Code)
+ assert.Contains(t, rr.Body.String(), `"created_at":"1970-01-01T00:00:02Z"`)
+
+ // Verify stored in object store
+ objStore = clientManager.ObjectStore()
+ template, err = objStore.GetFile(objStore.GetPipelineKey(fakeVersionUUID))
+ assert.Nil(t, err)
+ assert.NotNil(t, template)
+ opts, err = list.NewOptions(&model.PipelineVersion{}, 2, "", nil)
+ assert.Nil(t, err)
+ // Verify metadata in db
+ versionsExpect := []*model.PipelineVersion{
+ {
+ UUID: resource.DefaultFakeUUID,
+ CreatedAtInSec: 1,
+ Name: "arguments.tar.gz",
+ Parameters: "[{\"name\":\"param1\",\"value\":\"hello\"},{\"name\":\"param2\"}]",
+ Status: model.PipelineVersionReady,
+ PipelineId: resource.DefaultFakeUUID,
+ },
+ {
+ UUID: fakeVersionUUID,
+ CreatedAtInSec: 2,
+ Name: "arguments-version.tar.gz",
+ Parameters: "[{\"name\":\"param1\",\"value\":\"hello\"},{\"name\":\"param2\"}]",
+ Status: model.PipelineVersionReady,
+ PipelineId: resource.DefaultFakeUUID,
+ },
+ }
+ // Expect 2 versions: one created by default when the pipeline was uploaded, and one we just created.
+ versions, total_size, str, err := clientManager.PipelineStore().ListPipelineVersions(resource.DefaultFakeUUID, opts)
+ assert.Nil(t, err)
+ assert.Equal(t, str, "")
+ assert.Equal(t, 2, total_size)
+ assert.Equal(t, versionsExpect, versions)
 }
 
 func TestUploadPipeline_GetFormFileError(t *testing.T) {
@@ -175,7 +290,8 @@ func TestUploadPipeline_SpecifyFileName(t *testing.T) {
 assert.Equal(t, 200, rr.Code)
 
 // Verify stored in object store
- template, err := clientManager.ObjectStore().GetFile(storage.CreatePipelinePath(resource.DefaultFakeUUID))
+ objStore := clientManager.ObjectStore()
+ template, err := objStore.GetFile(objStore.GetPipelineKey(resource.DefaultFakeUUID))
 assert.Nil(t, err)
 assert.NotNil(t, template)
 
@@ -225,3 +341,121 @@ func TestUploadPipeline_FileNameTooLong(t *testing.T) {
 assert.Equal(t, 400, rr.Code)
 assert.Contains(t, string(rr.Body.Bytes()), "Pipeline name too long")
 }
+
+func TestUploadPipeline_SpecifyFileDescription(t *testing.T) {
+ clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch())
+ resourceManager := resource.NewResourceManager(clientManager)
+ server := PipelineUploadServer{resourceManager: resourceManager}
+ b := &bytes.Buffer{}
+ w := multipart.NewWriter(b)
+ part, _ := w.CreateFormFile("uploadfile", "hello-world.yaml")
+ io.Copy(part, bytes.NewBufferString("apiVersion: argoproj.io/v1alpha1\nkind: Workflow"))
+ w.Close()
+ req, _ := http.NewRequest("POST", 
fmt.Sprintf("/apis/v1beta1/pipelines/upload?name=%s&description=%s", url.PathEscape("foo bar"), url.PathEscape("description of foo bar")), bytes.NewReader(b.Bytes())) + req.Header.Set("Content-Type", w.FormDataContentType()) + + rr := httptest.NewRecorder() + handler := http.HandlerFunc(server.UploadPipeline) + handler.ServeHTTP(rr, req) + assert.Equal(t, 200, rr.Code) + + // Verify stored in object store + objStore := clientManager.ObjectStore() + template, err := objStore.GetFile(objStore.GetPipelineKey(resource.DefaultFakeUUID)) + assert.Nil(t, err) + assert.NotNil(t, template) + + opts, err := list.NewOptions(&model.Pipeline{}, 2, "", nil) + assert.Nil(t, err) + // Verify metadata in db + pkgsExpect := []*model.Pipeline{ + { + UUID: resource.DefaultFakeUUID, + CreatedAtInSec: 1, + Name: "foo bar", + Parameters: "[]", + Status: model.PipelineReady, + DefaultVersionId: resource.DefaultFakeUUID, + DefaultVersion: &model.PipelineVersion{ + UUID: resource.DefaultFakeUUID, + CreatedAtInSec: 1, + Name: "foo bar", + Parameters: "[]", + Status: model.PipelineVersionReady, + PipelineId: resource.DefaultFakeUUID, + }, + Description: "description of foo bar", + }} + pkg, total_size, str, err := clientManager.PipelineStore().ListPipelines(opts) + assert.Nil(t, err) + assert.Equal(t, 1, total_size) + assert.Equal(t, str, "") + assert.Equal(t, pkgsExpect, pkg) +} + +func TestUploadPipelineVersion_GetFromFileError(t *testing.T) { + clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) + resourceManager := resource.NewResourceManager(clientManager) + server := PipelineUploadServer{resourceManager: resourceManager} + b := &bytes.Buffer{} + w := multipart.NewWriter(b) + part, _ := w.CreateFormFile("uploadfile", "hello-world.yaml") + io.Copy(part, bytes.NewBufferString("apiVersion: argoproj.io/v1alpha1\nkind: Workflow")) + w.Close() + req, _ := http.NewRequest("POST", "/apis/v1beta1/pipelines/upload", bytes.NewReader(b.Bytes())) + req.Header.Set("Content-Type", w.FormDataContentType()) + rr := httptest.NewRecorder() + handler := http.HandlerFunc(server.UploadPipeline) + handler.ServeHTTP(rr, req) + // Upload a new version under this pipeline + + // Set the fake uuid generator with a new uuid to avoid generate a same uuid as above. 
+ clientManager.UpdateUUID(util.NewFakeUUIDGeneratorOrFatal(fakeVersionUUID, nil))
+ resourceManager = resource.NewResourceManager(clientManager)
+ server = PipelineUploadServer{resourceManager: resourceManager}
+ b = &bytes.Buffer{}
+ b.WriteString("I am invalid file")
+ w = multipart.NewWriter(b)
+ w.CreateFormFile("uploadfile", "hello-world.yaml")
+ w.Close()
+ req, _ = http.NewRequest("POST", "/apis/v1beta1/pipelines/upload_version?name="+fakeVersionName+"&pipelineid="+resource.DefaultFakeUUID, bytes.NewReader(b.Bytes()))
+ req.Header.Set("Content-Type", w.FormDataContentType())
+ rr = httptest.NewRecorder()
+ handler = http.HandlerFunc(server.UploadPipelineVersion)
+ handler.ServeHTTP(rr, req)
+
+ assert.Equal(t, 400, rr.Code)
+ assert.Contains(t, string(rr.Body.Bytes()), "Failed to read pipeline version")
+}
+
+func TestUploadPipelineVersion_FileNameTooLong(t *testing.T) {
+ clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch())
+ resourceManager := resource.NewResourceManager(clientManager)
+ server := PipelineUploadServer{resourceManager: resourceManager}
+ b := &bytes.Buffer{}
+ w := multipart.NewWriter(b)
+ part, _ := w.CreateFormFile("uploadfile", "hello-world.yaml")
+ io.Copy(part, bytes.NewBufferString("apiVersion: argoproj.io/v1alpha1\nkind: Workflow"))
+ w.Close()
+ req, _ := http.NewRequest("POST", "/apis/v1beta1/pipelines/upload", bytes.NewReader(b.Bytes()))
+ req.Header.Set("Content-Type", w.FormDataContentType())
+ rr := httptest.NewRecorder()
+ handler := http.HandlerFunc(server.UploadPipeline)
+ handler.ServeHTTP(rr, req)
+
+ // Upload a new version under this pipeline
+
+ // Set the fake uuid generator with a new uuid to avoid generating the same uuid as above.
+ clientManager.UpdateUUID(util.NewFakeUUIDGeneratorOrFatal(fakeVersionUUID, nil))
+ resourceManager = resource.NewResourceManager(clientManager)
+ server = PipelineUploadServer{resourceManager: resourceManager}
+ encodedName := url.PathEscape(
+ "this is a loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooog name")
+ req, _ = http.NewRequest("POST", "/apis/v1beta1/pipelines/upload_version?name="+encodedName+"&pipelineid="+resource.DefaultFakeUUID, bytes.NewReader(b.Bytes()))
+ req.Header.Set("Content-Type", w.FormDataContentType())
+ rr = httptest.NewRecorder()
+ handler = http.HandlerFunc(server.UploadPipelineVersion)
+ handler.ServeHTTP(rr, req)
+ assert.Equal(t, 400, rr.Code)
+ assert.Contains(t, string(rr.Body.Bytes()), "Pipeline name too long")
+}
diff --git a/backend/src/apiserver/server/run_metric_util.go b/backend/src/apiserver/server/run_metric_util.go
index dcabe37a81b..a9ca16822d2 100644
--- a/backend/src/apiserver/server/run_metric_util.go
+++ b/backend/src/apiserver/server/run_metric_util.go
@@ -24,7 +24,12 @@ import (
 )
 
 const (
- metricNamePattern = "^[a-z]([-a-z0-9]{0,62}[a-z0-9])?$"
+ // This regex expresses the following constraints:
+ // * Allows lowercase/uppercase letters
+ // * Allows "_", "-" and numbers in the middle
+ // * Additionally, numbers are also allowed at the end
+ // * At most 64 characters
+ metricNamePattern = "^[a-zA-Z]([-_a-zA-Z0-9]{0,62}[a-zA-Z0-9])?$"
 )
 
 // ValidateRunMetric validates RunMetric fields from request. 
diff --git a/backend/src/apiserver/server/run_metric_util_test.go b/backend/src/apiserver/server/run_metric_util_test.go index 16cb5973d75..a2ea50b23bc 100644 --- a/backend/src/apiserver/server/run_metric_util_test.go +++ b/backend/src/apiserver/server/run_metric_util_test.go @@ -76,12 +76,21 @@ func TestValidateRunMetric_InvalidNodeIDs(t *testing.T) { } func TestNewReportRunMetricResult_OK(t *testing.T) { - expected := newReportRunMetricResult("metric-1", "node-1") - expected.Status = api.ReportRunMetricsResponse_ReportRunMetricResult_OK + tests := []struct { + metricName string + }{ + {"metric-1"}, + {"Metric_2"}, + {"Metric3Name"}, + } - actual := NewReportRunMetricResult(expected.GetMetricName(), expected.GetMetricNodeId(), nil) + for _, tc := range tests { + expected := newReportRunMetricResult(tc.metricName, "node-1") + expected.Status = api.ReportRunMetricsResponse_ReportRunMetricResult_OK + actual := NewReportRunMetricResult(expected.GetMetricName(), expected.GetMetricNodeId(), nil) - assert.Equal(t, expected, actual) + assert.Equalf(t, expected, actual, "TestNewReportRunMetricResult_OK metric name '%s' should be OK", tc.metricName) + } } func TestNewReportRunMetricResult_UnknownError(t *testing.T) { diff --git a/backend/src/apiserver/server/run_server.go b/backend/src/apiserver/server/run_server.go index e39b780485d..1ebe53ac617 100644 --- a/backend/src/apiserver/server/run_server.go +++ b/backend/src/apiserver/server/run_server.go @@ -16,11 +16,14 @@ package server import ( "context" + "github.com/golang/protobuf/ptypes/empty" api "github.com/kubeflow/pipelines/backend/api/go_client" + "github.com/kubeflow/pipelines/backend/src/apiserver/common" "github.com/kubeflow/pipelines/backend/src/apiserver/model" "github.com/kubeflow/pipelines/backend/src/apiserver/resource" "github.com/kubeflow/pipelines/backend/src/common/util" + "github.com/pkg/errors" ) type RunServer struct { @@ -32,6 +35,11 @@ func (s *RunServer) CreateRun(ctx context.Context, request *api.CreateRunRequest if err != nil { return nil, util.Wrap(err, "Validate create run request failed.") } + err = CanAccessExperimentInResourceReferences(s.resourceManager, ctx, request.Run.ResourceReferences) + if err != nil { + return nil, util.Wrap(err, "Failed to authorize the request.") + } + run, err := s.resourceManager.CreateRun(request.Run) if err != nil { return nil, util.Wrap(err, "Failed to create a new run.") @@ -40,6 +48,10 @@ func (s *RunServer) CreateRun(ctx context.Context, request *api.CreateRunRequest } func (s *RunServer) GetRun(ctx context.Context, request *api.GetRunRequest) (*api.RunDetail, error) { + err := s.canAccessRun(ctx, request.RunId) + if err != nil { + return nil, util.Wrap(err, "Failed to authorize the request.") + } run, err := s.resourceManager.GetRun(request.RunId) if err != nil { return nil, err @@ -49,7 +61,6 @@ func (s *RunServer) GetRun(ctx context.Context, request *api.GetRunRequest) (*ap func (s *RunServer) ListRuns(ctx context.Context, request *api.ListRunsRequest) (*api.ListRunsResponse, error) { opts, err := validatedListOptions(&model.Run{}, request.PageToken, int(request.PageSize), request.SortBy, request.Filter) - if err != nil { return nil, util.Wrap(err, "Failed to create list options") } @@ -58,6 +69,36 @@ func (s *RunServer) ListRuns(ctx context.Context, request *api.ListRunsRequest) if err != nil { return nil, util.Wrap(err, "Validating filter failed.") } + + if common.IsMultiUserMode() { + refKey := filterContext.ReferenceKey + if refKey == nil { + return nil, 
util.NewInvalidInputError("ListRuns must filter by resource reference in multi-user mode.") + } + if refKey.Type == common.Namespace { + namespace := refKey.ID + if len(namespace) == 0 { + return nil, util.NewInvalidInputError("Invalid resource references for ListRuns. Namespace is empty.") + } + err = isAuthorized(s.resourceManager, ctx, namespace) + if err != nil { + return nil, util.Wrap(err, "Failed to authorize with namespace resource reference.") + } + } else if refKey.Type == common.Experiment || refKey.Type == "ExperimentUUID" { + // "ExperimentUUID" was introduced as a perf optimization. We accept both refKey.Type values for backward compatibility. + experimentID := refKey.ID + if len(experimentID) == 0 { + return nil, util.NewInvalidInputError("Invalid resource references for run. Experiment ID is empty.") + } + err = CanAccessExperiment(s.resourceManager, ctx, experimentID) + if err != nil { + return nil, util.Wrap(err, "Failed to authorize with experiment resource reference.") + } + } else { + return nil, util.NewInvalidInputError("Invalid resource references for ListRuns. Got %+v", request.ResourceReferenceKey) + } + } + runs, total_size, nextPageToken, err := s.resourceManager.ListRuns(filterContext, opts) if err != nil { return nil, util.Wrap(err, "Failed to list runs.") @@ -66,7 +107,11 @@ } func (s *RunServer) ArchiveRun(ctx context.Context, request *api.ArchiveRunRequest) (*empty.Empty, error) { - err := s.resourceManager.ArchiveRun(request.Id) + err := s.canAccessRun(ctx, request.Id) + if err != nil { + return nil, util.Wrap(err, "Failed to authorize the request.") + } + err = s.resourceManager.ArchiveRun(request.Id) if err != nil { return nil, err } @@ -74,7 +119,11 @@ } func (s *RunServer) UnarchiveRun(ctx context.Context, request *api.UnarchiveRunRequest) (*empty.Empty, error) { - err := s.resourceManager.UnarchiveRun(request.Id) + err := s.canAccessRun(ctx, request.Id) + if err != nil { + return nil, util.Wrap(err, "Failed to authorize the request.") + } + err = s.resourceManager.UnarchiveRun(request.Id) if err != nil { return nil, err } @@ -82,7 +131,11 @@ } func (s *RunServer) DeleteRun(ctx context.Context, request *api.DeleteRunRequest) (*empty.Empty, error) { - err := s.resourceManager.DeleteRun(request.Id) + err := s.canAccessRun(ctx, request.Id) + if err != nil { + return nil, util.Wrap(err, "Failed to authorize the request.") + } + err = s.resourceManager.DeleteRun(request.Id) if err != nil { return nil, err } @@ -128,13 +181,20 @@ func (s *RunServer) validateCreateRunRequest(request *api.CreateRunRequest) erro } if err := ValidatePipelineSpec(s.resourceManager, run.PipelineSpec); err != nil { - return util.Wrap(err, "The pipeline spec is invalid.") + if _, errResourceReference := CheckPipelineVersionReference(s.resourceManager, run.ResourceReferences); errResourceReference != nil { + return util.Wrap(err, "Neither pipeline spec nor pipeline version is valid. 
"+errResourceReference.Error()) + return nil + } + return nil } func (s *RunServer) TerminateRun(ctx context.Context, request *api.TerminateRunRequest) (*empty.Empty, error) { - err := s.resourceManager.TerminateRun(request.RunId) + err := s.canAccessRun(ctx, request.RunId) + if err != nil { + return nil, util.Wrap(err, "Failed to authorize the request.") + } + err = s.resourceManager.TerminateRun(request.RunId) if err != nil { return nil, err } @@ -142,7 +202,11 @@ } func (s *RunServer) RetryRun(ctx context.Context, request *api.RetryRunRequest) (*empty.Empty, error) { - err := s.resourceManager.RetryRun(request.RunId) + err := s.canAccessRun(ctx, request.RunId) + if err != nil { + return nil, util.Wrap(err, "Failed to authorize the request.") + } + err = s.resourceManager.RetryRun(request.RunId) if err != nil { return nil, err } @@ -150,6 +214,26 @@ } +func (s *RunServer) canAccessRun(ctx context.Context, runId string) error { + if common.IsMultiUserMode() == false { + // Skip authz if not multi-user mode. + return nil + } + namespace, err := s.resourceManager.GetNamespaceFromRunID(runId) + if err != nil { + return util.Wrap(err, "Failed to authorize with the run Id.") + } + if len(namespace) == 0 { + return util.NewInternalServerError(errors.New("There is no namespace found"), "There is no namespace found") + } + + err = isAuthorized(s.resourceManager, ctx, namespace) + if err != nil { + return util.Wrap(err, "Failed to authorize with API resource references") + } + return nil +} + func NewRunServer(resourceManager *resource.ResourceManager) *RunServer { return &RunServer{resourceManager: resourceManager} } diff --git a/backend/src/apiserver/server/run_server_test.go b/backend/src/apiserver/server/run_server_test.go index 2dd81bd09bd..c53a81625a7 100644 --- a/backend/src/apiserver/server/run_server_test.go +++ b/backend/src/apiserver/server/run_server_test.go @@ -2,14 +2,19 @@ package server import ( "context" + "strings" "testing" "github.com/argoproj/argo/pkg/apis/workflow/v1alpha1" "github.com/golang/protobuf/ptypes/timestamp" + "github.com/google/go-cmp/cmp" api "github.com/kubeflow/pipelines/backend/api/go_client" + "github.com/kubeflow/pipelines/backend/src/apiserver/common" "github.com/kubeflow/pipelines/backend/src/common/util" + "github.com/spf13/viper" "github.com/stretchr/testify/assert" "google.golang.org/grpc/codes" + "google.golang.org/grpc/metadata" ) func TestCreateRun(t *testing.T) { @@ -17,7 +22,7 @@ defer clients.Close() server := NewRunServer(manager) run := &api.Run{ - Name: "123", + Name: "run1", ResourceReferences: validReference, PipelineSpec: &api.PipelineSpec{ WorkflowManifest: testWorkflow.ToStringForStore(), @@ -31,15 +36,17 @@ expectedRuntimeWorkflow.Spec.Arguments.Parameters = []v1alpha1.Parameter{ {Name: "param1", Value: util.StringPointer("world")}} expectedRuntimeWorkflow.Labels = map[string]string{util.LabelKeyWorkflowRunId: "123e4567-e89b-12d3-a456-426655440000"} + expectedRuntimeWorkflow.Annotations = map[string]string{util.AnnotationKeyRunName: "run1"} expectedRuntimeWorkflow.Spec.ServiceAccountName = "pipeline-runner" expectedRunDetail := api.RunDetail{ Run: &api.Run{ - Id: "123e4567-e89b-12d3-a456-426655440000", - Name: "123", - StorageState: api.Run_STORAGESTATE_AVAILABLE, - CreatedAt: &timestamp.Timestamp{Seconds: 2}, -
ScheduledAt: &timestamp.Timestamp{}, - FinishedAt: &timestamp.Timestamp{}, + Id: "123e4567-e89b-12d3-a456-426655440000", + Name: "run1", + ServiceAccount: "pipeline-runner", + StorageState: api.Run_STORAGESTATE_AVAILABLE, + CreatedAt: &timestamp.Timestamp{Seconds: 2}, + ScheduledAt: &timestamp.Timestamp{}, + FinishedAt: &timestamp.Timestamp{}, PipelineSpec: &api.PipelineSpec{ WorkflowManifest: testWorkflow.ToStringForStore(), Parameters: []*api.Parameter{{Name: "param1", Value: "world"}}, @@ -47,7 +54,140 @@ ResourceReferences: []*api.ResourceReference{ { Key: &api.ResourceKey{Type: api.ResourceType_EXPERIMENT, Id: experiment.UUID}, - Name: "123", Relationship: api.Relationship_OWNER, + Name: "exp1", Relationship: api.Relationship_OWNER, + }, + }, + }, + PipelineRuntime: &api.PipelineRuntime{ + WorkflowManifest: util.NewWorkflow(expectedRuntimeWorkflow).ToStringForStore(), + }, + } + assert.Equal(t, expectedRunDetail, *runDetail) +} + +func TestCreateRunPatch(t *testing.T) { + clients, manager, experiment := initWithExperiment(t) + defer clients.Close() + server := NewRunServer(manager) + run := &api.Run{ + Name: "run1", + ResourceReferences: validReference, + PipelineSpec: &api.PipelineSpec{ + WorkflowManifest: testWorkflowPatch.ToStringForStore(), + Parameters: []*api.Parameter{ + {Name: "param1", Value: "test-default-bucket"}, + {Name: "param2", Value: "test-project-id"}}, + }, + } + runDetail, err := server.CreateRun(nil, &api.CreateRunRequest{Run: run}) + assert.Nil(t, err) + + expectedRuntimeWorkflow := testWorkflowPatch.DeepCopy() + expectedRuntimeWorkflow.Spec.Arguments.Parameters = []v1alpha1.Parameter{ + {Name: "param1", Value: util.StringPointer("test-default-bucket")}, + {Name: "param2", Value: util.StringPointer("test-project-id")}, + } + expectedRuntimeWorkflow.Labels = map[string]string{util.LabelKeyWorkflowRunId: "123e4567-e89b-12d3-a456-426655440000"} + expectedRuntimeWorkflow.Annotations = map[string]string{util.AnnotationKeyRunName: "run1"} + expectedRuntimeWorkflow.Spec.ServiceAccountName = "pipeline-runner" + expectedRunDetail := api.RunDetail{ + Run: &api.Run{ + Id: "123e4567-e89b-12d3-a456-426655440000", + Name: "run1", + ServiceAccount: "pipeline-runner", + StorageState: api.Run_STORAGESTATE_AVAILABLE, + CreatedAt: &timestamp.Timestamp{Seconds: 2}, + ScheduledAt: &timestamp.Timestamp{}, + FinishedAt: &timestamp.Timestamp{}, + PipelineSpec: &api.PipelineSpec{ + WorkflowManifest: testWorkflowPatch.ToStringForStore(), + Parameters: []*api.Parameter{ + {Name: "param1", Value: "test-default-bucket"}, + {Name: "param2", Value: "test-project-id"}}, + }, + ResourceReferences: []*api.ResourceReference{ + { + Key: &api.ResourceKey{Type: api.ResourceType_EXPERIMENT, Id: experiment.UUID}, + Name: "exp1", Relationship: api.Relationship_OWNER, + }, + }, + }, + PipelineRuntime: &api.PipelineRuntime{ + WorkflowManifest: util.NewWorkflow(expectedRuntimeWorkflow).ToStringForStore(), + }, + } + assert.Equal(t, expectedRunDetail, *runDetail) +} + +func TestCreateRun_Unauthorized(t *testing.T) { + viper.Set(common.MultiUserMode, "true") + defer viper.Set(common.MultiUserMode, "false") + + md := metadata.New(map[string]string{common.GoogleIAPUserIdentityHeader: common.GoogleIAPUserIdentityPrefix + "user@google.com"}) + ctx := metadata.NewIncomingContext(context.Background(), md) + + clients, manager, _ := initWithExperiment_KFAM_Unauthorized(t) + defer clients.Close() + server := NewRunServer(manager) + run := &api.Run{ + Name: "run1", + ResourceReferences: validReference, + PipelineSpec: 
&api.PipelineSpec{ + WorkflowManifest: testWorkflow.ToStringForStore(), + Parameters: []*api.Parameter{{Name: "param1", Value: "world"}}, + }, + } + _, err := server.CreateRun(ctx, &api.CreateRunRequest{Run: run}) + assert.NotNil(t, err) + assert.Contains(t, err.Error(), "Unauthorized access") +} + +func TestCreateRun_Multiuser(t *testing.T) { + viper.Set(common.MultiUserMode, "true") + viper.Set(common.DefaultPipelineRunnerServiceAccount, "default-editor") + defer viper.Set(common.MultiUserMode, "false") + defer viper.Set(common.DefaultPipelineRunnerServiceAccount, "pipeline-runner") + + md := metadata.New(map[string]string{common.GoogleIAPUserIdentityHeader: common.GoogleIAPUserIdentityPrefix + "user@google.com"}) + ctx := metadata.NewIncomingContext(context.Background(), md) + + clients, manager, experiment := initWithExperiment(t) + defer clients.Close() + server := NewRunServer(manager) + run := &api.Run{ + Name: "run1", + ResourceReferences: validReference, + PipelineSpec: &api.PipelineSpec{ + WorkflowManifest: testWorkflow.ToStringForStore(), + Parameters: []*api.Parameter{{Name: "param1", Value: "world"}}, + }, + } + runDetail, err := server.CreateRun(ctx, &api.CreateRunRequest{Run: run}) + assert.Nil(t, err) + + expectedRuntimeWorkflow := testWorkflow.DeepCopy() + expectedRuntimeWorkflow.Spec.Arguments.Parameters = []v1alpha1.Parameter{ + {Name: "param1", Value: util.StringPointer("world")}} + expectedRuntimeWorkflow.Labels = map[string]string{util.LabelKeyWorkflowRunId: "123e4567-e89b-12d3-a456-426655440000"} + expectedRuntimeWorkflow.Annotations = map[string]string{util.AnnotationKeyRunName: "run1"} + expectedRuntimeWorkflow.Spec.ServiceAccountName = "default-editor" // In multi-user mode, we use the default service account. + expectedRunDetail := api.RunDetail{ + Run: &api.Run{ + Id: "123e4567-e89b-12d3-a456-426655440000", + Name: "run1", + ServiceAccount: "default-editor", + StorageState: api.Run_STORAGESTATE_AVAILABLE, + CreatedAt: &timestamp.Timestamp{Seconds: 2}, + ScheduledAt: &timestamp.Timestamp{}, + FinishedAt: &timestamp.Timestamp{}, + PipelineSpec: &api.PipelineSpec{ + WorkflowManifest: testWorkflow.ToStringForStore(), + Parameters: []*api.Parameter{{Name: "param1", Value: "world"}}, + }, + ResourceReferences: []*api.ResourceReference{ + { + Key: &api.ResourceKey{Type: api.ResourceType_EXPERIMENT, Id: experiment.UUID}, + Name: "exp1", Relationship: api.Relationship_OWNER, }, }, }, @@ -63,7 +203,7 @@ func TestListRun(t *testing.T) { defer clients.Close() server := NewRunServer(manager) run := &api.Run{ - Name: "123", + Name: "run1", ResourceReferences: validReference, PipelineSpec: &api.PipelineSpec{ WorkflowManifest: testWorkflow.ToStringForStore(), @@ -74,12 +214,13 @@ assert.Nil(t, err) expectedRun := &api.Run{ - Id: "123e4567-e89b-12d3-a456-426655440000", - Name: "123", - StorageState: api.Run_STORAGESTATE_AVAILABLE, - CreatedAt: &timestamp.Timestamp{Seconds: 2}, - ScheduledAt: &timestamp.Timestamp{}, - FinishedAt: &timestamp.Timestamp{}, + Id: "123e4567-e89b-12d3-a456-426655440000", + Name: "run1", + ServiceAccount: "pipeline-runner", + StorageState: api.Run_STORAGESTATE_AVAILABLE, + CreatedAt: &timestamp.Timestamp{Seconds: 2}, + ScheduledAt: &timestamp.Timestamp{}, + FinishedAt: &timestamp.Timestamp{}, PipelineSpec: &api.PipelineSpec{ WorkflowManifest: testWorkflow.ToStringForStore(), Parameters: []*api.Parameter{{Name: "param1", Value: "world"}}, @@ -87,7 +228,7 @@ ResourceReferences: []*api.ResourceReference{ { Key: &api.ResourceKey{Type: 
api.ResourceType_EXPERIMENT, Id: experiment.UUID}, - Name: "123", Relationship: api.Relationship_OWNER, + Name: "exp1", Relationship: api.Relationship_OWNER, }, }, } @@ -96,12 +237,158 @@ assert.Equal(t, expectedRun, listRunsResponse.Runs[0]) } +func TestListRuns_Unauthorized(t *testing.T) { + viper.Set(common.MultiUserMode, "true") + defer viper.Set(common.MultiUserMode, "false") + + md := metadata.New(map[string]string{common.GoogleIAPUserIdentityHeader: common.GoogleIAPUserIdentityPrefix + "user@google.com"}) + ctx := metadata.NewIncomingContext(context.Background(), md) + + clients, manager, _ := initWithExperiment_KFAM_Unauthorized(t) + defer clients.Close() + server := NewRunServer(manager) + _, err := server.ListRuns(ctx, &api.ListRunsRequest{ + ResourceReferenceKey: &api.ResourceKey{ + Type: api.ResourceType_NAMESPACE, + Id: "ns1", + }, + }) + assert.NotNil(t, err) + assert.Contains(t, err.Error(), "Unauthorized access") +} + +func TestListRuns_Multiuser(t *testing.T) { + viper.Set(common.MultiUserMode, "true") + defer viper.Set(common.MultiUserMode, "false") + + md := metadata.New(map[string]string{common.GoogleIAPUserIdentityHeader: common.GoogleIAPUserIdentityPrefix + "user@google.com"}) + ctx := metadata.NewIncomingContext(context.Background(), md) + + clients, manager, experiment := initWithExperiment(t) + defer clients.Close() + server := NewRunServer(manager) + run := &api.Run{ + Name: "run1", + ResourceReferences: validReference, + PipelineSpec: &api.PipelineSpec{ + WorkflowManifest: testWorkflow.ToStringForStore(), + Parameters: []*api.Parameter{{Name: "param1", Value: "world"}}, + }, + } + _, err := server.CreateRun(ctx, &api.CreateRunRequest{Run: run}) + assert.Nil(t, err) + + expectedRuns := []*api.Run{{ + Id: "123e4567-e89b-12d3-a456-426655440000", + Name: "run1", + ServiceAccount: "pipeline-runner", + StorageState: api.Run_STORAGESTATE_AVAILABLE, + CreatedAt: &timestamp.Timestamp{Seconds: 2}, + ScheduledAt: &timestamp.Timestamp{}, + FinishedAt: &timestamp.Timestamp{}, + PipelineSpec: &api.PipelineSpec{ + WorkflowManifest: testWorkflow.ToStringForStore(), + Parameters: []*api.Parameter{{Name: "param1", Value: "world"}}, + }, + ResourceReferences: []*api.ResourceReference{ + { + Key: &api.ResourceKey{Type: api.ResourceType_EXPERIMENT, Id: experiment.UUID}, + Name: "exp1", Relationship: api.Relationship_OWNER, + }, + }, + }} + expectedRunsEmpty := []*api.Run{} + + tests := []struct { + name string + request *api.ListRunsRequest + wantError bool + errorMessage string + expectedRuns []*api.Run + }{ + { + "Valid - filter by experiment", + &api.ListRunsRequest{ + ResourceReferenceKey: &api.ResourceKey{ + Type: api.ResourceType_EXPERIMENT, + Id: "123e4567-e89b-12d3-a456-426655440000", + }, + }, + false, + "", + expectedRuns, + }, + { + "Valid - filter by namespace", + &api.ListRunsRequest{ + ResourceReferenceKey: &api.ResourceKey{ + Type: api.ResourceType_NAMESPACE, + Id: "ns1", + }, + }, + false, + "", + expectedRuns, + }, + { + "Valid - filter by namespace - no result", + &api.ListRunsRequest{ + ResourceReferenceKey: &api.ResourceKey{ + Type: api.ResourceType_NAMESPACE, + Id: "no-such-ns", + }, + }, + false, + "", + expectedRunsEmpty, + }, + { + "Invalid - no filter", + &api.ListRunsRequest{}, + true, + "ListRuns must filter by resource reference", + nil, + }, + { + "Invalid - invalid filter type", + &api.ListRunsRequest{ + ResourceReferenceKey: &api.ResourceKey{ + Type: api.ResourceType_UNKNOWN_RESOURCE_TYPE, + Id: "unknown", + }, + }, + true, 
"Unrecognized resource reference type", + nil, + }, + } + + for _, tc := range tests { + response, err := server.ListRuns(ctx, tc.request) + + if tc.wantError { + if err == nil { + t.Errorf("TestListRuns_Multiuser(%v) expect error but got nil", tc.name) + } else if !strings.Contains(err.Error(), tc.errorMessage) { + t.Errorf("TestListRuns_Multiuser(%v) expect error containing: %v, but got: %v", tc.name, tc.errorMessage, err) + } + } else { + if err != nil { + t.Errorf("TestListRuns_Multiuser(%v) expect no error but got %v", tc.name, err) + } else if !cmp.Equal(tc.expectedRuns, response.Runs) { + t.Errorf("TestListRuns_Multiuser(%v) expect (%+v) but got (%+v)", tc.name, tc.expectedRuns, response.Runs) + } + } + } + +} + func TestValidateCreateRunRequest(t *testing.T) { clients, manager, _ := initWithExperiment(t) defer clients.Close() server := NewRunServer(manager) run := &api.Run{ - Name: "123", + Name: "run1", ResourceReferences: validReference, PipelineSpec: &api.PipelineSpec{ WorkflowManifest: testWorkflow.ToStringForStore(), @@ -112,6 +399,18 @@ func TestValidateCreateRunRequest(t *testing.T) { assert.Nil(t, err) } +func TestValidateCreateRunRequest_WithPipelineVersionReference(t *testing.T) { + clients, manager, _ := initWithExperimentAndPipelineVersion(t) + defer clients.Close() + server := NewRunServer(manager) + run := &api.Run{ + Name: "run1", + ResourceReferences: validReferencesOfExperimentAndPipelineVersion, + } + err := server.validateCreateRunRequest(&api.CreateRunRequest{Run: run}) + assert.Nil(t, err) +} + func TestValidateCreateRunRequest_EmptyName(t *testing.T) { clients, manager, _ := initWithExperiment(t) defer clients.Close() @@ -133,7 +432,7 @@ func TestValidateCreateRunRequest_NoExperiment(t *testing.T) { defer clients.Close() server := NewRunServer(manager) run := &api.Run{ - Name: "123", + Name: "run1", ResourceReferences: nil, PipelineSpec: &api.PipelineSpec{ WorkflowManifest: testWorkflow.ToStringForStore(), @@ -144,17 +443,17 @@ assert.Nil(t, err) } -func TestValidateCreateRunRequest_EmptyPipelineSpec(t *testing.T) { +func TestValidateCreateRunRequest_EmptyPipelineSpecAndEmptyPipelineVersion(t *testing.T) { clients, manager, _ := initWithExperiment(t) defer clients.Close() server := NewRunServer(manager) run := &api.Run{ - Name: "123", + Name: "run1", ResourceReferences: validReference, } err := server.validateCreateRunRequest(&api.CreateRunRequest{Run: run}) assert.NotNil(t, err) - assert.Contains(t, err.Error(), "Please specify a pipeline by providing a pipeline ID or workflow manifest") + assert.Contains(t, err.Error(), "Neither pipeline spec nor pipeline version is valid") } func TestValidateCreateRunRequest_TooMuchParameters(t *testing.T) { @@ -168,7 +467,7 @@ params = append(params, &api.Parameter{Name: "param2", Value: "world"}) } run := &api.Run{ - Name: "123", + Name: "run1", ResourceReferences: validReference, PipelineSpec: &api.PipelineSpec{ WorkflowManifest: testWorkflow.ToStringForStore(), @@ -290,3 +589,72 @@ func TestReportRunMetrics_PartialFailures(t *testing.T) { } assert.Equal(t, expectedResponse, response) } + +func TestCanAccessRun_Unauthorized(t *testing.T) { + viper.Set(common.MultiUserMode, "true") + defer viper.Set(common.MultiUserMode, "false") + + clients, manager, experiment := initWithExperiment_KFAM_Unauthorized(t) + defer clients.Close() + runServer := RunServer{resourceManager: manager} + + md := 
metadata.New(map[string]string{common.GoogleIAPUserIdentityHeader: common.GoogleIAPUserIdentityPrefix + "user@google.com"}) + ctx := metadata.NewIncomingContext(context.Background(), md) + + apiRun := &api.Run{ + Name: "run1", + PipelineSpec: &api.PipelineSpec{ + WorkflowManifest: testWorkflow.ToStringForStore(), + Parameters: []*api.Parameter{ + {Name: "param1", Value: "world"}, + }, + }, + ResourceReferences: []*api.ResourceReference{ + { + Key: &api.ResourceKey{Type: api.ResourceType_NAMESPACE, Id: "ns"}, + Relationship: api.Relationship_OWNER, + }, + { + Key: &api.ResourceKey{Type: api.ResourceType_EXPERIMENT, Id: experiment.UUID}, + Relationship: api.Relationship_OWNER, + }, + }, + } + runDetail, _ := manager.CreateRun(apiRun) + + err := runServer.canAccessRun(ctx, runDetail.UUID) + assert.NotNil(t, err) + assert.Contains(t, err.Error(), "Unauthorized access") +} + +func TestCanAccessRun_Authorized(t *testing.T) { + viper.Set(common.MultiUserMode, "true") + defer viper.Set(common.MultiUserMode, "false") + + clients, manager, experiment := initWithExperiment(t) + defer clients.Close() + runServer := RunServer{resourceManager: manager} + + md := metadata.New(map[string]string{common.GoogleIAPUserIdentityHeader: common.GoogleIAPUserIdentityPrefix + "user@google.com"}) + ctx := metadata.NewIncomingContext(context.Background(), md) + + apiRun := &api.Run{ + Name: "run1", + PipelineSpec: &api.PipelineSpec{ + WorkflowManifest: testWorkflow.ToStringForStore(), + Parameters: []*api.Parameter{ + {Name: "param1", Value: "world"}, + }, + }, + ResourceReferences: []*api.ResourceReference{ + { + Key: &api.ResourceKey{Type: api.ResourceType_EXPERIMENT, Id: experiment.UUID}, + Relationship: api.Relationship_OWNER, + }, + }, + } + runDetail, _ := manager.CreateRun(apiRun) + + err := runServer.canAccessRun(ctx, runDetail.UUID) + assert.Nil(t, err) +} diff --git a/backend/src/apiserver/server/test/arguments_tarball/arguments-version.tar.gz b/backend/src/apiserver/server/test/arguments_tarball/arguments-version.tar.gz new file mode 100644 index 00000000000..2b5c414ae1a Binary files /dev/null and b/backend/src/apiserver/server/test/arguments_tarball/arguments-version.tar.gz differ diff --git a/backend/src/apiserver/server/test/patched-arguments-parameters.yaml b/backend/src/apiserver/server/test/patched-arguments-parameters.yaml new file mode 100644 index 00000000000..87402fbcb76 --- /dev/null +++ b/backend/src/apiserver/server/test/patched-arguments-parameters.yaml @@ -0,0 +1,36 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +apiVersion: argoproj.io/v1alpha1 +kind: Workflow +metadata: + generateName: arguments-parameters- +spec: + entrypoint: whalesay + arguments: + parameters: + - name: param1 + value: new-hello + - name: param2 + + templates: + - name: whalesay + inputs: + parameters: + - name: param1 + - name: param2 + container: + image: docker/whalesay:latest + command: [cowsay] + args: ["{{inputs.parameters.param1}}-{{inputs.parameters.param2}}"] diff --git a/backend/src/apiserver/server/test_util.go b/backend/src/apiserver/server/test_util.go index 1712f571991..da44ab92b0f 100644 --- a/backend/src/apiserver/server/test_util.go +++ b/backend/src/apiserver/server/test_util.go @@ -19,9 +19,12 @@ import ( "github.com/argoproj/argo/pkg/apis/workflow/v1alpha1" api "github.com/kubeflow/pipelines/backend/api/go_client" + "github.com/kubeflow/pipelines/backend/src/apiserver/client" + "github.com/kubeflow/pipelines/backend/src/apiserver/common" "github.com/kubeflow/pipelines/backend/src/apiserver/model" "github.com/kubeflow/pipelines/backend/src/apiserver/resource" "github.com/kubeflow/pipelines/backend/src/common/util" + "github.com/spf13/viper" "github.com/stretchr/testify/assert" "google.golang.org/grpc/codes" v1 "k8s.io/apimachinery/pkg/apis/meta/v1" @@ -29,7 +32,7 @@ import ( var testWorkflow = util.NewWorkflow(&v1alpha1.Workflow{ TypeMeta: v1.TypeMeta{APIVersion: "argoproj.io/v1alpha1", Kind: "Workflow"}, - ObjectMeta: v1.ObjectMeta{Name: "workflow-name", UID: "workflow1"}, + ObjectMeta: v1.ObjectMeta{Name: "workflow-name", UID: "workflow1", Namespace: "ns1"}, Spec: v1alpha1.WorkflowSpec{Arguments: v1alpha1.Arguments{Parameters: []v1alpha1.Parameter{{Name: "param1"}}}}, }) @@ -39,6 +42,12 @@ var testWorkflow2 = util.NewWorkflow(&v1alpha1.Workflow{ Spec: v1alpha1.WorkflowSpec{Arguments: v1alpha1.Arguments{Parameters: []v1alpha1.Parameter{{Name: "param1"}}}}, }) +var testWorkflowPatch = util.NewWorkflow(&v1alpha1.Workflow{ + TypeMeta: v1.TypeMeta{APIVersion: "argoproj.io/v1alpha1", Kind: "Workflow"}, + ObjectMeta: v1.ObjectMeta{Name: "workflow-name", UID: "workflow2"}, + Spec: v1alpha1.WorkflowSpec{Arguments: v1alpha1.Arguments{Parameters: []v1alpha1.Parameter{{Name: "param1"}, {Name: "param2"}}}}, +}) + var validReference = []*api.ResourceReference{ { Key: &api.ResourceKey{ @@ -47,15 +56,101 @@ var validReference = []*api.ResourceReference{ }, } +var validReferencesOfExperimentAndPipelineVersion = []*api.ResourceReference{ + { + Key: &api.ResourceKey{ + Type: api.ResourceType_EXPERIMENT, + Id: resource.DefaultFakeUUID, + }, + Relationship: api.Relationship_OWNER, + }, + { + Key: &api.ResourceKey{ + Type: api.ResourceType_PIPELINE_VERSION, + Id: resource.DefaultFakeUUID, + }, + Relationship: api.Relationship_CREATOR, + }, +} + +// This automatically runs before all the tests. 
+func initEnvVars() { + viper.Set(common.PodNamespace, "ns1") +} + func initWithExperiment(t *testing.T) (*resource.FakeClientManager, *resource.ResourceManager, *model.Experiment) { + initEnvVars() + clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) + resourceManager := resource.NewResourceManager(clientManager) + apiExperiment := &api.Experiment{Name: "exp1"} + if common.IsMultiUserMode() { + apiExperiment = &api.Experiment{ + Name: "exp1", + ResourceReferences: []*api.ResourceReference{ + { + Key: &api.ResourceKey{Type: api.ResourceType_NAMESPACE, Id: "ns1"}, + Relationship: api.Relationship_OWNER, + }, + }, + } + } + experiment, err := resourceManager.CreateExperiment(apiExperiment) + assert.Nil(t, err) + return clientManager, resourceManager, experiment +} + +func initWithExperiment_KFAM_Unauthorized(t *testing.T) (*resource.FakeClientManager, *resource.ResourceManager, *model.Experiment) { + initEnvVars() clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) + clientManager.KfamClientFake = client.NewFakeKFAMClientUnauthorized() resourceManager := resource.NewResourceManager(clientManager) - experiment := &model.Experiment{Name: "123"} - experiment, err := resourceManager.CreateExperiment(experiment) + apiExperiment := &api.Experiment{Name: "exp1"} + if common.IsMultiUserMode() { + apiExperiment = &api.Experiment{ + Name: "exp1", + ResourceReferences: []*api.ResourceReference{ + { + Key: &api.ResourceKey{Type: api.ResourceType_NAMESPACE, Id: "ns1"}, + Relationship: api.Relationship_OWNER, + }, + }, + } + } + experiment, err := resourceManager.CreateExperiment(apiExperiment) assert.Nil(t, err) return clientManager, resourceManager, experiment } +func initWithExperimentAndPipelineVersion(t *testing.T) (*resource.FakeClientManager, *resource.ResourceManager, *model.Experiment) { + initEnvVars() + clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) + resourceManager := resource.NewResourceManager(clientManager) + + // Create an experiment. + apiExperiment := &api.Experiment{Name: "exp1"} + experiment, err := resourceManager.CreateExperiment(apiExperiment) + assert.Nil(t, err) + + // Create a pipeline and then a pipeline version. 
+ _, err = resourceManager.CreatePipeline("pipeline", "", []byte("apiVersion: argoproj.io/v1alpha1\nkind: Workflow")) + assert.Nil(t, err) + _, err = resourceManager.CreatePipelineVersion(&api.PipelineVersion{ + Name: "pipeline_version", + ResourceReferences: []*api.ResourceReference{ + &api.ResourceReference{ + Key: &api.ResourceKey{ + Id: resource.DefaultFakeUUID, + Type: api.ResourceType_PIPELINE, + }, + Relationship: api.Relationship_OWNER, + }, + }, + }, + []byte("apiVersion: argoproj.io/v1alpha1\nkind: Workflow")) + + return clientManager, resourceManager, experiment +} + func initWithOneTimeRun(t *testing.T) (*resource.FakeClientManager, *resource.ResourceManager, *model.RunDetail) { clientManager, manager, exp := initWithExperiment(t) apiRun := &api.Run{ @@ -80,6 +175,7 @@ } // Util function to create an initial state with pipeline uploaded func initWithPipeline(t *testing.T) (*resource.FakeClientManager, *resource.ResourceManager, *model.Pipeline) { + initEnvVars() store := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) manager := resource.NewResourceManager(store) p, err := manager.CreatePipeline("p1", "", []byte(testWorkflow.ToStringForStore())) diff --git a/backend/src/apiserver/server/util.go b/backend/src/apiserver/server/util.go index bd274c18e50..7450ab21659 100644 --- a/backend/src/apiserver/server/util.go +++ b/backend/src/apiserver/server/util.go @@ -6,14 +6,21 @@ import ( "bufio" "bytes" "compress/gzip" + "context" "encoding/json" - api "github.com/kubeflow/pipelines/backend/api/go_client" - "github.com/kubeflow/pipelines/backend/src/apiserver/resource" - "github.com/kubeflow/pipelines/backend/src/common/util" "io" "io/ioutil" "net/url" + "strconv" "strings" + + "github.com/golang/glog" + api "github.com/kubeflow/pipelines/backend/api/go_client" + "github.com/kubeflow/pipelines/backend/src/apiserver/common" + "github.com/kubeflow/pipelines/backend/src/apiserver/resource" + "github.com/kubeflow/pipelines/backend/src/common/util" + "github.com/pkg/errors" + "google.golang.org/grpc/metadata" ) // These are valid conditions of a ScheduledWorkflow. @@ -239,4 +246,146 @@ func ValidatePipelineSpec(resourceManager *resource.ResourceManager, spec *api.P return util.NewInvalidInputError("The input parameter length exceed maximum size of %v.", util.MaxParameterBytes) } return nil -} \ No newline at end of file +} + +// Verify that +// (1) a pipeline version is specified in references as a creator. +// (2) the above pipeline version does exist in pipeline version store and is +// in ready status. 
+func CheckPipelineVersionReference(resourceManager *resource.ResourceManager, references []*api.ResourceReference) (*string, error) { + if references == nil { + return nil, util.NewInvalidInputError("Please specify a pipeline version in Run's resource references") + } + + var pipelineVersionId = "" + for _, reference := range references { + if reference.Key.Type == api.ResourceType_PIPELINE_VERSION && reference.Relationship == api.Relationship_CREATOR { + pipelineVersionId = reference.Key.Id + } + } + if len(pipelineVersionId) == 0 { + return nil, util.NewInvalidInputError("Please specify a pipeline version in Run's resource references") + } + + // Verify pipeline version exists + if _, err := resourceManager.GetPipelineVersion(pipelineVersionId); err != nil { + return nil, util.Wrap(err, "Please specify a valid pipeline version in Run's resource references.") + } + + return &pipelineVersionId, nil +} + +func getUserIdentity(ctx context.Context) (string, error) { + if ctx == nil { + return "", util.NewBadRequestError(errors.New("Request error: context is nil"), "Request error: context is nil.") + } + md, _ := metadata.FromIncomingContext(ctx) + // If the request header contains the user identity, requests are authorized + // based on the namespace field in the request. + if userIdentityHeader, ok := md[common.GetKubeflowUserIDHeader()]; ok { + if len(userIdentityHeader) != 1 { + return "", util.NewBadRequestError(errors.New("Request header error: unexpected number of user identity header. Expect 1 got "+strconv.Itoa(len(userIdentityHeader))), + "Request header error: unexpected number of user identity header. Expect 1 got "+strconv.Itoa(len(userIdentityHeader))) + } + userIdentityHeaderFields := strings.Split(userIdentityHeader[0], ":") + if len(userIdentityHeaderFields) != 2 { + return "", util.NewBadRequestError(errors.New("Request header error: user identity value is incorrectly formatted"), + "Request header error: user identity value is incorrectly formatted") + } + return userIdentityHeaderFields[1], nil + } + return "", util.NewBadRequestError(errors.New("Request header error: there is no user identity header."), "Request header error: there is no user identity header.") +} + +func CanAccessExperiment(resourceManager *resource.ResourceManager, ctx context.Context, experimentID string) error { + if common.IsMultiUserMode() == false { + // Skip authz if not multi-user mode. + return nil + } + + experiment, err := resourceManager.GetExperiment(experimentID) + if err != nil { + return util.NewBadRequestError(errors.New("Invalid experiment ID"), "Failed to get experiment.") + } + if len(experiment.Namespace) == 0 { + return util.NewInternalServerError(errors.New("Missing namespace"), "Experiment %v doesn't have a namespace.", experiment.Name) + } + err = isAuthorized(resourceManager, ctx, experiment.Namespace) + if err != nil { + return util.Wrap(err, "Failed to authorize with API resource references") + } + return nil +} + +func CanAccessExperimentInResourceReferences(resourceManager *resource.ResourceManager, ctx context.Context, resourceRefs []*api.ResourceReference) error { + if common.IsMultiUserMode() == false { + // Skip authz if not multi-user mode. 
+ return nil + } + + experimentID := common.GetExperimentIDFromAPIResourceReferences(resourceRefs) + if len(experimentID) == 0 { + return util.NewBadRequestError(errors.New("Missing experiment"), "Experiment is required for CreateRun/CreateJob.") + } + return CanAccessExperiment(resourceManager, ctx, experimentID) +} + +func CanAccessNamespaceInResourceReferences(resourceManager *resource.ResourceManager, ctx context.Context, resourceRefs []*api.ResourceReference) error { + if common.IsMultiUserMode() == false { + // Skip authz if not multi-user mode. + return nil + } + + namespace := common.GetNamespaceFromAPIResourceReferences(resourceRefs) + if len(namespace) == 0 { + return util.NewBadRequestError(errors.New("Namespace required in Kubeflow deployment for authorization."), "Namespace required in Kubeflow deployment for authorization.") + } + err := isAuthorized(resourceManager, ctx, namespace) + if err != nil { + return util.Wrap(err, "Failed to authorize with API resource references") + } + return nil +} + +func CanAccessNamespace(resourceManager *resource.ResourceManager, ctx context.Context, namespace string) error { + if common.IsMultiUserMode() == false { + // Skip authz if not multi-user mode. + return nil + } + + if len(namespace) == 0 { + return util.NewBadRequestError(errors.New("Namespace required for authorization."), "Namespace required for authorization.") + } + err := isAuthorized(resourceManager, ctx, namespace) + if err != nil { + return util.Wrap(err, "Failed to authorize namespace") + } + return nil +} + +// isAuthorized verifies whether the user identity, which is contained in the context object, +// can access the target namespace. If the returned error is nil, the authorization passes. +// Otherwise, authorization fails with a non-nil error. 
+func isAuthorized(resourceManager *resource.ResourceManager, ctx context.Context, namespace string) error { + userIdentity, err := getUserIdentity(ctx) + if err != nil { + return util.Wrap(err, "Bad request.") + } + + if len(userIdentity) == 0 { + return util.NewBadRequestError(errors.New("Request header error: user identity is empty."), "Request header error: user identity is empty.") + } + + isAuthorized, err := resourceManager.IsRequestAuthorized(userIdentity, namespace) + if err != nil { + return util.Wrap(err, "Authorization failure.") + } + + if isAuthorized == false { + glog.Infof("Unauthorized access for %s to namespace %s", userIdentity, namespace) + return util.NewBadRequestError(errors.New("Unauthorized access for "+userIdentity+" to namespace "+namespace), "Unauthorized access for "+userIdentity+" to namespace "+namespace) + } + + glog.Infof("Authorized user %s in namespace %s", userIdentity, namespace) + return nil +} diff --git a/backend/src/apiserver/server/util_test.go b/backend/src/apiserver/server/util_test.go index e76473202d1..34dd5010ac5 100644 --- a/backend/src/apiserver/server/util_test.go +++ b/backend/src/apiserver/server/util_test.go @@ -1,15 +1,19 @@ package server import ( + "context" "io/ioutil" "os" "strings" "testing" api "github.com/kubeflow/pipelines/backend/api/go_client" + "github.com/kubeflow/pipelines/backend/src/apiserver/common" "github.com/kubeflow/pipelines/backend/src/apiserver/resource" "github.com/kubeflow/pipelines/backend/src/common/util" + "github.com/spf13/viper" "github.com/stretchr/testify/assert" + "google.golang.org/grpc/metadata" ) func TestGetPipelineName_QueryStringNotEmpty(t *testing.T) { @@ -332,3 +336,93 @@ func TestValidatePipelineSpec_ParameterTooLong(t *testing.T) { assert.NotNil(t, err) assert.Contains(t, err.Error(), "The input parameter length exceed maximum size") } + +func TestGetUserIdentity(t *testing.T) { + md := metadata.New(map[string]string{common.GoogleIAPUserIdentityHeader: common.GoogleIAPUserIdentityPrefix + "user@google.com"}) + ctx := metadata.NewIncomingContext(context.Background(), md) + userIdentity, err := getUserIdentity(ctx) + assert.Nil(t, err) + assert.Equal(t, "user@google.com", userIdentity) +} + +func TestCanAccessNamespaceInResourceReferences_Unauthorized(t *testing.T) { + viper.Set(common.MultiUserMode, "true") + defer viper.Set(common.MultiUserMode, "false") + + clients, manager, _ := initWithExperiment_KFAM_Unauthorized(t) + defer clients.Close() + + md := metadata.New(map[string]string{common.GoogleIAPUserIdentityHeader: common.GoogleIAPUserIdentityPrefix + "user@google.com"}) + ctx := metadata.NewIncomingContext(context.Background(), md) + references := []*api.ResourceReference{ + { + Key: &api.ResourceKey{ + Type: api.ResourceType_NAMESPACE, Id: "ns1"}, + Relationship: api.Relationship_OWNER, + }, + } + err := CanAccessNamespaceInResourceReferences(manager, ctx, references) + assert.NotNil(t, err) + assert.Contains(t, err.Error(), "Unauthorized access") +} + +func TestCanAccessNamespaceInResourceReferences_Authorized(t *testing.T) { + viper.Set(common.MultiUserMode, "true") + defer viper.Set(common.MultiUserMode, "false") + + clients, manager, _ := initWithExperiment(t) + defer clients.Close() + + md := metadata.New(map[string]string{common.GoogleIAPUserIdentityHeader: common.GoogleIAPUserIdentityPrefix + "user@google.com"}) + ctx := metadata.NewIncomingContext(context.Background(), md) + references := []*api.ResourceReference{ + { + Key: &api.ResourceKey{ + Type: api.ResourceType_NAMESPACE, 
Id: "ns1"}, + Relationship: api.Relationship_OWNER, + }, + } + err := CanAccessNamespaceInResourceReferences(manager, ctx, references) + assert.Nil(t, err) +} + +func TestCanAccessExperimentInResourceReferences_Unauthorized(t *testing.T) { + viper.Set(common.MultiUserMode, "true") + defer viper.Set(common.MultiUserMode, "false") + + clients, manager, _ := initWithExperiment_KFAM_Unauthorized(t) + defer clients.Close() + + md := metadata.New(map[string]string{common.GoogleIAPUserIdentityHeader: common.GoogleIAPUserIdentityPrefix + "user@google.com"}) + ctx := metadata.NewIncomingContext(context.Background(), md) + references := []*api.ResourceReference{ + { + Key: &api.ResourceKey{ + Type: api.ResourceType_EXPERIMENT, Id: resource.DefaultFakeUUID}, + Relationship: api.Relationship_OWNER, + }, + } + err := CanAccessExperimentInResourceReferences(manager, ctx, references) + assert.NotNil(t, err) + assert.Contains(t, err.Error(), "Unauthorized access") +} + +func TestCanAccessExperimentInResourceReferences_Authorized(t *testing.T) { + viper.Set(common.MultiUserMode, "true") + defer viper.Set(common.MultiUserMode, "false") + + clients, manager, _ := initWithExperiment(t) + defer clients.Close() + + md := metadata.New(map[string]string{common.GoogleIAPUserIdentityHeader: common.GoogleIAPUserIdentityPrefix + "user@google.com"}) + ctx := metadata.NewIncomingContext(context.Background(), md) + references := []*api.ResourceReference{ + { + Key: &api.ResourceKey{ + Type: api.ResourceType_EXPERIMENT, Id: resource.DefaultFakeUUID}, + Relationship: api.Relationship_OWNER, + }, + } + err := CanAccessExperimentInResourceReferences(manager, ctx, references) + assert.Nil(t, err) +} diff --git a/backend/src/apiserver/server/visualization_server.go b/backend/src/apiserver/server/visualization_server.go index abaf560f7ad..72bfcd9d7ae 100644 --- a/backend/src/apiserver/server/visualization_server.go +++ b/backend/src/apiserver/server/visualization_server.go @@ -4,25 +4,42 @@ import ( "context" "encoding/json" "fmt" - "github.com/golang/glog" - "github.com/kubeflow/pipelines/backend/api/go_client" - "github.com/kubeflow/pipelines/backend/src/apiserver/resource" - "github.com/kubeflow/pipelines/backend/src/common/util" "io/ioutil" "net/http" "net/url" "strings" + + "github.com/golang/glog" + "github.com/kubeflow/pipelines/backend/api/go_client" + "github.com/kubeflow/pipelines/backend/src/apiserver/common" + "github.com/kubeflow/pipelines/backend/src/apiserver/resource" + "github.com/kubeflow/pipelines/backend/src/common/util" +) + +const ( + visualizationServiceName = "VisualizationService.Name" + visualizationServicePort = "VisualizationService.Port" ) type VisualizationServer struct { - resourceManager *resource.ResourceManager - serviceURL string + resourceManager *resource.ResourceManager + serviceURL string } func (s *VisualizationServer) CreateVisualization(ctx context.Context, request *go_client.CreateVisualizationRequest) (*go_client.Visualization, error) { if err := s.validateCreateVisualizationRequest(request); err != nil { return nil, err } + + // In multi-user mode, we allow an empty namespace, in which case we fall back to the visualization service in the system namespace. + // See getVisualizationServiceURL() for details. 
+ if common.IsMultiUserMode() && len(request.Namespace) > 0 { + err := isAuthorized(s.resourceManager, ctx, request.Namespace) + if err != nil { + return nil, util.Wrap(err, "Failed to authorize on namespace.") + } + } + body, err := s.generateVisualizationFromRequest(request) if err != nil { return nil, err @@ -60,7 +77,8 @@ func (s *VisualizationServer) validateCreateVisualizationRequest(request *go_cli // service to generate HTML visualizations from a request. // It returns the generated HTML as a string and any error that is encountered. func (s *VisualizationServer) generateVisualizationFromRequest(request *go_client.CreateVisualizationRequest) ([]byte, error) { - if !isVisualizationServiceAlive(s.serviceURL) { + serviceURL := s.getVisualizationServiceURL(request) + if !isVisualizationServiceAlive(serviceURL) { return nil, util.NewInternalServerError( fmt.Errorf("service not available"), "Service not available", @@ -69,10 +87,10 @@ func (s *VisualizationServer) generateVisualizationFromRequest(request *go_clien visualizationType := strings.ToLower(go_client.Visualization_Type_name[int32(request.Visualization.Type)]) urlValues := url.Values{ "arguments": {request.Visualization.Arguments}, - "source": {request.Visualization.Source}, - "type": {visualizationType}, + "source": {request.Visualization.Source}, + "type": {visualizationType}, } - resp, err := http.PostForm(s.serviceURL, urlValues) + resp, err := http.PostForm(serviceURL, urlValues) if err != nil { return nil, util.Wrap(err, "Unable to initialize visualization request.") } @@ -87,6 +105,16 @@ func (s *VisualizationServer) generateVisualizationFromRequest(request *go_clien return body, nil } +func (s *VisualizationServer) getVisualizationServiceURL(request *go_client.CreateVisualizationRequest) string { + if common.IsMultiUserMode() && len(request.Namespace) > 0 { + return fmt.Sprintf("http://%s.%s:%s", + common.GetStringConfig(visualizationServiceName), + request.Namespace, + common.GetStringConfig(visualizationServicePort)) + } + return s.serviceURL +} + func isVisualizationServiceAlive(serviceURL string) bool { resp, err := http.Get(serviceURL) if err != nil { @@ -99,7 +127,7 @@ func isVisualizationServiceAlive(serviceURL string) bool { func NewVisualizationServer(resourceManager *resource.ResourceManager, serviceHost string, servicePort string) *VisualizationServer { serviceURL := fmt.Sprintf("http://%s:%s", serviceHost, servicePort) return &VisualizationServer{ - resourceManager: resourceManager, - serviceURL: serviceURL, + resourceManager: resourceManager, + serviceURL: serviceURL, } } diff --git a/backend/src/apiserver/server/visualization_server_test.go b/backend/src/apiserver/server/visualization_server_test.go index c0da193aeea..27d02668e02 100644 --- a/backend/src/apiserver/server/visualization_server_test.go +++ b/backend/src/apiserver/server/visualization_server_test.go @@ -1,18 +1,26 @@ package server import ( - "github.com/kubeflow/pipelines/backend/api/go_client" - "github.com/stretchr/testify/assert" + "context" "net/http" "net/http/httptest" "testing" + + "github.com/kubeflow/pipelines/backend/api/go_client" + "github.com/kubeflow/pipelines/backend/src/apiserver/client" + "github.com/kubeflow/pipelines/backend/src/apiserver/common" + "github.com/kubeflow/pipelines/backend/src/apiserver/resource" + "github.com/kubeflow/pipelines/backend/src/common/util" + "github.com/spf13/viper" + "github.com/stretchr/testify/assert" + "google.golang.org/grpc/metadata" ) func TestValidateCreateVisualizationRequest(t 
*testing.T) { clients, manager, _ := initWithExperiment(t) defer clients.Close() server := &VisualizationServer{ - resourceManager: manager, + resourceManager: manager, } visualization := &go_client.Visualization{ Type: go_client.Visualization_ROC_CURVE, @@ -30,7 +38,7 @@ func TestValidateCreateVisualizationRequest_ArgumentsAreEmpty(t *testing.T) { clients, manager, _ := initWithExperiment(t) defer clients.Close() server := &VisualizationServer{ - resourceManager: manager, + resourceManager: manager, } visualization := &go_client.Visualization{ Type: go_client.Visualization_ROC_CURVE, @@ -48,7 +56,7 @@ func TestValidateCreateVisualizationRequest_SourceIsEmpty(t *testing.T) { clients, manager, _ := initWithExperiment(t) defer clients.Close() server := &VisualizationServer{ - resourceManager: manager, + resourceManager: manager, } visualization := &go_client.Visualization{ Type: go_client.Visualization_ROC_CURVE, @@ -66,7 +74,7 @@ func TestValidateCreateVisualizationRequest_SourceIsEmptyAndTypeIsCustom(t *test clients, manager, _ := initWithExperiment(t) defer clients.Close() server := &VisualizationServer{ - resourceManager: manager, + resourceManager: manager, } visualization := &go_client.Visualization{ Type: go_client.Visualization_CUSTOM, @@ -83,7 +91,7 @@ func TestValidateCreateVisualizationRequest_ArgumentsNotValidJSON(t *testing.T) clients, manager, _ := initWithExperiment(t) defer clients.Close() server := &VisualizationServer{ - resourceManager: manager, + resourceManager: manager, } visualization := &go_client.Visualization{ Type: go_client.Visualization_ROC_CURVE, @@ -106,8 +114,8 @@ func TestGenerateVisualization(t *testing.T) { })) defer httpServer.Close() server := &VisualizationServer{ - resourceManager: manager, - serviceURL: httpServer.URL, + resourceManager: manager, + serviceURL: httpServer.URL, } visualization := &go_client.Visualization{ Type: go_client.Visualization_ROC_CURVE, @@ -134,8 +142,8 @@ func TestGenerateVisualization_ServiceNotAvailableError(t *testing.T) { } })) server := &VisualizationServer{ - resourceManager: manager, - serviceURL: httpServer.URL, + resourceManager: manager, + serviceURL: httpServer.URL, } visualization := &go_client.Visualization{ Type: go_client.Visualization_ROC_CURVE, @@ -165,8 +173,8 @@ func TestGenerateVisualization_ServerError(t *testing.T) { })) defer httpServer.Close() server := &VisualizationServer{ - resourceManager: manager, - serviceURL: httpServer.URL, + resourceManager: manager, + serviceURL: httpServer.URL, } visualization := &go_client.Visualization{ Type: go_client.Visualization_ROC_CURVE, @@ -180,3 +188,71 @@ func TestGenerateVisualization_ServerError(t *testing.T) { assert.Nil(t, body) assert.Equal(t, "500 Internal Server Error", err.Error()) } + +func TestGetVisualizationServiceURL(t *testing.T) { + server := &VisualizationServer{ + resourceManager: nil, + serviceURL: "http://host:port", + } + request := &go_client.CreateVisualizationRequest{ + Visualization: nil, + } + url := server.getVisualizationServiceURL(request) + assert.Equal(t, "http://host:port", url) +} + +func TestGetVisualizationServiceURL_Multiuser(t *testing.T) { + viper.Set(common.MultiUserMode, "true") + defer viper.Set(common.MultiUserMode, "false") + viper.Set("VisualizationService.Name", "ml-pipeline-visualizationserver") + viper.Set("VisualizationService.Port", "8888") + + server := &VisualizationServer{ + resourceManager: nil, + serviceURL: "http://host:port", + } + + request := &go_client.CreateVisualizationRequest{ + Visualization: nil, + 
Namespace: "ns1", + } + url := server.getVisualizationServiceURL(request) + assert.Equal(t, "http://ml-pipeline-visualizationserver.ns1:8888", url) + + // When namespace is not provided, we fall back to the default visualization service. + request = &go_client.CreateVisualizationRequest{ + Visualization: nil, + } + url = server.getVisualizationServiceURL(request) + assert.Equal(t, "http://host:port", url) +} + +func TestCreateVisualization_Unauthorized(t *testing.T) { + viper.Set(common.MultiUserMode, "true") + defer viper.Set(common.MultiUserMode, "false") + + md := metadata.New(map[string]string{common.GoogleIAPUserIdentityHeader: common.GoogleIAPUserIdentityPrefix + "user@google.com"}) + ctx := metadata.NewIncomingContext(context.Background(), md) + + clientManager := resource.NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) + clientManager.KfamClientFake = client.NewFakeKFAMClientUnauthorized() + resourceManager := resource.NewResourceManager(clientManager) + defer clientManager.Close() + + server := &VisualizationServer{ + resourceManager: resourceManager, + } + visualization := &go_client.Visualization{ + Type: go_client.Visualization_ROC_CURVE, + Source: "gs://ml-pipeline/roc/data.csv", + Arguments: "{}", + } + + request := &go_client.CreateVisualizationRequest{ + Visualization: visualization, + Namespace: "ns1", + } + _, err := server.CreateVisualization(ctx, request) + assert.NotNil(t, err) + assert.Contains(t, err.Error(), "Unauthorized access") +} diff --git a/backend/src/apiserver/storage/BUILD.bazel b/backend/src/apiserver/storage/BUILD.bazel index 26def425b38..a88a1eb9668 100644 --- a/backend/src/apiserver/storage/BUILD.bazel +++ b/backend/src/apiserver/storage/BUILD.bazel @@ -13,7 +13,6 @@ go_library( "minio_client_fake.go", "object_store.go", "object_store_fake.go", - "object_store_util.go", "pipeline_store.go", "resource_reference_store.go", "run_store.go", diff --git a/backend/src/apiserver/storage/db_status_store.go b/backend/src/apiserver/storage/db_status_store.go index 07798a3c304..7101ddeb80d 100644 --- a/backend/src/apiserver/storage/db_status_store.go +++ b/backend/src/apiserver/storage/db_status_store.go @@ -46,10 +46,11 @@ func (s *DBStatusStore) InitializeDBStatusTable() error { tx.Rollback() return util.NewInternalServerError(err, "Failed to load database status.") } - defer rows.Close() + next := rows.Next() + rows.Close() // "rows" shouldn't be used after this point. // The table is not initialized - if !rows.Next() { + if !next { sql, args, queryErr := sq. Insert("db_statuses"). SetMap(defaultDBStatus). diff --git a/backend/src/apiserver/storage/default_experiment_store.go b/backend/src/apiserver/storage/default_experiment_store.go index 6eb5eb5087b..6fe898cff1d 100644 --- a/backend/src/apiserver/storage/default_experiment_store.go +++ b/backend/src/apiserver/storage/default_experiment_store.go @@ -48,10 +48,11 @@ func (s *DefaultExperimentStore) initializeDefaultExperimentTable() error { tx.Rollback() return util.NewInternalServerError(err, "Failed to get default experiment.") } - defer rows.Close() + next := rows.Next() + rows.Close() // If the table is not initialized, then set the default value. - if !rows.Next() { + if !next { sql, args, queryErr := sq. Insert("default_experiments"). SetMap(defaultExperimentDBValue). 
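The `rows.Next()`/`rows.Close()` reordering in the two stores above is worth spelling out: with Go's database/sql, an open `*sql.Rows` pins the transaction's single underlying connection, so issuing the follow-up INSERT on the same transaction before closing the result set can fail. A minimal sketch of the pattern under assumed names (hypothetical DSN, driver, and schema; not the actual KFP helpers):

```go
package main

import (
	"database/sql"
	"log"

	_ "github.com/go-sql-driver/mysql" // assumed driver; any database/sql driver works
)

// tableIsEmpty mirrors the pattern used above: read what you need from the
// result set, then close it before running the next statement on the same
// transaction, because the open rows hold the transaction's connection.
func tableIsEmpty(tx *sql.Tx, query string) (bool, error) {
	rows, err := tx.Query(query)
	if err != nil {
		return false, err
	}
	next := rows.Next()
	rows.Close() // "rows" must not be used after this point.
	if err := rows.Err(); err != nil {
		return false, err
	}
	return !next, nil
}

func main() {
	// Placeholder DSN; running this requires a reachable MySQL instance.
	db, err := sql.Open("mysql", "user:password@/kfp")
	if err != nil {
		log.Fatal(err)
	}
	tx, err := db.Begin()
	if err != nil {
		log.Fatal(err)
	}
	empty, err := tableIsEmpty(tx, "SELECT 1 FROM db_statuses LIMIT 1")
	if err != nil {
		tx.Rollback()
		log.Fatal(err)
	}
	if empty {
		// Safe to write now: the earlier result set has been closed.
		// Column name is hypothetical, for illustration only.
		if _, err := tx.Exec("INSERT INTO db_statuses (HaveSamplesLoaded) VALUES (false)"); err != nil {
			tx.Rollback()
			log.Fatal(err)
		}
	}
	if err := tx.Commit(); err != nil {
		log.Fatal(err)
	}
}
```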
diff --git a/backend/src/apiserver/storage/experiment_store.go b/backend/src/apiserver/storage/experiment_store.go index ad906f19597..e0aa60e322b 100644 --- a/backend/src/apiserver/storage/experiment_store.go +++ b/backend/src/apiserver/storage/experiment_store.go @@ -7,6 +7,7 @@ import ( sq "github.com/Masterminds/squirrel" "github.com/golang/glog" + api "github.com/kubeflow/pipelines/backend/api/go_client" "github.com/kubeflow/pipelines/backend/src/apiserver/common" "github.com/kubeflow/pipelines/backend/src/apiserver/list" "github.com/kubeflow/pipelines/backend/src/apiserver/model" @@ -14,10 +15,12 @@ import ( ) type ExperimentStoreInterface interface { - ListExperiments(opts *list.Options) ([]*model.Experiment, int, string, error) + ListExperiments(filterContext *common.FilterContext, opts *list.Options) ([]*model.Experiment, int, string, error) GetExperiment(uuid string) (*model.Experiment, error) CreateExperiment(*model.Experiment) (*model.Experiment, error) DeleteExperiment(uuid string) error + ArchiveExperiment(expId string) error + UnarchiveExperiment(expId string) error } type ExperimentStore struct { @@ -30,13 +33,18 @@ type ExperimentStore struct { // Runs two SQL queries in a transaction to return a list of matching experiments, as well as their // total_size. The total_size does not reflect the page size. -func (s *ExperimentStore) ListExperiments(opts *list.Options) ([]*model.Experiment, int, string, error) { +func (s *ExperimentStore) ListExperiments(filterContext *common.FilterContext, opts *list.Options) ([]*model.Experiment, int, string, error) { errorF := func(err error) ([]*model.Experiment, int, string, error) { return nil, 0, "", util.NewInternalServerError(err, "Failed to list experiments: %v", err) } // SQL for getting the filtered and paginated rows - sqlBuilder := opts.AddFilterToSelect(sq.Select("*").From("experiments")) + sqlBuilder := sq.Select("*").From("experiments") + if filterContext.ReferenceKey != nil && filterContext.ReferenceKey.Type == common.Namespace { + sqlBuilder = sqlBuilder.Where(sq.Eq{"Namespace": filterContext.ReferenceKey.ID}) + } + sqlBuilder = opts.AddFilterToSelect(sqlBuilder) + rowsSql, rowsArgs, err := opts.AddPaginationToSelect(sqlBuilder).ToSql() if err != nil { return errorF(err) @@ -44,7 +52,11 @@ func (s *ExperimentStore) ListExperiments(opts *list.Options) ([]*model.Experime // SQL for getting total size. This matches the query to get all the rows above, in order // to do the same filter, but counts instead of scanning the rows. 
- sizeSql, sizeArgs, err := opts.AddFilterToSelect(sq.Select("count(*)").From("experiments")).ToSql() + sqlBuilder = sq.Select("count(*)").From("experiments") + if filterContext.ReferenceKey != nil && filterContext.ReferenceKey.Type == common.Namespace { + sqlBuilder = sqlBuilder.Where(sq.Eq{"Namespace": filterContext.ReferenceKey.ID}) + } + sizeSql, sizeArgs, err := opts.AddFilterToSelect(sqlBuilder).ToSql() if err != nil { return errorF(err) } @@ -123,18 +135,25 @@ func (s *ExperimentStore) GetExperiment(uuid string) (*model.Experiment, error) func (s *ExperimentStore) scanRows(rows *sql.Rows) ([]*model.Experiment, error) { var experiments []*model.Experiment for rows.Next() { - var uuid, name, description string + var uuid, name, description, namespace, storageState string var createdAtInSec int64 - err := rows.Scan(&uuid, &name, &description, &createdAtInSec) + err := rows.Scan(&uuid, &name, &description, &createdAtInSec, &namespace, &storageState) if err != nil { - return experiments, nil + return experiments, err } - experiments = append(experiments, &model.Experiment{ + experiment := &model.Experiment{ UUID: uuid, Name: name, Description: description, CreatedAtInSec: createdAtInSec, - }) + Namespace: namespace, + StorageState: storageState, + } + // Since storage state is a field added after initial KFP release, it is possible that existing experiments don't have this field and we use AVAILABLE in that case. + if experiment.StorageState == "" { + experiment.StorageState = api.Experiment_STORAGESTATE_AVAILABLE.String() + } + experiments = append(experiments, experiment) } return experiments, nil } @@ -148,14 +167,25 @@ func (s *ExperimentStore) CreateExperiment(experiment *model.Experiment) (*model return nil, util.NewInternalServerError(err, "Failed to create an experiment id.") } newExperiment.UUID = id.String() + + if newExperiment.StorageState == "" { + // Default to available if not set. + newExperiment.StorageState = api.Experiment_STORAGESTATE_AVAILABLE.String() + } else if newExperiment.StorageState != api.Experiment_STORAGESTATE_AVAILABLE.String() && + newExperiment.StorageState != api.Experiment_STORAGESTATE_ARCHIVED.String() { + return nil, util.NewInvalidInputError("Invalid value for StorageState field: %q.", newExperiment.StorageState) + } + sql, args, err := sq. Insert("experiments"). - SetMap( - sq.Eq{ - "UUID": newExperiment.UUID, - "CreatedAtInSec": newExperiment.CreatedAtInSec, - "Name": newExperiment.Name, - "Description": newExperiment.Description}). + SetMap(sq.Eq{ + "UUID": newExperiment.UUID, + "CreatedAtInSec": newExperiment.CreatedAtInSec, + "Name": newExperiment.Name, + "Description": newExperiment.Description, + "Namespace": newExperiment.Namespace, + "StorageState": newExperiment.StorageState, + }). ToSql() if err != nil { return nil, util.NewInternalServerError(err, "Failed to create query to insert experiment to experiment table: %v", @@ -164,8 +194,8 @@ func (s *ExperimentStore) CreateExperiment(experiment *model.Experiment) (*model _, err = s.db.Exec(sql, args...) if err != nil { if s.db.IsDuplicateError(err) { - return nil, util.NewInvalidInputError( - "Failed to create a new experiment. The name %v already exist. Please specify a new name.", experiment.Name) + return nil, util.NewAlreadyExistError( + "Failed to create a new experiment. The name %v already exists. 
Please specify a new name.", experiment.Name) } return nil, util.NewInternalServerError(err, "Failed to add experiment to experiment table: %v", err.Error()) @@ -206,12 +236,157 @@ func (s *ExperimentStore) DeleteExperiment(id string) error { return nil } +func (s *ExperimentStore) ArchiveExperiment(expId string) error { + // ArchiveExperiment results in + // 1. The experiment getting archived + // 2. All the runs in the experiment getting archived no matter what previous storage state they are in + sql, args, err := sq. + Update("experiments"). + SetMap(sq.Eq{ + "StorageState": api.Experiment_STORAGESTATE_ARCHIVED.String(), + }). + Where(sq.Eq{"UUID": expId}). + ToSql() + if err != nil { + return util.NewInternalServerError(err, + "Failed to create query to archive experiment %s. error: '%v'", expId, err.Error()) + } + + // TODO(jingzhang36): use inner join to replace nested query for better performance. + filteredRunsSql, filteredRunsArgs, err := sq.Select("ResourceUUID"). + From("resource_references as rf"). + Where(sq.And{ + sq.Eq{"rf.ResourceType": common.Run}, + sq.Eq{"rf.ReferenceUUID": expId}, + sq.Eq{"rf.ReferenceType": common.Experiment}}).ToSql() + if err != nil { + return util.NewInternalServerError(err, + "Failed to create query to filter the runs in an experiment %s. error: '%v'", expId, err.Error()) + } + updateRunsSql, updateRunsArgs, err := sq. + Update("run_details"). + SetMap(sq.Eq{ + "StorageState": api.Run_STORAGESTATE_ARCHIVED.String(), + }). + Where(fmt.Sprintf("UUID in (%s)", filteredRunsSql), filteredRunsArgs...). + ToSql() + if err != nil { + return util.NewInternalServerError(err, + "Failed to create query to archive the runs in an experiment %s. error: '%v'", expId, err.Error()) + } + + updateRunsWithExperimentUUIDSql, updateRunsWithExperimentUUIDArgs, err := sq. + Update("run_details"). + SetMap(sq.Eq{ + "StorageState": api.Run_STORAGESTATE_ARCHIVED.String(), + }). + Where(sq.Eq{"ExperimentUUID": expId}). + Where(sq.NotEq{"StorageState": api.Run_STORAGESTATE_ARCHIVED.String()}). + ToSql() + if err != nil { + return util.NewInternalServerError(err, + "Failed to create query to archive the runs in an experiment %s. error: '%v'", expId, err.Error()) + } + + // TODO(jingzhang36): use inner join to replace nested query for better performance. + filteredJobsSql, filteredJobsArgs, err := sq.Select("ResourceUUID"). + From("resource_references as rf"). + Where(sq.And{ + sq.Eq{"rf.ResourceType": common.Job}, + sq.Eq{"rf.ReferenceUUID": expId}, + sq.Eq{"rf.ReferenceType": common.Experiment}}).ToSql() + if err != nil { + return util.NewInternalServerError(err, + "Failed to create query to filter the jobs in an experiment %s. error: '%v'", expId, err.Error()) + } + now := s.time.Now().Unix() + updateJobsSql, updateJobsArgs, err := sq. + Update("jobs"). + SetMap(sq.Eq{ + "Enabled": false, + "UpdatedAtInSec": now}). + Where(sq.Eq{"Enabled": true}). + Where(fmt.Sprintf("UUID in (%s)", filteredJobsSql), filteredJobsArgs...). + ToSql() + if err != nil { + return util.NewInternalServerError(err, + "Failed to create query to archive the jobs in an experiment %s. error: '%v'", expId, err.Error()) + } + + // In a single transaction, we update experiments, run_details and jobs tables. + tx, err := s.db.Begin() + if err != nil { + return util.NewInternalServerError(err, "Failed to create a new transaction to archive an experiment.") + } + + _, err = tx.Exec(sql, args...) + if err != nil { + tx.Rollback() + return util.NewInternalServerError(err, + "Failed to archive experiment %s. 
error: '%v'", expId, err.Error()) + } + + _, err = tx.Exec(updateRunsSql, updateRunsArgs...) + if err != nil { + tx.Rollback() + return util.NewInternalServerError(err, + "Failed to archive runs with experiment reference being %s. error: '%v'", expId, err.Error()) + } + + _, err = tx.Exec(updateRunsWithExperimentUUIDSql, updateRunsWithExperimentUUIDArgs...) + if err != nil { + tx.Rollback() + return util.NewInternalServerError(err, + "Failed to archive runs with ExperimentUUID being %s. error: '%v'", expId, err.Error()) + } + + _, err = tx.Exec(updateJobsSql, updateJobsArgs...) + if err != nil { + tx.Rollback() + return util.NewInternalServerError(err, + "Failed to disable all jobs in an experiment %s. error: '%v'", expId, err.Error()) + } + + err = tx.Commit() + if err != nil { + tx.Rollback() + return util.NewInternalServerError(err, "Failed to archive an experiment %s and its runs", expId) + } + + return nil +} + +func (s *ExperimentStore) UnarchiveExperiment(expId string) error { + // UnarchiveExperiment results in + // 1. The experiment getting unarchived + // 2. All the archived runs and disabled jobs will stay archived + sql, args, err := sq. + Update("experiments"). + SetMap(sq.Eq{ + "StorageState": api.Experiment_STORAGESTATE_AVAILABLE.String(), + }). + Where(sq.Eq{"UUID": expId}). + ToSql() + if err != nil { + return util.NewInternalServerError(err, + "Failed to create query to unarchive experiment %s. error: '%v'", expId, err.Error()) + } + + _, err = s.db.Exec(sql, args...) + if err != nil { + return util.NewInternalServerError(err, + "Failed to unarchive experiment %s. error: '%v'", expId, err.Error()) + } + + return nil +} + // factory function for experiment store func NewExperimentStore(db *DB, time util.TimeInterface, uuid util.UUIDGeneratorInterface) *ExperimentStore { return &ExperimentStore{ - db: db, - time: time, - uuid: uuid, + db: db, + time: time, + uuid: uuid, resourceReferenceStore: NewResourceReferenceStore(db), defaultExperimentStore: NewDefaultExperimentStore(db), } diff --git a/backend/src/apiserver/storage/experiment_store_test.go b/backend/src/apiserver/storage/experiment_store_test.go index d23bf51fcab..aa0fbf02964 100644 --- a/backend/src/apiserver/storage/experiment_store_test.go +++ b/backend/src/apiserver/storage/experiment_store_test.go @@ -6,6 +6,7 @@ import ( "fmt" api "github.com/kubeflow/pipelines/backend/api/go_client" + "github.com/kubeflow/pipelines/backend/src/apiserver/common" "github.com/kubeflow/pipelines/backend/src/apiserver/list" "github.com/kubeflow/pipelines/backend/src/apiserver/model" "github.com/kubeflow/pipelines/backend/src/common/util" @@ -22,7 +23,15 @@ const ( ) func createExperiment(name string) *model.Experiment { - return &model.Experiment{Name: name, Description: fmt.Sprintf("My name is %s", name)} + return createExperimentInNamespace(name, "") +} + +func createExperimentInNamespace(name string, namespace string) *model.Experiment { + return &model.Experiment{ + Name: name, + Description: fmt.Sprintf("My name is %s", name), + Namespace: namespace, + } } func TestListExperiments_Pagination(t *testing.T) { @@ -41,18 +50,20 @@ func TestListExperiments_Pagination(t *testing.T) { CreatedAtInSec: 1, Name: "experiment1", Description: "My name is experiment1", + StorageState: "STORAGESTATE_AVAILABLE", } expectedExperiment4 := &model.Experiment{ UUID: fakeIDFour, CreatedAtInSec: 4, Name: "experiment2", Description: "My name is experiment2", + StorageState: "STORAGESTATE_AVAILABLE", } experimentsExpected := 
[]*model.Experiment{expectedExperiment1, expectedExperiment4} opts, err := list.NewOptions(&model.Experiment{}, 2, "name", nil) assert.Nil(t, err) - experiments, total_size, nextPageToken, err := experimentStore.ListExperiments(opts) + experiments, total_size, nextPageToken, err := experimentStore.ListExperiments(&common.FilterContext{}, opts) assert.Nil(t, err) assert.NotEmpty(t, nextPageToken) @@ -64,19 +75,21 @@ func TestListExperiments_Pagination(t *testing.T) { CreatedAtInSec: 2, Name: "experiment3", Description: "My name is experiment3", + StorageState: "STORAGESTATE_AVAILABLE", } expectedExperiment3 := &model.Experiment{ UUID: fakeIDThree, CreatedAtInSec: 3, Name: "experiment4", Description: "My name is experiment4", + StorageState: "STORAGESTATE_AVAILABLE", } experimentsExpected2 := []*model.Experiment{expectedExperiment2, expectedExperiment3} opts, err = list.NewOptionsFromToken(nextPageToken, 2) assert.Nil(t, err) - experiments, total_size, nextPageToken, err = experimentStore.ListExperiments(opts) + experiments, total_size, nextPageToken, err = experimentStore.ListExperiments(&common.FilterContext{}, opts) assert.Nil(t, err) assert.Empty(t, nextPageToken) assert.Equal(t, 4, total_size) @@ -100,18 +113,20 @@ func TestListExperiments_Pagination_Descend(t *testing.T) { CreatedAtInSec: 2, Name: "experiment3", Description: "My name is experiment3", + StorageState: "STORAGESTATE_AVAILABLE", } expectedExperiment3 := &model.Experiment{ UUID: fakeIDThree, CreatedAtInSec: 3, Name: "experiment4", Description: "My name is experiment4", + StorageState: "STORAGESTATE_AVAILABLE", } experimentsExpected := []*model.Experiment{expectedExperiment3, expectedExperiment2} opts, err := list.NewOptions(&model.Experiment{}, 2, "name desc", nil) assert.Nil(t, err) - experiments, total_size, nextPageToken, err := experimentStore.ListExperiments(opts) + experiments, total_size, nextPageToken, err := experimentStore.ListExperiments(&common.FilterContext{}, opts) assert.Nil(t, err) assert.NotEmpty(t, nextPageToken) @@ -123,19 +138,21 @@ func TestListExperiments_Pagination_Descend(t *testing.T) { CreatedAtInSec: 1, Name: "experiment1", Description: "My name is experiment1", + StorageState: "STORAGESTATE_AVAILABLE", } expectedExperiment4 := &model.Experiment{ UUID: fakeIDFour, CreatedAtInSec: 4, Name: "experiment2", Description: "My name is experiment2", + StorageState: "STORAGESTATE_AVAILABLE", } experimentsExpected2 := []*model.Experiment{expectedExperiment4, expectedExperiment1} opts, err = list.NewOptionsFromToken(nextPageToken, 2) assert.Nil(t, err) - experiments, total_size, nextPageToken, err = experimentStore.ListExperiments(opts) + experiments, total_size, nextPageToken, err = experimentStore.ListExperiments(&common.FilterContext{}, opts) assert.Nil(t, err) assert.Empty(t, nextPageToken) assert.Equal(t, 4, total_size) @@ -152,13 +169,14 @@ func TestListExperiments_Pagination_LessThanPageSize(t *testing.T) { CreatedAtInSec: 1, Name: "experiment1", Description: "My name is experiment1", + StorageState: "STORAGESTATE_AVAILABLE", } experimentsExpected := []*model.Experiment{expectedExperiment1} opts, err := list.NewOptions(&model.Experiment{}, 2, "", nil) assert.Nil(t, err) - experiments, total_size, nextPageToken, err := experimentStore.ListExperiments(opts) + experiments, total_size, nextPageToken, err := experimentStore.ListExperiments(&common.FilterContext{}, opts) assert.Nil(t, err) assert.Equal(t, "", nextPageToken) assert.Equal(t, 1, total_size) @@ -173,7 +191,7 @@ func TestListExperimentsError(t 
*testing.T) { opts, err := list.NewOptions(&model.Experiment{}, 2, "", nil) assert.Nil(t, err) - _, _, _, err = experimentStore.ListExperiments(opts) + _, _, _, err = experimentStore.ListExperiments(&common.FilterContext{}, opts) assert.Equal(t, codes.Internal, err.(*util.UserError).ExternalStatusCode()) } @@ -187,6 +205,7 @@ func TestGetExperiment(t *testing.T) { CreatedAtInSec: 1, Name: "experiment1", Description: "My name is experiment1", + StorageState: "STORAGESTATE_AVAILABLE", } experiment, err := experimentStore.GetExperiment(fakeID) @@ -223,6 +242,7 @@ func TestCreateExperiment(t *testing.T) { CreatedAtInSec: 1, Name: "experiment1", Description: "My name is experiment1", + StorageState: "STORAGESTATE_AVAILABLE", } experiment := createExperiment("experiment1") @@ -231,6 +251,40 @@ func TestCreateExperiment(t *testing.T) { assert.Equal(t, experimentExpected, *experiment, "Got unexpected experiment.") } +func TestCreateExperiment_DifferentNamespaces(t *testing.T) { + db := NewFakeDbOrFatal() + defer db.Close() + experimentStore := NewExperimentStore(db, util.NewFakeTimeForEpoch(), util.NewFakeUUIDGeneratorOrFatal(fakeID, nil)) + experimentExpected := model.Experiment{ + UUID: fakeID, + CreatedAtInSec: 1, + Name: "experiment1", + Description: "My name is experiment1", + Namespace: "namespace1", + StorageState: "STORAGESTATE_AVAILABLE", + } + + experiment := createExperimentInNamespace("experiment1", "namespace1") + experiment, err := experimentStore.CreateExperiment(experiment) + assert.Nil(t, err) + assert.Equal(t, experimentExpected, *experiment, "Got unexpected experiment.") + + experimentStore = NewExperimentStore(db, util.NewFakeTimeForEpoch(), util.NewFakeUUIDGeneratorOrFatal(fakeIDTwo, nil)) + experiment = createExperimentInNamespace("experiment1", "namespace2") + experimentExpected = model.Experiment{ + UUID: fakeIDTwo, + CreatedAtInSec: 1, + Name: "experiment1", + Description: "My name is experiment1", + Namespace: "namespace2", + StorageState: "STORAGESTATE_AVAILABLE", + } + + experiment, err = experimentStore.CreateExperiment(experiment) + assert.Nil(t, err) + assert.Equal(t, experimentExpected, *experiment, "Got unexpected experiment.") +} + func TestCreateExperiment_DuplicatedKey(t *testing.T) { db := NewFakeDbOrFatal() defer db.Close() @@ -328,7 +382,7 @@ func TestListExperiments_Filtering(t *testing.T) { opts, err := list.NewOptions(&model.Experiment{}, 2, "id", filterProto) assert.Nil(t, err) - experiments, total_size, nextPageToken, err := experimentStore.ListExperiments(opts) + experiments, total_size, nextPageToken, err := experimentStore.ListExperiments(&common.FilterContext{}, opts) expected := []*model.Experiment{ &model.Experiment{ @@ -336,12 +390,14 @@ func TestListExperiments_Filtering(t *testing.T) { CreatedAtInSec: 2, Name: "experiment2", Description: "My name is experiment2", + StorageState: "STORAGESTATE_AVAILABLE", }, &model.Experiment{ UUID: fakeIDThree, CreatedAtInSec: 3, Name: "experiment3", Description: "My name is experiment3", + StorageState: "STORAGESTATE_AVAILABLE", }, } @@ -354,7 +410,7 @@ func TestListExperiments_Filtering(t *testing.T) { opts, err = list.NewOptionsFromToken(nextPageToken, 2) assert.Nil(t, err) - experiments, total_size, nextPageToken, err = experimentStore.ListExperiments(opts) + experiments, total_size, nextPageToken, err = experimentStore.ListExperiments(&common.FilterContext{}, opts) expected = []*model.Experiment{ &model.Experiment{ @@ -362,6 +418,7 @@ func TestListExperiments_Filtering(t *testing.T) { CreatedAtInSec: 4, 
Name: "experiment4", Description: "My name is experiment4", + StorageState: "STORAGESTATE_AVAILABLE", }, } @@ -371,3 +428,171 @@ func TestListExperiments_Filtering(t *testing.T) { assert.Equal(t, expected, experiments) assert.Equal(t, 3, total_size) } + +func TestArchiveExperiment_InternalError(t *testing.T) { + db := NewFakeDbOrFatal() + experimentStore := NewExperimentStore(db, util.NewFakeTimeForEpoch(), util.NewFakeUUIDGeneratorOrFatal(fakeID, nil)) + experimentStore.CreateExperiment(createExperiment("experiment1")) + db.Close() + + err := experimentStore.ArchiveExperiment(fakeID) + assert.Equal(t, codes.Internal, err.(*util.UserError).ExternalStatusCode(), + "Expected archive experiment to return internal error") +} + +func TestArchiveAndUnarchiveExperiment(t *testing.T) { + db := NewFakeDbOrFatal() + defer db.Close() + + // Initial state: 1 experiment and 2 runs in it. + // The experiment is unarchived. + // One run is archived and the other is not. + experimentStore := NewExperimentStore(db, util.NewFakeTimeForEpoch(), util.NewFakeUUIDGeneratorOrFatal(fakeID, nil)) + experimentStore.CreateExperiment(createExperiment("experiment1")) + runStore := NewRunStore(db, util.NewFakeTimeForEpoch()) + run1 := &model.RunDetail{ + Run: model.Run{ + UUID: "1", + Name: "run1", + DisplayName: "run1", + StorageState: api.Run_STORAGESTATE_AVAILABLE.String(), + Namespace: "n1", + CreatedAtInSec: 1, + ScheduledAtInSec: 1, + Conditions: "Running", + ExperimentUUID: fakeID, + ResourceReferences: []*model.ResourceReference{ + { + ResourceUUID: "1", ResourceType: common.Run, + ReferenceUUID: fakeID, ReferenceName: "experiment1", + ReferenceType: common.Experiment, Relationship: common.Creator, + }, + }, + }, + PipelineRuntime: model.PipelineRuntime{ + WorkflowRuntimeManifest: "workflow1", + }, + } + run2 := &model.RunDetail{ + Run: model.Run{ + UUID: "2", + Name: "run2", + DisplayName: "run2", + StorageState: api.Run_STORAGESTATE_ARCHIVED.String(), + Namespace: "n1", + CreatedAtInSec: 2, + ScheduledAtInSec: 2, + Conditions: "done", + ExperimentUUID: fakeID, + ResourceReferences: []*model.ResourceReference{ + { + ResourceUUID: "2", ResourceType: common.Run, + ReferenceUUID: fakeID, ReferenceName: "experiment1", + ReferenceType: common.Experiment, Relationship: common.Creator, + }, + }, + }, + PipelineRuntime: model.PipelineRuntime{ + WorkflowRuntimeManifest: "workflow1", + }, + } + runStore.CreateRun(run1) + runStore.CreateRun(run2) + jobStore := NewJobStore(db, util.NewFakeTimeForEpoch()) + job1 := &model.Job{ + UUID: "1", + DisplayName: "pp 1", + Name: "pp1", + Namespace: "n1", + Enabled: true, + Conditions: "ready", + Trigger: model.Trigger{ + PeriodicSchedule: model.PeriodicSchedule{ + PeriodicScheduleStartTimeInSec: util.Int64Pointer(1), + PeriodicScheduleEndTimeInSec: util.Int64Pointer(2), + IntervalSecond: util.Int64Pointer(3), + }, + }, + CreatedAtInSec: 1, + UpdatedAtInSec: 1, + ResourceReferences: []*model.ResourceReference{ + { + ResourceUUID: "1", ResourceType: common.Job, ReferenceUUID: fakeID, + ReferenceName: "experiment1", ReferenceType: common.Experiment, + Relationship: common.Owner, + }, + }, + } + job2 := &model.Job{ + UUID: "2", + DisplayName: "pp 2", + Name: "pp2", + Namespace: "n1", + Conditions: "ready", + Trigger: model.Trigger{ + CronSchedule: model.CronSchedule{ + CronScheduleStartTimeInSec: util.Int64Pointer(1), + CronScheduleEndTimeInSec: util.Int64Pointer(2), + Cron: util.StringPointer("1 * *"), + }, + }, + NoCatchup: true, + Enabled: false, + CreatedAtInSec: 2, + 
UpdatedAtInSec: 2, + ResourceReferences: []*model.ResourceReference{ + { + ResourceUUID: "2", ResourceType: common.Job, + ReferenceUUID: fakeID, ReferenceName: "experiment2", ReferenceType: common.Experiment, + Relationship: common.Owner, + }, + }, + } + jobStore.CreateJob(job1) + jobStore.CreateJob(job2) + + // Archive experiment and verify the experiment and two runs in it are all archived. + err := experimentStore.ArchiveExperiment(fakeID) + assert.Nil(t, err) + exp, err := experimentStore.GetExperiment(fakeID) + assert.Nil(t, err) + assert.Equal(t, api.Experiment_STORAGESTATE_ARCHIVED.String(), exp.StorageState) + opts, err := list.NewOptions(&model.Run{}, 10, "id", nil) + runs, total_run_size, _, err := runStore.ListRuns(&common.FilterContext{ReferenceKey: &common.ReferenceKey{Type: common.Experiment, ID: fakeID}}, opts) + assert.Nil(t, err) + assert.Equal(t, total_run_size, 2) + assert.Equal(t, api.Run_STORAGESTATE_ARCHIVED.String(), runs[0].StorageState) + assert.Equal(t, api.Run_STORAGESTATE_ARCHIVED.String(), runs[1].StorageState) + jobs, total_job_size, _, err := jobStore.ListJobs(&common.FilterContext{ReferenceKey: &common.ReferenceKey{Type: common.Experiment, ID: fakeID}}, opts) + assert.Nil(t, err) + assert.Equal(t, total_job_size, 2) + assert.Equal(t, false, jobs[0].Enabled) + assert.Equal(t, false, jobs[1].Enabled) + + // Unarchive the experiment, and verify the experiment is unarchived while two runs in it stay archived. + err = experimentStore.UnarchiveExperiment(fakeID) + assert.Nil(t, err) + exp, err = experimentStore.GetExperiment(fakeID) + assert.Nil(t, err) + assert.Equal(t, api.Experiment_STORAGESTATE_AVAILABLE.String(), exp.StorageState) + runs, total_run_size, _, err = runStore.ListRuns(&common.FilterContext{ReferenceKey: &common.ReferenceKey{Type: common.Experiment, ID: fakeID}}, opts) + assert.Nil(t, err) + assert.Equal(t, total_run_size, 2) + assert.Equal(t, api.Run_STORAGESTATE_ARCHIVED.String(), runs[0].StorageState) + assert.Equal(t, api.Run_STORAGESTATE_ARCHIVED.String(), runs[1].StorageState) + jobs, total_job_size, _, err = jobStore.ListJobs(&common.FilterContext{ReferenceKey: &common.ReferenceKey{Type: common.Experiment, ID: fakeID}}, opts) + assert.Nil(t, err) + assert.Equal(t, total_job_size, 2) + assert.Equal(t, false, jobs[0].Enabled) + assert.Equal(t, false, jobs[1].Enabled) +} + +func TestUnarchiveExperiment_InternalError(t *testing.T) { + db := NewFakeDbOrFatal() + experimentStore := NewExperimentStore(db, util.NewFakeTimeForEpoch(), util.NewFakeUUIDGeneratorOrFatal(fakeID, nil)) + db.Close() + + err := experimentStore.UnarchiveExperiment(fakeID) + assert.Equal(t, codes.Internal, err.(*util.UserError).ExternalStatusCode(), + "Expected unarchive experiment to return internal error") +} diff --git a/backend/src/apiserver/storage/job_store.go b/backend/src/apiserver/storage/job_store.go index c07ea4a5dab..3f386883a61 100644 --- a/backend/src/apiserver/storage/job_store.go +++ b/backend/src/apiserver/storage/job_store.go @@ -26,8 +26,8 @@ import ( "github.com/kubeflow/pipelines/backend/src/common/util" ) -var jobColumns = []string{"UUID", "DisplayName", "Name", "Namespace", "Description", "MaxConcurrency", - "CreatedAtInSec", "UpdatedAtInSec", "Enabled", "CronScheduleStartTimeInSec", "CronScheduleEndTimeInSec", +var jobColumns = []string{"UUID", "DisplayName", "Name", "Namespace", "ServiceAccount", "Description", "MaxConcurrency", + "NoCatchup", "CreatedAtInSec", "UpdatedAtInSec", "Enabled", "CronScheduleStartTimeInSec", "CronScheduleEndTimeInSec", 
"Schedule", "PeriodicScheduleStartTimeInSec", "PeriodicScheduleEndTimeInSec", "IntervalSecond", "PipelineId", "PipelineName", "PipelineSpecManifest", "WorkflowSpecManifest", "Parameters", "Conditions", } @@ -51,7 +51,7 @@ type JobStore struct { // total_size. The total_size does not reflect the page size, but it does reflect the number of jobs // matching the supplied filters and resource references. func (s *JobStore) ListJobs( - filterContext *common.FilterContext, opts *list.Options) ([]*model.Job, int, string, error) { + filterContext *common.FilterContext, opts *list.Options) ([]*model.Job, int, string, error) { errorF := func(err error) ([]*model.Job, int, string, error) { return nil, 0, "", util.NewInternalServerError(err, "Failed to list jobs: %v", err) } @@ -111,9 +111,19 @@ func (s *JobStore) ListJobs( } func (s *JobStore) buildSelectJobsQuery(selectCount bool, opts *list.Options, - filterContext *common.FilterContext) (string, []interface{}, error) { - filteredSelectBuilder, err := list.FilterOnResourceReference("jobs", jobColumns, - common.Job, selectCount, filterContext) + filterContext *common.FilterContext) (string, []interface{}, error) { + + var filteredSelectBuilder sq.SelectBuilder + var err error + + refKey := filterContext.ReferenceKey + if refKey != nil && refKey.Type == common.Namespace { + filteredSelectBuilder, err = list.FilterOnNamespace("jobs", jobColumns, + selectCount, refKey.ID) + } else { + filteredSelectBuilder, err = list.FilterOnResourceReference("jobs", jobColumns, + common.Job, selectCount, filterContext) + } if err != nil { return "", nil, util.NewInternalServerError(err, "Failed to list jobs: %v", err) } @@ -173,16 +183,16 @@ func (s *JobStore) addResourceReferences(filteredSelectBuilder sq.SelectBuilder) func (s *JobStore) scanRows(r *sql.Rows) ([]*model.Job, error) { var jobs []*model.Job for r.Next() { - var uuid, displayName, name, namespace, pipelineId, pipelineName, conditions, - description, parameters, pipelineSpecManifest, workflowSpecManifest string + var uuid, displayName, name, namespace, pipelineId, pipelineName, conditions, serviceAccount, + description, parameters, pipelineSpecManifest, workflowSpecManifest string var cronScheduleStartTimeInSec, cronScheduleEndTimeInSec, - periodicScheduleStartTimeInSec, periodicScheduleEndTimeInSec, intervalSecond sql.NullInt64 + periodicScheduleStartTimeInSec, periodicScheduleEndTimeInSec, intervalSecond sql.NullInt64 var cron, resourceReferencesInString sql.NullString - var enabled bool + var enabled, noCatchup bool var createdAtInSec, updatedAtInSec, maxConcurrency int64 err := r.Scan( - &uuid, &displayName, &name, &namespace, &description, - &maxConcurrency, &createdAtInSec, &updatedAtInSec, &enabled, + &uuid, &displayName, &name, &namespace, &serviceAccount, &description, + &maxConcurrency, &noCatchup, &createdAtInSec, &updatedAtInSec, &enabled, &cronScheduleStartTimeInSec, &cronScheduleEndTimeInSec, &cron, &periodicScheduleStartTimeInSec, &periodicScheduleEndTimeInSec, &intervalSecond, &pipelineId, &pipelineName, &pipelineSpecManifest, &workflowSpecManifest, ¶meters, &conditions, &resourceReferencesInString) @@ -195,10 +205,12 @@ func (s *JobStore) scanRows(r *sql.Rows) ([]*model.Job, error) { DisplayName: displayName, Name: name, Namespace: namespace, + ServiceAccount: serviceAccount, Description: description, Enabled: enabled, Conditions: conditions, MaxConcurrency: maxConcurrency, + NoCatchup: noCatchup, ResourceReferences: resourceReferences, Trigger: model.Trigger{ CronSchedule: 
model.CronSchedule{ @@ -258,29 +270,31 @@ func (s *JobStore) DeleteJob(id string) error { func (s *JobStore) CreateJob(j *model.Job) (*model.Job, error) { jobSql, jobArgs, err := sq. Insert("jobs"). - SetMap(sq.Eq{ - "UUID": j.UUID, - "DisplayName": j.DisplayName, - "Name": j.Name, - "Namespace": j.Namespace, - "Description": j.Description, - "MaxConcurrency": j.MaxConcurrency, - "Enabled": j.Enabled, - "Conditions": j.Conditions, - "CronScheduleStartTimeInSec": PointerToNullInt64(j.CronScheduleStartTimeInSec), - "CronScheduleEndTimeInSec": PointerToNullInt64(j.CronScheduleEndTimeInSec), - "Schedule": PointerToNullString(j.Cron), - "PeriodicScheduleStartTimeInSec": PointerToNullInt64(j.PeriodicScheduleStartTimeInSec), - "PeriodicScheduleEndTimeInSec": PointerToNullInt64(j.PeriodicScheduleEndTimeInSec), - "IntervalSecond": PointerToNullInt64(j.IntervalSecond), - "CreatedAtInSec": j.CreatedAtInSec, - "UpdatedAtInSec": j.UpdatedAtInSec, - "PipelineId": j.PipelineId, - "PipelineName": j.PipelineName, - "PipelineSpecManifest": j.PipelineSpecManifest, - "WorkflowSpecManifest": j.WorkflowSpecManifest, - "Parameters": j.Parameters, - }).ToSql() + SetMap(sq.Eq{ + "UUID": j.UUID, + "DisplayName": j.DisplayName, + "Name": j.Name, + "Namespace": j.Namespace, + "ServiceAccount": j.ServiceAccount, + "Description": j.Description, + "MaxConcurrency": j.MaxConcurrency, + "NoCatchup": j.NoCatchup, + "Enabled": j.Enabled, + "Conditions": j.Conditions, + "CronScheduleStartTimeInSec": PointerToNullInt64(j.CronScheduleStartTimeInSec), + "CronScheduleEndTimeInSec": PointerToNullInt64(j.CronScheduleEndTimeInSec), + "Schedule": PointerToNullString(j.Cron), + "PeriodicScheduleStartTimeInSec": PointerToNullInt64(j.PeriodicScheduleStartTimeInSec), + "PeriodicScheduleEndTimeInSec": PointerToNullInt64(j.PeriodicScheduleEndTimeInSec), + "IntervalSecond": PointerToNullInt64(j.IntervalSecond), + "CreatedAtInSec": j.CreatedAtInSec, + "UpdatedAtInSec": j.UpdatedAtInSec, + "PipelineId": j.PipelineId, + "PipelineName": j.PipelineName, + "PipelineSpecManifest": j.PipelineSpecManifest, + "WorkflowSpecManifest": j.WorkflowSpecManifest, + "Parameters": j.Parameters, + }).ToSql() if err != nil { return nil, util.NewInternalServerError(err, "Failed to create query to add job to job table: %v", err.Error()) @@ -314,9 +328,9 @@ func (s *JobStore) EnableJob(id string, enabled bool) error { now := s.time.Now().Unix() sql, args, err := sq. Update("jobs"). - SetMap(sq.Eq{ - "Enabled": enabled, - "UpdatedAtInSec": now}). + SetMap(sq.Eq{ + "Enabled": enabled, + "UpdatedAtInSec": now}). Where(sq.Eq{"UUID": string(id)}). Where(sq.Eq{"Enabled": !enabled}). ToSql() @@ -339,20 +353,21 @@ func (s *JobStore) UpdateJob(swf *util.ScheduledWorkflow) error { sql, args, err := sq. Update("jobs"). - SetMap(sq.Eq{ - "Name": swf.Name, - "Namespace": swf.Namespace, - "Enabled": swf.Spec.Enabled, - "Conditions": swf.ConditionSummary(), - "MaxConcurrency": swf.MaxConcurrencyOr0(), - "Parameters": parameters, - "UpdatedAtInSec": now, - "CronScheduleStartTimeInSec": PointerToNullInt64(swf.CronScheduleStartTimeInSecOrNull()), - "CronScheduleEndTimeInSec": PointerToNullInt64(swf.CronScheduleEndTimeInSecOrNull()), - "Schedule": swf.CronOrEmpty(), - "PeriodicScheduleStartTimeInSec": PointerToNullInt64(swf.PeriodicScheduleStartTimeInSecOrNull()), - "PeriodicScheduleEndTimeInSec": PointerToNullInt64(swf.PeriodicScheduleEndTimeInSecOrNull()), - "IntervalSecond": swf.IntervalSecondOr0()}). 
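CreateJob above and UpdateJob below both persist the optional schedule bounds through the package's pointer-to-NULL helpers. A sketch of what such a helper must do, assuming it mirrors database/sql's null wrappers (the local name is illustrative, not the store's own):

package main

import (
	"database/sql"
	"fmt"
)

// pointerToNullInt64 sketches the PointerToNullInt64 contract: a nil pointer
// maps to an invalid value (stored as SQL NULL), a non-nil pointer to a valid one.
func pointerToNullInt64(v *int64) sql.NullInt64 {
	if v == nil {
		return sql.NullInt64{}
	}
	return sql.NullInt64{Int64: *v, Valid: true}
}

func main() {
	start := int64(1)
	fmt.Println(pointerToNullInt64(&start)) // {1 true}
	fmt.Println(pointerToNullInt64(nil))    // {0 false}, i.e. NULL
}

This is why a job without a cron or periodic schedule ends up with NULL bounds rather than misleading zero timestamps.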
+ SetMap(sq.Eq{ + "Name": swf.Name, + "Namespace": swf.Namespace, + "Enabled": swf.Spec.Enabled, + "Conditions": swf.ConditionSummary(), + "MaxConcurrency": swf.MaxConcurrencyOr0(), + "NoCatchup": swf.NoCatchupOrFalse(), + "Parameters": parameters, + "UpdatedAtInSec": now, + "CronScheduleStartTimeInSec": PointerToNullInt64(swf.CronScheduleStartTimeInSecOrNull()), + "CronScheduleEndTimeInSec": PointerToNullInt64(swf.CronScheduleEndTimeInSecOrNull()), + "Schedule": swf.CronOrEmpty(), + "PeriodicScheduleStartTimeInSec": PointerToNullInt64(swf.PeriodicScheduleStartTimeInSecOrNull()), + "PeriodicScheduleEndTimeInSec": PointerToNullInt64(swf.PeriodicScheduleEndTimeInSecOrNull()), + "IntervalSecond": swf.IntervalSecondOr0()}). Where(sq.Eq{"UUID": string(swf.UID)}). ToSql() if err != nil { diff --git a/backend/src/apiserver/storage/job_store_test.go b/backend/src/apiserver/storage/job_store_test.go index 41ed33a65db..0726950bb88 100644 --- a/backend/src/apiserver/storage/job_store_test.go +++ b/backend/src/apiserver/storage/job_store_test.go @@ -38,9 +38,9 @@ const ( func initializeDbAndStore() (*DB, *JobStore) { db := NewFakeDbOrFatal() expStore := NewExperimentStore(db, util.NewFakeTimeForEpoch(), util.NewFakeUUIDGeneratorOrFatal(defaultFakeExpId, nil)) - expStore.CreateExperiment(&model.Experiment{Name: "exp1"}) + expStore.CreateExperiment(&model.Experiment{Name: "exp1", Namespace: "n1"}) expStore = NewExperimentStore(db, util.NewFakeTimeForEpoch(), util.NewFakeUUIDGeneratorOrFatal(defaultFakeExpIdTwo, nil)) - expStore.CreateExperiment(&model.Experiment{Name: "exp2"}) + expStore.CreateExperiment(&model.Experiment{Name: "exp2", Namespace: "n1"}) jobStore := NewJobStore(db, util.NewFakeTimeForEpoch()) job1 := &model.Job{ UUID: "1", @@ -88,6 +88,7 @@ func initializeDbAndStore() (*DB, *JobStore) { Cron: util.StringPointer("1 * *"), }, }, + NoCatchup: true, Enabled: true, CreatedAtInSec: 2, UpdatedAtInSec: 2, @@ -163,6 +164,7 @@ func TestListJobs_Pagination(t *testing.T) { Cron: util.StringPointer("1 * *"), }, }, + NoCatchup: true, CreatedAtInSec: 2, UpdatedAtInSec: 2, Conditions: "ready", @@ -244,6 +246,7 @@ func TestListJobs_Pagination_Descent(t *testing.T) { Cron: util.StringPointer("1 * *"), }, }, + NoCatchup: true, CreatedAtInSec: 2, UpdatedAtInSec: 2, ResourceReferences: []*model.ResourceReference{ @@ -281,6 +284,7 @@ func TestListJobs_Pagination_Descent(t *testing.T) { IntervalSecond: util.Int64Pointer(3), }, }, + NoCatchup: false, CreatedAtInSec: 1, UpdatedAtInSec: 1, ResourceReferences: []*model.ResourceReference{ @@ -352,6 +356,7 @@ func TestListJobs_Pagination_LessThanPageSize(t *testing.T) { Cron: util.StringPointer("1 * *"), }, }, + NoCatchup: true, CreatedAtInSec: 2, UpdatedAtInSec: 2, ResourceReferences: []*model.ResourceReference{ @@ -414,6 +419,12 @@ func TestListJobs_FilterByReferenceKey(t *testing.T) { assert.Equal(t, "", nextPageToken) assert.Equal(t, 1, total_size) assert.Equal(t, jobsExpected, jobs) + + jobs, total_size, nextPageToken, err = jobStore.ListJobs( + &common.FilterContext{ReferenceKey: &common.ReferenceKey{Type: common.Namespace, ID: "n1"}}, opts) + assert.Nil(t, err) + assert.Equal(t, "", nextPageToken) + assert.Equal(t, 2, total_size) // both test jobs belong to namespace `n1` } func TestListJobsError(t *testing.T) { @@ -711,6 +722,7 @@ func TestUpdateJob_Success(t *testing.T) { Spec: swfapi.ScheduledWorkflowSpec{ Enabled: false, MaxConcurrency: util.Int64Pointer(200), + NoCatchup: util.BoolPointer(true), Workflow: &swfapi.WorkflowResource{ Parameters: 
[]swfapi.Parameter{ {Name: "PARAM1", Value: "NEW_VALUE1"}, @@ -755,6 +767,7 @@ func TestUpdateJob_Success(t *testing.T) { CreatedAtInSec: 1, UpdatedAtInSec: 1, MaxConcurrency: 200, + NoCatchup: true, PipelineSpec: model.PipelineSpec{ PipelineId: "1", PipelineName: "p1", diff --git a/backend/src/apiserver/storage/minio_client_fake.go b/backend/src/apiserver/storage/minio_client_fake.go index d5b53f0c42b..f07233db5f1 100644 --- a/backend/src/apiserver/storage/minio_client_fake.go +++ b/backend/src/apiserver/storage/minio_client_fake.go @@ -59,3 +59,8 @@ func (c *FakeMinioClient) DeleteObject(bucketName, objectName string) error { func (c *FakeMinioClient) GetObjectCount() int { return len(c.minioClient) } + +func (c *FakeMinioClient) ExistObject(objectName string) bool { + _, ok := c.minioClient[objectName] + return ok +} diff --git a/backend/src/apiserver/storage/object_store.go b/backend/src/apiserver/storage/object_store.go index ce714203441..87f527ca6c0 100644 --- a/backend/src/apiserver/storage/object_store.go +++ b/backend/src/apiserver/storage/object_store.go @@ -16,6 +16,7 @@ package storage import ( "bytes" + "path" "regexp" "github.com/ghodss/yaml" @@ -34,15 +35,22 @@ type ObjectStoreInterface interface { GetFile(filePath string) ([]byte, error) AddAsYamlFile(o interface{}, filePath string) error GetFromYamlFile(o interface{}, filePath string) error + GetPipelineKey(pipelineId string) string } // Managing pipeline using Minio type MinioObjectStore struct { minioClient MinioClientInterface bucketName string + baseFolder string disableMultipart bool } +// GetPipelineKey adds the configured base folder to pipeline id. +func (m *MinioObjectStore) GetPipelineKey(pipelineID string) string { + return path.Join(m.baseFolder, pipelineID) +} + func (m *MinioObjectStore) AddFile(file []byte, filePath string) error { var parts int64 @@ -118,6 +126,6 @@ func buildPath(folder, file string) string { return folder + "/" + file } -func NewMinioObjectStore(minioClient MinioClientInterface, bucketName string, disableMultipart bool) *MinioObjectStore { - return &MinioObjectStore{minioClient: minioClient, bucketName: bucketName, disableMultipart: disableMultipart} +func NewMinioObjectStore(minioClient MinioClientInterface, bucketName string, baseFolder string, disableMultipart bool) *MinioObjectStore { + return &MinioObjectStore{minioClient: minioClient, bucketName: bucketName, baseFolder: baseFolder, disableMultipart: disableMultipart} } diff --git a/backend/src/apiserver/storage/object_store_fake.go b/backend/src/apiserver/storage/object_store_fake.go index 9fed301e44d..2c59f68341d 100644 --- a/backend/src/apiserver/storage/object_store_fake.go +++ b/backend/src/apiserver/storage/object_store_fake.go @@ -16,5 +16,5 @@ package storage // Return the object store with faked minio client. 
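GetPipelineKey above is a thin wrapper over path.Join, and its behavior with an empty base folder is worth seeing concretely; a quick runnable check:

package main

import (
	"fmt"
	"path"
)

func main() {
	// With a configured base folder, pipeline ids are nested under it.
	fmt.Println(path.Join("pipelines", "abc-123")) // pipelines/abc-123

	// With an empty base folder, path.Join simply returns the id, so the
	// object key degenerates to the bare pipeline id.
	fmt.Println(path.Join("", "abc-123")) // abc-123
}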
func NewFakeObjectStore() ObjectStoreInterface { - return NewMinioObjectStore(NewFakeMinioClient(), "", false) + return NewMinioObjectStore(NewFakeMinioClient(), "", "pipelines", false) } diff --git a/backend/src/apiserver/storage/object_store_test.go b/backend/src/apiserver/storage/object_store_test.go index 977e52dc05c..fc5fede25df 100644 --- a/backend/src/apiserver/storage/object_store_test.go +++ b/backend/src/apiserver/storage/object_store_test.go @@ -46,78 +46,80 @@ func (c *FakeBadMinioClient) DeleteObject(bucketName, objectName string) error { func TestAddFile(t *testing.T) { minioClient := NewFakeMinioClient() - manager := &MinioObjectStore{minioClient: minioClient} - error := manager.AddFile([]byte("abc"), CreatePipelinePath("1")) + manager := &MinioObjectStore{minioClient: minioClient, baseFolder: "pipeline"} + error := manager.AddFile([]byte("abc"), manager.GetPipelineKey("1")) assert.Nil(t, error) assert.Equal(t, 1, minioClient.GetObjectCount()) + assert.True(t, minioClient.ExistObject("pipeline/1")) } func TestAddFileError(t *testing.T) { manager := &MinioObjectStore{minioClient: &FakeBadMinioClient{}} - error := manager.AddFile([]byte("abc"), CreatePipelinePath("1")) + error := manager.AddFile([]byte("abc"), manager.GetPipelineKey("1")) assert.Equal(t, codes.Internal, error.(*util.UserError).ExternalStatusCode()) } func TestGetFile(t *testing.T) { - manager := &MinioObjectStore{minioClient: NewFakeMinioClient()} - manager.AddFile([]byte("abc"), CreatePipelinePath("1")) - file, error := manager.GetFile(CreatePipelinePath("1")) + manager := &MinioObjectStore{minioClient: NewFakeMinioClient(), baseFolder: "pipeline"} + manager.AddFile([]byte("abc"), manager.GetPipelineKey("1")) + file, error := manager.GetFile(manager.GetPipelineKey("1")) assert.Nil(t, error) assert.Equal(t, file, []byte("abc")) } func TestGetFileError(t *testing.T) { - manager := &MinioObjectStore{minioClient: &FakeBadMinioClient{}} - _, error := manager.GetFile(CreatePipelinePath("1")) + manager := &MinioObjectStore{minioClient: &FakeBadMinioClient{}, baseFolder: "pipeline"} + _, error := manager.GetFile(manager.GetPipelineKey("1")) assert.Equal(t, codes.Internal, error.(*util.UserError).ExternalStatusCode()) } func TestDeleteFile(t *testing.T) { minioClient := NewFakeMinioClient() - manager := &MinioObjectStore{minioClient: minioClient} - manager.AddFile([]byte("abc"), CreatePipelinePath("1")) - error := manager.DeleteFile(CreatePipelinePath("1")) + manager := &MinioObjectStore{minioClient: minioClient, baseFolder: "pipeline"} + manager.AddFile([]byte("abc"), manager.GetPipelineKey("1")) + error := manager.DeleteFile(manager.GetPipelineKey("1")) assert.Nil(t, error) assert.Equal(t, 0, minioClient.GetObjectCount()) } func TestDeleteFileError(t *testing.T) { manager := &MinioObjectStore{minioClient: &FakeBadMinioClient{}} - error := manager.DeleteFile(CreatePipelinePath("1")) + error := manager.DeleteFile(manager.GetPipelineKey("1")) assert.Equal(t, codes.Internal, error.(*util.UserError).ExternalStatusCode()) } func TestAddAsYamlFile(t *testing.T) { minioClient := NewFakeMinioClient() - manager := &MinioObjectStore{minioClient: minioClient} - error := manager.AddAsYamlFile(Foo{ID: 1}, CreatePipelinePath("1")) + manager := &MinioObjectStore{minioClient: minioClient, baseFolder: "pipeline"} + error := manager.AddAsYamlFile(Foo{ID: 1}, manager.GetPipelineKey("1")) assert.Nil(t, error) assert.Equal(t, 1, minioClient.GetObjectCount()) + assert.True(t, minioClient.ExistObject("pipeline/1")) } func 
TestGetFromYamlFile(t *testing.T) { minioClient := NewFakeMinioClient() - manager := &MinioObjectStore{minioClient: minioClient} + manager := &MinioObjectStore{minioClient: minioClient, baseFolder: "pipeline"} manager.minioClient.PutObject( - "", CreatePipelinePath("1"), + "", manager.GetPipelineKey("1"), bytes.NewReader([]byte("id: 1")), -1, minio.PutObjectOptions{ContentType: "application/octet-stream"}) expectedFoo := Foo{ID: 1} var foo Foo - error := manager.GetFromYamlFile(&foo, CreatePipelinePath("1")) + error := manager.GetFromYamlFile(&foo, manager.GetPipelineKey("1")) assert.Nil(t, error) assert.Equal(t, expectedFoo, foo) } func TestGetFromYamlFile_UnmarshalError(t *testing.T) { minioClient := NewFakeMinioClient() - manager := &MinioObjectStore{minioClient: minioClient} + manager := &MinioObjectStore{minioClient: minioClient, baseFolder: "pipeline"} manager.minioClient.PutObject( - "", CreatePipelinePath("1"), + "", manager.GetPipelineKey("1"), bytes.NewReader([]byte("invalid")), -1, minio.PutObjectOptions{ContentType: "application/octet-stream"}) var foo Foo - error := manager.GetFromYamlFile(&foo, CreatePipelinePath("1")) + error := manager.GetFromYamlFile(&foo, manager.GetPipelineKey("1")) assert.Equal(t, codes.Internal, error.(*util.UserError).ExternalStatusCode()) assert.Contains(t, error.Error(), "Failed to unmarshal") } diff --git a/backend/src/apiserver/storage/pipeline_store.go b/backend/src/apiserver/storage/pipeline_store.go index 02dce6aaf74..f46f2e4a68d 100644 --- a/backend/src/apiserver/storage/pipeline_store.go +++ b/backend/src/apiserver/storage/pipeline_store.go @@ -43,6 +43,16 @@ var pipelineColumns = []string{ "pipeline_versions.CodeSourceUrl", } +var pipelineVersionColumns = []string{ + "pipeline_versions.UUID", + "pipeline_versions.CreatedAtInSec", + "pipeline_versions.Name", + "pipeline_versions.Parameters", + "pipeline_versions.PipelineId", + "pipeline_versions.Status", + "pipeline_versions.CodeSourceUrl", +} + type PipelineStoreInterface interface { ListPipelines(opts *list.Options) ([]*model.Pipeline, int, string, error) GetPipeline(pipelineId string) (*model.Pipeline, error) @@ -50,7 +60,13 @@ type PipelineStoreInterface interface { DeletePipeline(pipelineId string) error CreatePipeline(*model.Pipeline) (*model.Pipeline, error) UpdatePipelineStatus(string, model.PipelineStatus) error + UpdatePipelineDefaultVersion(string, string) error + CreatePipelineVersion(*model.PipelineVersion) (*model.PipelineVersion, error) + GetPipelineVersion(versionId string) (*model.PipelineVersion, error) + GetPipelineVersionWithStatus(versionId string, status model.PipelineVersionStatus) (*model.PipelineVersion, error) + ListPipelineVersions(pipelineId string, opts *list.Options) ([]*model.PipelineVersion, int, string, error) + DeletePipelineVersion(pipelineVersionId string) error // Change status of a particular version. UpdatePipelineVersionStatus(pipelineVersionId string, status model.PipelineVersionStatus) error // TODO(jingzhang36): remove this temporary method after resource manager's @@ -72,7 +88,7 @@ func (s *PipelineStore) ListPipelines(opts *list.Options) ([]*model.Pipeline, in } buildQuery := func(sqlBuilder sq.SelectBuilder) sq.SelectBuilder { - return sqlBuilder. + return opts.AddFilterToSelect(sqlBuilder). From("pipelines"). LeftJoin("pipeline_versions ON pipelines.DefaultVersionId = pipeline_versions.UUID"). 
Where(sq.Eq{"pipelines.Status": model.PipelineReady}) @@ -206,8 +222,7 @@ func (s *PipelineStore) GetPipelineWithStatus(id string, status model.PipelineSt Select(pipelineColumns...). From("pipelines"). LeftJoin("pipeline_versions on pipelines.DefaultVersionId = pipeline_versions.UUID"). - Where(sq.Eq{"pipelines.uuid": id}). - Where(sq.Eq{"pipelines.Status": status}). + Where(sq.And{sq.Eq{"pipelines.uuid": id}, sq.Eq{"pipelines.Status": status}}). Limit(1).ToSql() if err != nil { return nil, util.NewInternalServerError(err, "Failed to create query to get pipeline: %v", err.Error()) @@ -316,7 +331,7 @@ func (s *PipelineStore) CreatePipeline(p *model.Pipeline) (*model.Pipeline, erro if err != nil { if s.db.IsDuplicateError(err) { tx.Rollback() - return nil, util.NewInvalidInputError( + return nil, util.NewAlreadyExistError( "Failed to create a new pipeline. The name %v already exist. Please specify a new name.", p.Name) } tx.Rollback() @@ -327,7 +342,7 @@ func (s *PipelineStore) CreatePipeline(p *model.Pipeline) (*model.Pipeline, erro if err != nil { if s.db.IsDuplicateError(err) { tx.Rollback() - return nil, util.NewInvalidInputError( + return nil, util.NewAlreadyExistError( `Failed to create a new pipeline version. The name %v already exist. Please specify a new name.`, p.DefaultVersion.Name) } @@ -381,6 +396,12 @@ func (s *PipelineStore) UpdatePipelineVersionStatus(id string, status model.Pipe func (s *PipelineStore) UpdatePipelineAndVersionsStatus(id string, status model.PipelineStatus, pipelineVersionId string, pipelineVersionStatus model.PipelineVersionStatus) error { tx, err := s.db.Begin() + if err != nil { + return util.NewInternalServerError( + err, + "Failed to start a transaction: %s", + err.Error()) + } sql, args, err := sq. Update("pipelines"). @@ -425,3 +446,351 @@ func (s *PipelineStore) UpdatePipelineAndVersionsStatus(id string, status model. func NewPipelineStore(db *DB, time util.TimeInterface, uuid util.UUIDGeneratorInterface) *PipelineStore { return &PipelineStore{db: db, time: time, uuid: uuid} } + +func (s *PipelineStore) CreatePipelineVersion(v *model.PipelineVersion) (*model.PipelineVersion, error) { + newPipelineVersion := *v + newPipelineVersion.CreatedAtInSec = s.time.Now().Unix() + id, err := s.uuid.NewRandom() + if err != nil { + return nil, util.NewInternalServerError(err, "Failed to create a pipeline version id.") + } + newPipelineVersion.UUID = id.String() + + // Prepare queries of inserting new version and updating default version. + versionSql, versionArgs, versionErr := sq. + Insert("pipeline_versions"). + SetMap( + sq.Eq{ + "UUID": newPipelineVersion.UUID, + "CreatedAtInSec": newPipelineVersion.CreatedAtInSec, + "Name": newPipelineVersion.Name, + "Parameters": newPipelineVersion.Parameters, + "PipelineId": newPipelineVersion.PipelineId, + "Status": string(newPipelineVersion.Status), + "CodeSourceUrl": newPipelineVersion.CodeSourceUrl}). + ToSql() + if versionErr != nil { + return nil, util.NewInternalServerError( + versionErr, + "Failed to create query to insert version to pipeline version table: %v", + versionErr.Error()) + } + pipelineSql, pipelineArgs, pipelineErr := sq. + Update("pipelines"). + SetMap(sq.Eq{"DefaultVersionId": newPipelineVersion.UUID}). + Where(sq.Eq{"UUID": newPipelineVersion.PipelineId}). 
+		ToSql()
+	if pipelineErr != nil {
+		return nil, util.NewInternalServerError(
+			pipelineErr,
+			"Failed to create query to update pipeline default version id: %v",
+			pipelineErr.Error())
+	}
+
+	// In a single transaction, insert new version and update default version.
+	tx, err := s.db.Begin()
+	if err != nil {
+		return nil, util.NewInternalServerError(
+			err,
+			"Failed to start a transaction: %v",
+			err.Error())
+	}
+
+	_, err = tx.Exec(versionSql, versionArgs...)
+	if err != nil {
+		tx.Rollback()
+		if s.db.IsDuplicateError(err) {
+			return nil, util.NewAlreadyExistError(
+				"Failed to create a new pipeline version. The name %v already exists. Please specify a new name.", v.Name)
+		}
+		return nil, util.NewInternalServerError(err, "Failed to add version to pipeline version table: %v",
+			err.Error())
+	}
+	_, err = tx.Exec(pipelineSql, pipelineArgs...)
+	if err != nil {
+		tx.Rollback()
+		return nil, util.NewInternalServerError(err, "Failed to update pipeline default version id: %v",
+			err.Error())
+	}
+	if err := tx.Commit(); err != nil {
+		return nil, util.NewInternalServerError(err, "Failed to create new pipeline version: %v",
+			err.Error())
+	}
+
+	return &newPipelineVersion, nil
+}
+
+func (s *PipelineStore) UpdatePipelineDefaultVersion(pipelineId string, versionId string) error {
+	sql, args, err := sq.
+		Update("pipelines").
+		SetMap(sq.Eq{"DefaultVersionId": versionId}).
+		Where(sq.Eq{"UUID": pipelineId}).
+		ToSql()
+	if err != nil {
+		return util.NewInternalServerError(err, "Failed to create query to update the pipeline default version: %s", err.Error())
+	}
+	_, err = s.db.Exec(sql, args...)
+	if err != nil {
+		return util.NewInternalServerError(err, "Failed to update the pipeline default version: %s", err.Error())
+	}
+
+	return nil
+}
+
+func (s *PipelineStore) GetPipelineVersion(versionId string) (*model.PipelineVersion, error) {
+	return s.GetPipelineVersionWithStatus(versionId, model.PipelineVersionReady)
+}
+
+func (s *PipelineStore) GetPipelineVersionWithStatus(versionId string, status model.PipelineVersionStatus) (*model.PipelineVersion, error) {
+	sql, args, err := sq.
+		Select(pipelineVersionColumns...).
+		From("pipeline_versions").
+		Where(sq.And{sq.Eq{"UUID": versionId}, sq.Eq{"Status": status}}).
+		Limit(1).
+		ToSql()
+	if err != nil {
+		return nil, util.NewInternalServerError(err, "Failed to create query to get pipeline version: %v", err.Error())
+	}
+	r, err := s.db.Query(sql, args...)
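The create-version path above follows the standard database/sql transaction shape: begin, run both statements, roll back on the first failure, commit once. A stripped-down sketch of that pattern with illustrative table names (not the store's actual code):

package storagesketch

import (
	"database/sql"
	"fmt"
)

// insertVersionAndSetDefault writes a version row and repoints the pipeline's
// default version atomically: either both statements land or neither does.
func insertVersionAndSetDefault(db *sql.DB, versionID, pipelineID string) error {
	tx, err := db.Begin()
	if err != nil {
		return fmt.Errorf("begin transaction: %w", err)
	}
	if _, err := tx.Exec(
		"INSERT INTO pipeline_versions (UUID, PipelineId) VALUES (?, ?)",
		versionID, pipelineID); err != nil {
		tx.Rollback() // nothing written yet that should survive
		return fmt.Errorf("insert version: %w", err)
	}
	if _, err := tx.Exec(
		"UPDATE pipelines SET DefaultVersionId = ? WHERE UUID = ?",
		versionID, pipelineID); err != nil {
		tx.Rollback() // undo the insert so the two tables stay consistent
		return fmt.Errorf("update default version: %w", err)
	}
	return tx.Commit()
}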
+ if err != nil { + return nil, util.NewInternalServerError(err, "Failed to get pipeline version: %v", err.Error()) + } + defer r.Close() + versions, err := s.scanPipelineVersionRows(r) + + if err != nil || len(versions) > 1 { + return nil, util.NewInternalServerError(err, "Failed to get pipeline version: %v", err.Error()) + } + if len(versions) == 0 { + return nil, util.NewResourceNotFoundError("Version", fmt.Sprint(versionId)) + } + return versions[0], nil +} + +func (s *PipelineStore) scanPipelineVersionRows(rows *sql.Rows) ([]*model.PipelineVersion, error) { + var pipelineVersions []*model.PipelineVersion + for rows.Next() { + var uuid, name, parameters, pipelineId, codeSourceUrl, status sql.NullString + var createdAtInSec sql.NullInt64 + if err := rows.Scan( + &uuid, + &createdAtInSec, + &name, + ¶meters, + &pipelineId, + &status, + &codeSourceUrl, + ); err != nil { + return nil, err + } + if uuid.Valid { + pipelineVersions = append(pipelineVersions, &model.PipelineVersion{ + UUID: uuid.String, + CreatedAtInSec: createdAtInSec.Int64, + Name: name.String, + Parameters: parameters.String, + PipelineId: pipelineId.String, + CodeSourceUrl: codeSourceUrl.String, + Status: model.PipelineVersionStatus(status.String)}) + } + } + return pipelineVersions, nil +} + +func (s *PipelineStore) ListPipelineVersions(pipelineId string, opts *list.Options) ([]*model.PipelineVersion, int, string, error) { + errorF := func(err error) ([]*model.PipelineVersion, int, string, error) { + return nil, 0, "", util.NewInternalServerError(err, "Failed to list pipeline versions: %v", err) + } + + buildQuery := func(sqlBuilder sq.SelectBuilder) sq.SelectBuilder { + return sqlBuilder. + From("pipeline_versions"). + Where(sq.And{sq.Eq{"PipelineId": pipelineId}, sq.Eq{"status": model.PipelineVersionReady}}) + } + + // SQL for pipeline version list + rowsSql, rowsArgs, err := opts.AddPaginationToSelect( + buildQuery(sq.Select(pipelineVersionColumns...))).ToSql() + if err != nil { + return errorF(err) + } + + // SQL for getting total size of pipeline versions. + sizeSql, sizeArgs, err := buildQuery(sq.Select("count(*)")).ToSql() + if err != nil { + return errorF(err) + } + + // Use a transaction to make sure we're returning the total_size of the same + // rows queried. + tx, err := s.db.Begin() + if err != nil { + glog.Errorf("Failed to start transaction to list pipelines") + return errorF(err) + } + + rows, err := tx.Query(rowsSql, rowsArgs...) + if err != nil { + tx.Rollback() + return errorF(err) + } + pipelineVersions, err := s.scanPipelineVersionRows(rows) + if err != nil { + tx.Rollback() + return errorF(err) + } + rows.Close() + + sizeRow, err := tx.Query(sizeSql, sizeArgs...) + if err != nil { + tx.Rollback() + return errorF(err) + } + total_size, err := list.ScanRowToTotalSize(sizeRow) + if err != nil { + tx.Rollback() + return errorF(err) + } + sizeRow.Close() + + err = tx.Commit() + if err != nil { + glog.Errorf("Failed to commit transaction to list pipelines") + return errorF(err) + } + + if len(pipelineVersions) <= opts.PageSize { + return pipelineVersions, total_size, "", nil + } + + npt, err := opts.NextPageToken(pipelineVersions[opts.PageSize]) + return pipelineVersions[:opts.PageSize], total_size, npt, err +} + +func (s *PipelineStore) DeletePipelineVersion(versionId string) error { + // If this version is used as default version for a pipeline, we have to + // find a new default version for that pipeline, which is usually the latest + // version of that pipeline. 
Then we'll have 3 operations in a single
+	// transaction: (1) delete the version, (2) get a new default version id,
+	// and (3) use the new default version id to update the pipeline.
+	tx, err := s.db.Begin()
+	if err != nil {
+		return util.NewInternalServerError(
+			err,
+			"Failed to start a transaction while trying to delete pipeline version: %v",
+			err.Error())
+	}
+
+	// (1) delete version.
+	_, err = tx.Exec(
+		"delete from pipeline_versions where UUID = ?",
+		versionId)
+	if err != nil {
+		tx.Rollback()
+		return util.NewInternalServerError(
+			err,
+			"Failed to delete pipeline version: %v",
+			err.Error())
+	}
+
+	// (2) check whether this version is used as default version.
+	r, err := tx.Query(
+		"select UUID from pipelines where DefaultVersionId = ?",
+		versionId)
+	if err != nil {
+		tx.Rollback()
+		return util.NewInternalServerError(
+			err,
+			`Failed to query pipelines table while deleting pipeline version:
+			%v`,
+			err.Error())
+	}
+	var pipelineId = ""
+	if r.Next() {
+		if err := r.Scan(&pipelineId); err != nil {
+			tx.Rollback()
+			return util.NewInternalServerError(
+				err,
+				"Failed to get pipeline id for version id: %v",
+				err.Error())
+		}
+	}
+	r.Close()
+	if len(pipelineId) == 0 {
+		// The deleted version is not used as a default version. So no extra
+		// work is needed. We commit the deletion now.
+		if err := tx.Commit(); err != nil {
+			return util.NewInternalServerError(
+				err,
+				"Failed to delete pipeline version: %v",
+				err.Error())
+		}
+		return nil
+	}
+
+	// (3) find a new default version.
+	r, err = tx.Query(
+		`select UUID from pipeline_versions
+		where PipelineId = ? and Status = ?
+		order by CreatedAtInSec DESC
+		limit 1`,
+		pipelineId,
+		model.PipelineVersionReady)
+	if err != nil {
+		tx.Rollback()
+		return util.NewInternalServerError(
+			err,
+			"Failed to get a new default version id: %v",
+			err.Error())
+	}
+	var newDefaultVersionId = ""
+	if r.Next() {
+		if err := r.Scan(&newDefaultVersionId); err != nil {
+			tx.Rollback()
+			return util.NewInternalServerError(
+				err,
+				"Failed to get a new default version id: %v",
+				err.Error())
+		}
+	}
+	r.Close()
+	if len(newDefaultVersionId) == 0 {
+		// No new default version. The pipeline's default version id will be
+		// null.
+		_, err = tx.Exec(
+			"update pipelines set DefaultVersionId = null where UUID = ?",
+			pipelineId)
+		if err != nil {
+			tx.Rollback()
+			return util.NewInternalServerError(
+				err,
+				"Failed to update pipeline's default version id: %v",
+				err.Error())
+		}
+	} else {
+		_, err = tx.Exec(
+			"update pipelines set DefaultVersionId = ? where UUID = ?",
+			newDefaultVersionId, pipelineId)
+		if err != nil {
+			tx.Rollback()
+			return util.NewInternalServerError(
+				err,
+				"Failed to update pipeline's default version id: %v",
+				err.Error())
+		}
+	}
+
+	if err := tx.Commit(); err != nil {
+		return util.NewInternalServerError(
+			err,
+			"Failed to delete pipeline version: %v",
+			err.Error())
+	}
+	return nil
+}
+
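The fallback in step (3) above reduces to choosing the newest ready version, or clearing the default when none remain. As a pure-function sketch with illustrative types (the store does this in SQL via order by CreatedAtInSec DESC limit 1):

package main

import "fmt"

type version struct {
	UUID           string
	CreatedAtInSec int64
	Ready          bool
}

// newDefaultVersion returns the id of the latest ready version, or "" when
// the pipeline's DefaultVersionId should become NULL.
func newDefaultVersion(remaining []version) string {
	bestID, bestTime := "", int64(-1)
	for _, v := range remaining {
		if v.Ready && v.CreatedAtInSec > bestTime {
			bestID, bestTime = v.UUID, v.CreatedAtInSec
		}
	}
	return bestID
}

func main() {
	fmt.Println(newDefaultVersion([]version{{"v1", 1, true}, {"v2", 2, true}})) // v2
	fmt.Println(newDefaultVersion(nil))                                         // "" -> NULL
}

+// SetUUIDGenerator is for unit tests in other packages that need to set uuid,
+// since uuid is not exported.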
+func (s *PipelineStore) SetUUIDGenerator(new_uuid util.UUIDGeneratorInterface) { + s.uuid = new_uuid +} diff --git a/backend/src/apiserver/storage/pipeline_store_test.go b/backend/src/apiserver/storage/pipeline_store_test.go index 980acff52c3..b0f65ec4d19 100644 --- a/backend/src/apiserver/storage/pipeline_store_test.go +++ b/backend/src/apiserver/storage/pipeline_store_test.go @@ -17,6 +17,7 @@ package storage import ( "testing" + api "github.com/kubeflow/pipelines/backend/api/go_client" "github.com/kubeflow/pipelines/backend/src/apiserver/list" "github.com/kubeflow/pipelines/backend/src/apiserver/model" "github.com/kubeflow/pipelines/backend/src/common/util" @@ -29,6 +30,7 @@ const ( fakeUUIDTwo = "123e4567-e89b-12d3-a456-426655440001" fakeUUIDThree = "123e4567-e89b-12d3-a456-426655440002" fakeUUIDFour = "123e4567-e89b-12d3-a456-426655440003" + fakeUUIDFive = "123e4567-e89b-12d3-a456-426655440004" ) func createPipeline(name string) *model.Pipeline { @@ -101,6 +103,52 @@ func TestListPipelines_FilterOutNotReady(t *testing.T) { assert.Equal(t, pipelinesExpected, pipelines) } +func TestListPipelines_WithFilter(t *testing.T) { + db := NewFakeDbOrFatal() + defer db.Close() + pipelineStore := NewPipelineStore(db, util.NewFakeTimeForEpoch(), util.NewFakeUUIDGeneratorOrFatal(fakeUUID, nil)) + pipelineStore.CreatePipeline(createPipeline("pipeline_foo")) + pipelineStore.uuid = util.NewFakeUUIDGeneratorOrFatal(fakeUUIDTwo, nil) + pipelineStore.CreatePipeline(createPipeline("pipeline_bar")) + pipelineStore.uuid = util.NewFakeUUIDGeneratorOrFatal(fakeUUIDThree, nil) + + expectedPipeline1 := &model.Pipeline{ + UUID: fakeUUID, + CreatedAtInSec: 1, + Name: "pipeline_foo", + Parameters: `[{"Name": "param1"}]`, + Status: model.PipelineReady, + DefaultVersionId: fakeUUID, + DefaultVersion: &model.PipelineVersion{ + UUID: fakeUUID, + CreatedAtInSec: 1, + Name: "pipeline_foo", + Parameters: `[{"Name": "param1"}]`, + PipelineId: fakeUUID, + Status: model.PipelineVersionReady, + }} + pipelinesExpected := []*model.Pipeline{expectedPipeline1} + + filterProto := &api.Filter{ + Predicates: []*api.Predicate{ + &api.Predicate{ + Key: "name", + Op: api.Predicate_IS_SUBSTRING, + Value: &api.Predicate_StringValue{StringValue: "pipeline_f"}, + }, + }, + } + opts, err := list.NewOptions(&model.Pipeline{}, 10, "id", filterProto) + assert.Nil(t, err) + + pipelines, totalSize, nextPageToken, err := pipelineStore.ListPipelines(opts) + + assert.Nil(t, err) + assert.Equal(t, "", nextPageToken) + assert.Equal(t, 1, totalSize) + assert.Equal(t, pipelinesExpected, pipelines) +} + func TestListPipelines_Pagination(t *testing.T) { db := NewFakeDbOrFatal() defer db.Close() @@ -505,3 +553,721 @@ func TestUpdatePipelineStatusError(t *testing.T) { err := pipelineStore.UpdatePipelineStatus(fakeUUID, model.PipelineDeleting) assert.Equal(t, codes.Internal, err.(*util.UserError).ExternalStatusCode()) } + +func TestCreatePipelineVersion(t *testing.T) { + db := NewFakeDbOrFatal() + defer db.Close() + pipelineStore := NewPipelineStore( + db, + util.NewFakeTimeForEpoch(), + util.NewFakeUUIDGeneratorOrFatal(fakeUUID, nil)) + + // Create a pipeline first. + pipelineStore.CreatePipeline( + &model.Pipeline{ + Name: "pipeline_1", + Parameters: `[{"Name": "param1"}]`, + Status: model.PipelineReady, + }) + + // Create a version under the above pipeline. 
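The tests below can poke pipelineStore.uuid directly because they live in the storage package; a hypothetical test elsewhere would go through the exported hook added above instead, along these lines (identifiers as used elsewhere in this patch):

// In another package's test:
//
//	store := storage.NewPipelineStore(db, util.NewFakeTimeForEpoch(),
//		util.NewFakeUUIDGeneratorOrFatal(fakeUUID, nil))
//	store.SetUUIDGenerator(util.NewFakeUUIDGeneratorOrFatal(fakeUUIDTwo, nil))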
+ pipelineStore.uuid = util.NewFakeUUIDGeneratorOrFatal(fakeUUIDTwo, nil) + pipelineVersion := &model.PipelineVersion{ + Name: "pipeline_version_1", + Parameters: `[{"Name": "param1"}]`, + PipelineId: fakeUUID, + Status: model.PipelineVersionCreating, + CodeSourceUrl: "code_source_url", + } + pipelineVersionCreated, err := pipelineStore.CreatePipelineVersion( + pipelineVersion) + + // Check whether created pipeline version is as expected. + pipelineVersionExpected := model.PipelineVersion{ + UUID: fakeUUIDTwo, + CreatedAtInSec: 2, + Name: "pipeline_version_1", + Parameters: `[{"Name": "param1"}]`, + Status: model.PipelineVersionCreating, + PipelineId: fakeUUID, + CodeSourceUrl: "code_source_url", + } + assert.Nil(t, err) + assert.Equal( + t, + pipelineVersionExpected, + *pipelineVersionCreated, + "Got unexpected pipeline.") + + // Check whether pipeline has updated default version id. + pipeline, err := pipelineStore.GetPipeline(fakeUUID) + assert.Nil(t, err) + assert.Equal(t, pipeline.DefaultVersionId, fakeUUIDTwo, "Got unexpected default version id.") +} + +func TestCreatePipelineVersion_DuplicateKey(t *testing.T) { + db := NewFakeDbOrFatal() + defer db.Close() + pipelineStore := NewPipelineStore( + db, + util.NewFakeTimeForEpoch(), + util.NewFakeUUIDGeneratorOrFatal(fakeUUID, nil)) + + // Create a pipeline. + pipelineStore.CreatePipeline( + &model.Pipeline{ + Name: "pipeline_1", + Parameters: `[{"Name": "param1"}]`, + Status: model.PipelineReady, + }) + + // Create a version under the above pipeline. + pipelineStore.uuid = util.NewFakeUUIDGeneratorOrFatal(fakeUUIDTwo, nil) + pipelineStore.CreatePipelineVersion( + &model.PipelineVersion{ + Name: "pipeline_version_1", + Parameters: `[{"Name": "param1"}]`, + PipelineId: fakeUUID, + Status: model.PipelineVersionCreating, + }) + + // Create another new version with same name. + _, err := pipelineStore.CreatePipelineVersion( + &model.PipelineVersion{ + Name: "pipeline_version_1", + Parameters: `[{"Name": "param2"}]`, + PipelineId: fakeUUID, + Status: model.PipelineVersionCreating, + }) + assert.NotNil(t, err) + assert.Contains(t, err.Error(), "The name pipeline_version_1 already exist") +} + +func TestCreatePipelineVersion_InternalServerError_DBClosed(t *testing.T) { + db := NewFakeDbOrFatal() + defer db.Close() + pipelineStore := NewPipelineStore( + db, + util.NewFakeTimeForEpoch(), + util.NewFakeUUIDGeneratorOrFatal(fakeUUID, nil)) + + db.Close() + // Try to create a new version but db is closed. + _, err := pipelineStore.CreatePipelineVersion( + &model.PipelineVersion{ + Name: "pipeline_version_1", + Parameters: `[{"Name": "param1"}]`, + PipelineId: fakeUUID, + }) + assert.Equal(t, codes.Internal, err.(*util.UserError).ExternalStatusCode(), + "Expected create pipeline version to return error") +} + +func TestDeletePipelineVersion(t *testing.T) { + db := NewFakeDbOrFatal() + defer db.Close() + pipelineStore := NewPipelineStore( + db, + util.NewFakeTimeForEpoch(), + util.NewFakeUUIDGeneratorOrFatal(fakeUUID, nil)) + + // Create a pipeline. + pipelineStore.CreatePipeline( + &model.Pipeline{ + Name: "pipeline_1", + Parameters: `[{"Name": "param1"}]`, + Status: model.PipelineReady, + }) + + // Create a version under the above pipeline. 
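+ // Its id will be fakeUUIDTwo; it stays the default only until a newer version is created.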
+ pipelineStore.uuid = util.NewFakeUUIDGeneratorOrFatal(fakeUUIDTwo, nil)
+ pipelineStore.CreatePipelineVersion(
+   &model.PipelineVersion{
+     Name: "pipeline_version_1",
+     Parameters: `[{"Name": "param1"}]`,
+     PipelineId: fakeUUID,
+     Status: model.PipelineVersionReady,
+   })
+
+ // Create a second version, which will become the default version.
+ pipelineStore.uuid = util.NewFakeUUIDGeneratorOrFatal(fakeUUIDThree, nil)
+ pipelineStore.CreatePipelineVersion(
+   &model.PipelineVersion{
+     Name: "pipeline_version_2",
+     Parameters: `[{"Name": "param1"}]`,
+     PipelineId: fakeUUID,
+     Status: model.PipelineVersionReady,
+   })
+
+ // Delete the version with id fakeUUIDThree.
+ err := pipelineStore.DeletePipelineVersion(fakeUUIDThree)
+ assert.Nil(t, err)
+
+ // Check the version is removed.
+ _, err = pipelineStore.GetPipelineVersion(fakeUUIDThree)
+ assert.Equal(t, codes.NotFound, err.(*util.UserError).ExternalStatusCode())
+
+ // Check that the new default version is the one with id fakeUUIDTwo.
+ pipeline, err := pipelineStore.GetPipeline(fakeUUID)
+ assert.Nil(t, err)
+ assert.Equal(t, pipeline.DefaultVersionId, fakeUUIDTwo)
+}
+
+func TestDeletePipelineVersionError(t *testing.T) {
+ db := NewFakeDbOrFatal()
+ defer db.Close()
+ pipelineStore := NewPipelineStore(
+   db,
+   util.NewFakeTimeForEpoch(),
+   util.NewFakeUUIDGeneratorOrFatal(fakeUUID, nil))
+
+ // Create a pipeline.
+ pipelineStore.CreatePipeline(
+   &model.Pipeline{
+     Name: "pipeline_1",
+     Parameters: `[{"Name": "param1"}]`,
+     Status: model.PipelineReady,
+   })
+
+ // Create a version under the above pipeline.
+ pipelineStore.uuid = util.NewFakeUUIDGeneratorOrFatal(fakeUUIDTwo, nil)
+ pipelineStore.CreatePipelineVersion(
+   &model.PipelineVersion{
+     Name: "pipeline_version_1",
+     Parameters: `[{"Name": "param1"}]`,
+     PipelineId: fakeUUID,
+     Status: model.PipelineVersionReady,
+   })
+
+ db.Close()
+ // On a closed db, deleting a pipeline version ends in an internal error.
+ err := pipelineStore.DeletePipelineVersion(fakeUUIDTwo)
+ assert.Equal(t, codes.Internal, err.(*util.UserError).ExternalStatusCode())
+}
+
+func TestGetPipelineVersion(t *testing.T) {
+ db := NewFakeDbOrFatal()
+ defer db.Close()
+ pipelineStore := NewPipelineStore(
+   db,
+   util.NewFakeTimeForEpoch(),
+   util.NewFakeUUIDGeneratorOrFatal(fakeUUID, nil))
+
+ // Create a pipeline.
+ pipelineStore.CreatePipeline(
+   &model.Pipeline{
+     Name: "pipeline_1",
+     Parameters: `[{"Name": "param1"}]`,
+     Status: model.PipelineReady,
+   })
+
+ // Create a version under the above pipeline.
+ pipelineStore.uuid = util.NewFakeUUIDGeneratorOrFatal(fakeUUIDTwo, nil)
+ pipelineStore.CreatePipelineVersion(
+   &model.PipelineVersion{
+     Name: "pipeline_version_1",
+     Parameters: `[{"Name": "param1"}]`,
+     PipelineId: fakeUUID,
+     Status: model.PipelineVersionReady,
+   })
+
+ // Get pipeline version.
+ pipelineVersion, err := pipelineStore.GetPipelineVersion(fakeUUIDTwo)
+ assert.Nil(t, err)
+ assert.Equal(
+   t,
+   model.PipelineVersion{
+     UUID: fakeUUIDTwo,
+     Name: "pipeline_version_1",
+     CreatedAtInSec: 2,
+     Parameters: `[{"Name": "param1"}]`,
+     PipelineId: fakeUUID,
+     Status: model.PipelineVersionReady,
+   },
+   *pipelineVersion, "Got unexpected pipeline version.")
+}
+
+func TestGetPipelineVersion_InternalError(t *testing.T) {
+ db := NewFakeDbOrFatal()
+ defer db.Close()
+ pipelineStore := NewPipelineStore(
+   db,
+   util.NewFakeTimeForEpoch(),
+   util.NewFakeUUIDGeneratorOrFatal(fakeUUID, nil))
+
+ db.Close()
+ // Internal error because of closed DB.
+ _, err := pipelineStore.GetPipelineVersion("123") + assert.Equal(t, codes.Internal, err.(*util.UserError).ExternalStatusCode(), + "Expected get pipeline to return internal error") +} + +func TestGetPipelineVersion_NotFound_VersionStatusCreating(t *testing.T) { + db := NewFakeDbOrFatal() + defer db.Close() + pipelineStore := NewPipelineStore( + db, + util.NewFakeTimeForEpoch(), + util.NewFakeUUIDGeneratorOrFatal(fakeUUID, nil)) + + // Create a pipeline. + pipelineStore.CreatePipeline( + &model.Pipeline{ + Name: "pipeline_1", + Parameters: `[{"Name": "param1"}]`, + Status: model.PipelineReady, + }) + + // Create a version under the above pipeline. + pipelineStore.uuid = util.NewFakeUUIDGeneratorOrFatal(fakeUUIDTwo, nil) + pipelineStore.CreatePipelineVersion( + &model.PipelineVersion{ + Name: "pipeline_version_1", + Parameters: `[{"Name": "param1"}]`, + PipelineId: fakeUUID, + Status: model.PipelineVersionCreating, + }) + + _, err := pipelineStore.GetPipelineVersion(fakeUUIDTwo) + assert.Equal(t, codes.NotFound, err.(*util.UserError).ExternalStatusCode(), + "Expected get pipeline to return not found") +} + +func TestGetPipelineVersion_NotFoundError(t *testing.T) { + db := NewFakeDbOrFatal() + defer db.Close() + pipelineStore := NewPipelineStore( + db, + util.NewFakeTimeForEpoch(), + util.NewFakeUUIDGeneratorOrFatal(fakeUUID, nil)) + + _, err := pipelineStore.GetPipelineVersion(fakeUUID) + assert.Equal(t, codes.NotFound, err.(*util.UserError).ExternalStatusCode(), + "Expected get pipeline to return not found") +} + +func TestListPipelineVersion_FilterOutNotReady(t *testing.T) { + db := NewFakeDbOrFatal() + defer db.Close() + pipelineStore := NewPipelineStore( + db, + util.NewFakeTimeForEpoch(), + util.NewFakeUUIDGeneratorOrFatal(fakeUUID, nil)) + + // Create a pipeline. + pipelineStore.CreatePipeline( + &model.Pipeline{ + Name: "pipeline_1", + Parameters: `[{"Name": "param1"}]`, + Status: model.PipelineReady, + }) + + // Create a first version with status ready. + pipelineStore.uuid = util.NewFakeUUIDGeneratorOrFatal(fakeUUIDTwo, nil) + pipelineStore.CreatePipelineVersion( + &model.PipelineVersion{ + Name: "pipeline_version_1", + Parameters: `[{"Name": "param1"}]`, + PipelineId: fakeUUID, + Status: model.PipelineVersionReady, + }) + + // Create a second version with status ready. + pipelineStore.uuid = util.NewFakeUUIDGeneratorOrFatal(fakeUUIDThree, nil) + pipelineStore.CreatePipelineVersion( + &model.PipelineVersion{ + Name: "pipeline_version_2", + Parameters: `[{"Name": "param1"}]`, + PipelineId: fakeUUID, + Status: model.PipelineVersionReady, + }) + + // Create a third version with status creating. 
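+ // A non-ready version like this one should be filtered out of the list results.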
+ pipelineStore.uuid = util.NewFakeUUIDGeneratorOrFatal(fakeUUIDFour, nil)
+ pipelineStore.CreatePipelineVersion(
+   &model.PipelineVersion{
+     Name: "pipeline_version_3",
+     Parameters: `[{"Name": "param1"}]`,
+     PipelineId: fakeUUID,
+     Status: model.PipelineVersionCreating,
+   })
+
+ pipelineVersionsExpected := []*model.PipelineVersion{
+   &model.PipelineVersion{
+     UUID: fakeUUIDTwo,
+     CreatedAtInSec: 2,
+     Name: "pipeline_version_1",
+     Parameters: `[{"Name": "param1"}]`,
+     PipelineId: fakeUUID,
+     Status: model.PipelineVersionReady},
+   &model.PipelineVersion{
+     UUID: fakeUUIDThree,
+     CreatedAtInSec: 3,
+     Name: "pipeline_version_2",
+     Parameters: `[{"Name": "param1"}]`,
+     PipelineId: fakeUUID,
+     Status: model.PipelineVersionReady}}
+
+ opts, err := list.NewOptions(&model.PipelineVersion{}, 10, "id", nil)
+ assert.Nil(t, err)
+
+ pipelineVersions, total_size, nextPageToken, err :=
+   pipelineStore.ListPipelineVersions(fakeUUID, opts)
+
+ assert.Nil(t, err)
+ assert.Equal(t, "", nextPageToken)
+ assert.Equal(t, 2, total_size)
+ assert.Equal(t, pipelineVersionsExpected, pipelineVersions)
+}
+
+func TestListPipelineVersions_Pagination(t *testing.T) {
+ db := NewFakeDbOrFatal()
+ defer db.Close()
+ pipelineStore := NewPipelineStore(
+   db,
+   util.NewFakeTimeForEpoch(),
+   util.NewFakeUUIDGeneratorOrFatal(fakeUUID, nil))
+
+ // Create a pipeline.
+ pipelineStore.CreatePipeline(
+   &model.Pipeline{
+     Name: "pipeline_1",
+     Parameters: `[{"Name": "param1"}]`,
+     Status: model.PipelineReady,
+   })
+
+ // Create "version_1" with fakeUUIDTwo.
+ pipelineStore.uuid = util.NewFakeUUIDGeneratorOrFatal(fakeUUIDTwo, nil)
+ pipelineStore.CreatePipelineVersion(
+   &model.PipelineVersion{
+     Name: "pipeline_version_1",
+     Parameters: `[{"Name": "param1"}]`,
+     PipelineId: fakeUUID,
+     Status: model.PipelineVersionReady,
+   })
+
+ // Create "version_3" with fakeUUIDThree.
+ pipelineStore.uuid = util.NewFakeUUIDGeneratorOrFatal(fakeUUIDThree, nil)
+ pipelineStore.CreatePipelineVersion(
+   &model.PipelineVersion{
+     Name: "pipeline_version_3",
+     Parameters: `[{"Name": "param1"}]`,
+     PipelineId: fakeUUID,
+     Status: model.PipelineVersionReady,
+   })
+
+ // Create "version_2" with fakeUUIDFour.
+ pipelineStore.uuid = util.NewFakeUUIDGeneratorOrFatal(fakeUUIDFour, nil)
+ pipelineStore.CreatePipelineVersion(
+   &model.PipelineVersion{
+     Name: "pipeline_version_2",
+     Parameters: `[{"Name": "param1"}]`,
+     PipelineId: fakeUUID,
+     Status: model.PipelineVersionReady,
+   })
+
+ // Create "version_4" with fakeUUIDFive.
+ pipelineStore.uuid = util.NewFakeUUIDGeneratorOrFatal(fakeUUIDFive, nil)
+ pipelineStore.CreatePipelineVersion(
+   &model.PipelineVersion{
+     Name: "pipeline_version_4",
+     Parameters: `[{"Name": "param1"}]`,
+     PipelineId: fakeUUID,
+     Status: model.PipelineVersionReady,
+   })
+
+ // The list comes back in 2 pages: the first page contains version_1 and
+ // version_2; the second page contains version_3 and version_4.
+ opts, err := list.NewOptions(&model.PipelineVersion{}, 2, "name", nil)
+ assert.Nil(t, err)
+ pipelineVersions, total_size, nextPageToken, err :=
+   pipelineStore.ListPipelineVersions(fakeUUID, opts)
+ assert.Nil(t, err)
+ assert.NotEmpty(t, nextPageToken)
+ assert.Equal(t, 4, total_size)
+
+ // First page.
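+ // Ascending by name: version_1 (fakeUUIDTwo) then version_2 (fakeUUIDFour).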
+ assert.Equal(t, pipelineVersions, []*model.PipelineVersion{ + &model.PipelineVersion{ + UUID: fakeUUIDTwo, + CreatedAtInSec: 2, + Name: "pipeline_version_1", + Parameters: `[{"Name": "param1"}]`, + PipelineId: fakeUUID, + Status: model.PipelineVersionReady, + }, + &model.PipelineVersion{ + UUID: fakeUUIDFour, + CreatedAtInSec: 4, + Name: "pipeline_version_2", + Parameters: `[{"Name": "param1"}]`, + PipelineId: fakeUUID, + Status: model.PipelineVersionReady, + }, + }) + + opts, err = list.NewOptionsFromToken(nextPageToken, 2) + assert.Nil(t, err) + pipelineVersions, total_size, nextPageToken, err = + pipelineStore.ListPipelineVersions(fakeUUID, opts) + assert.Nil(t, err) + + // Second page. + assert.Empty(t, nextPageToken) + assert.Equal(t, 4, total_size) + assert.Equal(t, pipelineVersions, []*model.PipelineVersion{ + &model.PipelineVersion{ + UUID: fakeUUIDThree, + CreatedAtInSec: 3, + Name: "pipeline_version_3", + Parameters: `[{"Name": "param1"}]`, + PipelineId: fakeUUID, + Status: model.PipelineVersionReady, + }, + &model.PipelineVersion{ + UUID: fakeUUIDFive, + CreatedAtInSec: 5, + Name: "pipeline_version_4", + Parameters: `[{"Name": "param1"}]`, + PipelineId: fakeUUID, + Status: model.PipelineVersionReady, + }, + }) +} + +func TestListPipelineVersions_Pagination_Descend(t *testing.T) { + db := NewFakeDbOrFatal() + defer db.Close() + pipelineStore := NewPipelineStore( + db, + util.NewFakeTimeForEpoch(), + util.NewFakeUUIDGeneratorOrFatal(fakeUUID, nil)) + + // Create a pipeline. + pipelineStore.CreatePipeline( + &model.Pipeline{ + Name: "pipeline_1", + Parameters: `[{"Name": "param1"}]`, + Status: model.PipelineReady, + }) + + // Create "version_1" with fakeUUIDTwo. + pipelineStore.uuid = util.NewFakeUUIDGeneratorOrFatal(fakeUUIDTwo, nil) + pipelineStore.CreatePipelineVersion( + &model.PipelineVersion{ + Name: "pipeline_version_1", + Parameters: `[{"Name": "param1"}]`, + PipelineId: fakeUUID, + Status: model.PipelineVersionReady, + }) + + // Create "version_3" with fakeUUIDThree. + pipelineStore.uuid = util.NewFakeUUIDGeneratorOrFatal(fakeUUIDThree, nil) + pipelineStore.CreatePipelineVersion( + &model.PipelineVersion{ + Name: "pipeline_version_3", + Parameters: `[{"Name": "param1"}]`, + PipelineId: fakeUUID, + Status: model.PipelineVersionReady, + }) + + // Create "version_2" with fakeUUIDFour. + pipelineStore.uuid = util.NewFakeUUIDGeneratorOrFatal(fakeUUIDFour, nil) + pipelineStore.CreatePipelineVersion( + &model.PipelineVersion{ + Name: "pipeline_version_2", + Parameters: `[{"Name": "param1"}]`, + PipelineId: fakeUUID, + Status: model.PipelineVersionReady, + }) + + // Create "version_4" with fakeUUIDFive. + pipelineStore.uuid = util.NewFakeUUIDGeneratorOrFatal(fakeUUIDFive, nil) + pipelineStore.CreatePipelineVersion( + &model.PipelineVersion{ + Name: "pipeline_version_4", + Parameters: `[{"Name": "param1"}]`, + PipelineId: fakeUUID, + Status: model.PipelineVersionReady, + }) + + // List result in 2 pages: first page "version_4" and "version_3"; second + // page "version_2" and "version_1". + opts, err := list.NewOptions(&model.PipelineVersion{}, 2, "name desc", nil) + assert.Nil(t, err) + pipelineVersions, total_size, nextPageToken, err := + pipelineStore.ListPipelineVersions(fakeUUID, opts) + assert.Nil(t, err) + assert.NotEmpty(t, nextPageToken) + assert.Equal(t, 4, total_size) + + // First page. 
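+ // Descending by name: version_4 (fakeUUIDFive) then version_3 (fakeUUIDThree).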
+ assert.Equal(t, pipelineVersions, []*model.PipelineVersion{ + &model.PipelineVersion{ + UUID: fakeUUIDFive, + CreatedAtInSec: 5, + Name: "pipeline_version_4", + Parameters: `[{"Name": "param1"}]`, + PipelineId: fakeUUID, + Status: model.PipelineVersionReady, + }, + &model.PipelineVersion{ + UUID: fakeUUIDThree, + CreatedAtInSec: 3, + Name: "pipeline_version_3", + Parameters: `[{"Name": "param1"}]`, + PipelineId: fakeUUID, + Status: model.PipelineVersionReady, + }, + }) + + opts, err = list.NewOptionsFromToken(nextPageToken, 2) + assert.Nil(t, err) + pipelineVersions, total_size, nextPageToken, err = + pipelineStore.ListPipelineVersions(fakeUUID, opts) + assert.Nil(t, err) + assert.Empty(t, nextPageToken) + assert.Equal(t, 4, total_size) + + // Second Page. + assert.Equal(t, pipelineVersions, []*model.PipelineVersion{ + &model.PipelineVersion{ + UUID: fakeUUIDFour, + CreatedAtInSec: 4, + Name: "pipeline_version_2", + Parameters: `[{"Name": "param1"}]`, + PipelineId: fakeUUID, + Status: model.PipelineVersionReady, + }, + &model.PipelineVersion{ + UUID: fakeUUIDTwo, + CreatedAtInSec: 2, + Name: "pipeline_version_1", + Parameters: `[{"Name": "param1"}]`, + PipelineId: fakeUUID, + Status: model.PipelineVersionReady, + }, + }) +} + +func TestListPipelineVersions_Pagination_LessThanPageSize(t *testing.T) { + db := NewFakeDbOrFatal() + defer db.Close() + pipelineStore := NewPipelineStore( + db, + util.NewFakeTimeForEpoch(), + util.NewFakeUUIDGeneratorOrFatal(fakeUUID, nil)) + + // Create a pipeline. + pipelineStore.CreatePipeline( + &model.Pipeline{ + Name: "pipeline_1", + Parameters: `[{"Name": "param1"}]`, + Status: model.PipelineReady, + }) + + // Create a version under the above pipeline. + pipelineStore.uuid = util.NewFakeUUIDGeneratorOrFatal(fakeUUIDTwo, nil) + pipelineStore.CreatePipelineVersion( + &model.PipelineVersion{ + Name: "pipeline_version_1", + Parameters: `[{"Name": "param1"}]`, + PipelineId: fakeUUID, + Status: model.PipelineVersionReady, + }) + + opts, err := list.NewOptions(&model.PipelineVersion{}, 2, "", nil) + assert.Nil(t, err) + pipelineVersions, total_size, nextPageToken, err := + pipelineStore.ListPipelineVersions(fakeUUID, opts) + assert.Nil(t, err) + assert.Equal(t, "", nextPageToken) + assert.Equal(t, 1, total_size) + assert.Equal(t, pipelineVersions, []*model.PipelineVersion{ + &model.PipelineVersion{ + UUID: fakeUUIDTwo, + Name: "pipeline_version_1", + CreatedAtInSec: 2, + Parameters: `[{"Name": "param1"}]`, + PipelineId: fakeUUID, + Status: model.PipelineVersionReady, + }, + }) +} + +func TestListPipelineVersionsError(t *testing.T) { + db := NewFakeDbOrFatal() + defer db.Close() + pipelineStore := NewPipelineStore( + db, + util.NewFakeTimeForEpoch(), + util.NewFakeUUIDGeneratorOrFatal(fakeUUID, nil)) + + db.Close() + // Internal error because of closed DB. + opts, err := list.NewOptions(&model.PipelineVersion{}, 2, "", nil) + assert.Nil(t, err) + _, _, _, err = pipelineStore.ListPipelineVersions(fakeUUID, opts) + assert.Equal(t, codes.Internal, err.(*util.UserError).ExternalStatusCode()) +} + +func TestUpdatePipelineVersionStatus(t *testing.T) { + db := NewFakeDbOrFatal() + defer db.Close() + pipelineStore := NewPipelineStore( + db, + util.NewFakeTimeForEpoch(), + util.NewFakeUUIDGeneratorOrFatal(fakeUUID, nil)) + + // Create a pipeline. + pipelineStore.CreatePipeline( + &model.Pipeline{ + Name: "pipeline_1", + Parameters: `[{"Name": "param1"}]`, + Status: model.PipelineReady, + }) + + // Create a version under the above pipeline. 
+ pipelineStore.uuid = util.NewFakeUUIDGeneratorOrFatal(fakeUUIDTwo, nil)
+ pipelineVersion, _ := pipelineStore.CreatePipelineVersion(
+   &model.PipelineVersion{
+     Name: "pipeline_version_1",
+     Parameters: `[{"Name": "param1"}]`,
+     PipelineId: fakeUUID,
+     Status: model.PipelineVersionReady,
+   })
+
+ // Change the version to deleting status.
+ err := pipelineStore.UpdatePipelineVersionStatus(
+   pipelineVersion.UUID, model.PipelineVersionDeleting)
+ assert.Nil(t, err)
+
+ // Check the new status by retrieving this pipeline version.
+ retrievedPipelineVersion, err :=
+   pipelineStore.GetPipelineVersionWithStatus(
+     pipelineVersion.UUID, model.PipelineVersionDeleting)
+ assert.Nil(t, err)
+ assert.Equal(t, *retrievedPipelineVersion, model.PipelineVersion{
+   UUID: fakeUUIDTwo,
+   Name: "pipeline_version_1",
+   CreatedAtInSec: 2,
+   Parameters: `[{"Name": "param1"}]`,
+   PipelineId: fakeUUID,
+   Status: model.PipelineVersionDeleting,
+ })
+}
+
+func TestUpdatePipelineVersionStatusError(t *testing.T) {
+ db := NewFakeDbOrFatal()
+ defer db.Close()
+ pipelineStore := NewPipelineStore(
+   db,
+   util.NewFakeTimeForEpoch(),
+   util.NewFakeUUIDGeneratorOrFatal(fakeUUID, nil))
+
+ db.Close()
+ // Internal error because of closed DB.
+ err := pipelineStore.UpdatePipelineVersionStatus(
+   fakeUUID, model.PipelineVersionDeleting)
+ assert.Equal(t, codes.Internal, err.(*util.UserError).ExternalStatusCode())
+}
diff --git a/backend/src/apiserver/storage/resource_reference_store.go b/backend/src/apiserver/storage/resource_reference_store.go
index b0dab45cf53..79a65817b0f 100644
--- a/backend/src/apiserver/storage/resource_reference_store.go
+++ b/backend/src/apiserver/storage/resource_reference_store.go
@@ -18,7 +18,7 @@ var resourceReferenceColumns = []string{"ResourceUUID", "ResourceType", "Referen
 type ResourceReferenceStoreInterface interface {
   // Retrieve the resource reference for a given resource id, type and a reference type.
   GetResourceReference(resourceId string, resourceType common.ResourceType,
-    referenceType common.ResourceType) (*model.ResourceReference, error)
+    referenceType common.ResourceType) (*model.ResourceReference, error)
 }
 
 type ResourceReferenceStore struct {
@@ -62,6 +62,12 @@ func (s *ResourceReferenceStore) checkReferenceExist(tx *sql.Tx, referenceId str
     selectBuilder = sq.Select("1").From("jobs").Where(sq.Eq{"uuid": referenceId})
   case common.Experiment:
     selectBuilder = sq.Select("1").From("experiments").Where(sq.Eq{"uuid": referenceId})
+  case common.PipelineVersion:
+    selectBuilder = sq.Select("1").From("pipeline_versions").Where(sq.Eq{"uuid": referenceId})
+  case common.Namespace:
+    // This function checks data validity when the data are transformed according to the DB schema.
+    // Since there is no separate table storing namespace data, always return true.
+    return true
   default:
     return false
   }
@@ -82,9 +88,9 @@ func (s *ResourceReferenceStore) checkReferenceExist(tx *sql.Tx, referenceId str
 func (s *ResourceReferenceStore) DeleteResourceReferences(tx *sql.Tx, id string, resourceType common.ResourceType) error {
   refSql, refArgs, err := sq.
     Delete("resource_references").
-    Where(sq.Or{
-      sq.Eq{"ResourceUUID": id, "ResourceType": resourceType},
-      sq.Eq{"ReferenceUUID": id, "ReferenceType": resourceType}}).
+    Where(sq.Or{
+      sq.Eq{"ResourceUUID": id, "ResourceType": resourceType},
+      sq.Eq{"ReferenceUUID": id, "ReferenceType": resourceType}}).
     ToSql()
   _, err = tx.Exec(refSql, refArgs...)
 if err != nil {
@@ -94,24 +100,24 @@
 }
 
 func (s *ResourceReferenceStore) GetResourceReference(resourceId string, resourceType common.ResourceType,
-  referenceType common.ResourceType) (*model.ResourceReference, error) {
+  referenceType common.ResourceType) (*model.ResourceReference, error) {
   sql, args, err := sq.Select(resourceReferenceColumns...).
     From("resource_references").
-    Where(sq.Eq{
-      "ResourceUUID": resourceId,
-      "ResourceType": resourceType,
-      "ReferenceType": referenceType}).
+    Where(sq.Eq{
+      "ResourceUUID": resourceId,
+      "ResourceType": resourceType,
+      "ReferenceType": referenceType}).
     Limit(1).ToSql()
   if err != nil {
     return nil, util.NewInternalServerError(err, "Failed to create query to get resource reference. "+
-      "Resource ID: %s. Resource Type: %s. Reference Type: %s", resourceId, resourceType, referenceType)
+      "Resource ID: %s. Resource Type: %s. Reference Type: %s", resourceId, resourceType, referenceType)
   }
   row, err := s.db.Query(sql, args...)
   if err != nil {
     return nil, util.NewInternalServerError(err, "Failed to get resource reference. "+
-      "Resource ID: %s. Resource Type: %s. Reference Type: %s", resourceId, resourceType, referenceType)
+      "Resource ID: %s. Resource Type: %s. Reference Type: %s", resourceId, resourceType, referenceType)
   }
   defer row.Close()
   reference, err := s.scanRows(row)
diff --git a/backend/src/apiserver/storage/run_store.go b/backend/src/apiserver/storage/run_store.go
index 12c138f60f6..c023a34d071 100644
--- a/backend/src/apiserver/storage/run_store.go
+++ b/backend/src/apiserver/storage/run_store.go
@@ -17,6 +17,7 @@ package storage
 import (
   "database/sql"
   "fmt"
+  "github.com/pkg/errors"
 
   sq "github.com/Masterminds/squirrel"
 
@@ -31,7 +32,7 @@ import (
   "k8s.io/apimachinery/pkg/util/json"
 )
 
-var runColumns = []string{"UUID", "DisplayName", "Name", "StorageState", "Namespace", "Description",
+var runColumns = []string{"UUID", "ExperimentUUID", "DisplayName", "Name", "StorageState", "Namespace", "ServiceAccount", "Description",
   "CreatedAtInSec", "ScheduledAtInSec", "FinishedAtInSec", "Conditions", "PipelineId", "PipelineName",
   "PipelineSpecManifest", "WorkflowSpecManifest", "Parameters", "pipelineRuntimeManifest", "WorkflowRuntimeManifest",
 }
@@ -143,8 +144,23 @@ func (s *RunStore) ListRuns(
 func (s *RunStore) buildSelectRunsQuery(selectCount bool, opts *list.Options,
   filterContext *common.FilterContext) (string, []interface{}, error) {
-  filteredSelectBuilder, err := list.FilterOnResourceReference("run_details", runColumns,
-    common.Run, selectCount, filterContext)
+
+  var filteredSelectBuilder sq.SelectBuilder
+  var err error
+
+  refKey := filterContext.ReferenceKey
+  if refKey != nil && refKey.Type == "ExperimentUUID" {
+    // For performance reasons the experiment ID filter on runs needs special
+    // treatment: currently only the run table has an ExperimentUUID column.
+    filteredSelectBuilder, err = list.FilterOnExperiment("run_details", runColumns,
+      selectCount, refKey.ID)
+  } else if refKey != nil && refKey.Type == common.Namespace {
+    filteredSelectBuilder, err = list.FilterOnNamespace("run_details", runColumns,
+      selectCount, refKey.ID)
+  } else {
+    filteredSelectBuilder, err = list.FilterOnResourceReference("run_details", runColumns,
+      common.Run, selectCount, filterContext)
+  }
   if err != nil {
     return "", nil, util.NewInternalServerError(err, "Failed to list runs: %v", err)
   }
@@ -210,23 +226,26 @@ func (s *RunStore) addMetricsAndResourceReferences(filteredSelectBuilder
sq.Sele Select("subq.*", resourceRefConcatQuery+" AS refs"). FromSelect(subQ, "subq"). // Append all the resource references for the run as a json column - LeftJoin("(select * from resource_references where ResourceType='Run') AS r ON subq.UUID=r.ResourceUUID"). + LeftJoin("resource_references AS r ON r.ResourceType='Run' AND subq.UUID=r.ResourceUUID"). GroupBy("subq.UUID") } func (s *RunStore) scanRowsToRunDetails(rows *sql.Rows) ([]*model.RunDetail, error) { var runs []*model.RunDetail for rows.Next() { - var uuid, displayName, name, storageState, namespace, description, pipelineId, pipelineName, pipelineSpecManifest, - workflowSpecManifest, parameters, conditions, pipelineRuntimeManifest, workflowRuntimeManifest string + var uuid, experimentUUID, displayName, name, storageState, namespace, serviceAccount, description, pipelineId, + pipelineName, pipelineSpecManifest, workflowSpecManifest, parameters, conditions, pipelineRuntimeManifest, + workflowRuntimeManifest string var createdAtInSec, scheduledAtInSec, finishedAtInSec int64 var metricsInString, resourceReferencesInString sql.NullString err := rows.Scan( &uuid, + &experimentUUID, &displayName, &name, &storageState, &namespace, + &serviceAccount, &description, &createdAtInSec, &scheduledAtInSec, @@ -260,10 +279,12 @@ func (s *RunStore) scanRowsToRunDetails(rows *sql.Rows) ([]*model.RunDetail, err } runs = append(runs, &model.RunDetail{Run: model.Run{ UUID: uuid, + ExperimentUUID: experimentUUID, DisplayName: displayName, Name: name, StorageState: storageState, Namespace: namespace, + ServiceAccount: serviceAccount, Description: description, CreatedAtInSec: createdAtInSec, ScheduledAtInSec: scheduledAtInSec, @@ -320,10 +341,12 @@ func (s *RunStore) CreateRun(r *model.RunDetail) (*model.RunDetail, error) { Insert("run_details"). SetMap(sq.Eq{ "UUID": r.UUID, + "ExperimentUUID": r.ExperimentUUID, "DisplayName": r.DisplayName, "Name": r.Name, "StorageState": r.StorageState, "Namespace": r.Namespace, + "ServiceAccount": r.ServiceAccount, "Description": r.Description, "CreatedAtInSec": r.CreatedAtInSec, "ScheduledAtInSec": r.ScheduledAtInSec, diff --git a/backend/src/apiserver/visualization/exporter.py b/backend/src/apiserver/visualization/exporter.py index c5cc3d0f8cc..2f56f57711f 100644 --- a/backend/src/apiserver/visualization/exporter.py +++ b/backend/src/apiserver/visualization/exporter.py @@ -149,5 +149,5 @@ def generate_html_from_notebook(self, nb: NotebookNode) -> Text: # Output generator self.ep.preprocess(nb, {"metadata": {"path": Path.cwd()}}, self.km) # Export all html and outputs - body, _ = html_exporter.from_notebook_node(nb) + body, _ = html_exporter.from_notebook_node(nb, resources={}) return body diff --git a/backend/src/apiserver/visualization/license-download.sh b/backend/src/apiserver/visualization/license-download.sh new file mode 100755 index 00000000000..191f4909364 --- /dev/null +++ b/backend/src/apiserver/visualization/license-download.sh @@ -0,0 +1,33 @@ +#!/bin/bash +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# A script to download licenses based on
+# a specified license table (3 columns: name,license_link,license_type) csv file.
+#
+# Usage:
+# ./license-download.sh ./third_party_licenses.csv ./third_party_licenses
+
+mkdir -p $2
+
+IFS=$'\n'
+while IFS=, read -r col1 col2 col3
+do
+  DEST="$2/$col1.LICENSE"
+  if [[ -f "$DEST" ]]; then
+    echo "Skipping already downloaded license file $DEST."
+  else
+    wget -O $2/$col1.LICENSE $col2
+  fi
+done < $1
diff --git a/backend/src/apiserver/visualization/license.sh b/backend/src/apiserver/visualization/license.sh
index 3f7de97882c..1909a53302e 100755
--- a/backend/src/apiserver/visualization/license.sh
+++ b/backend/src/apiserver/visualization/license.sh
@@ -52,12 +52,37 @@ fi
 
 # Gather license files for each package. For packages with GPL license we mirror the source code.
 mkdir -p $2/source
+EXTRANEOUS=()
 while IFS=, read -r col1 col2 col3
 do
   if [[ " ${INSTALLED_PACKAGES[@]} " =~ " ${col1} " ]]; then
-    wget -O $2/$col1.LICENSE $col2
+    # License downloading has moved to ./license-download.sh.
+    # wget -O $2/$col1.LICENSE $col2
+    # We check existence of the license file instead.
+    if [[ -f "$2/${col1}.LICENSE" ]]; then
+      # Verify that the LICENSE file is not empty.
+      if [[ -s "$2/${col1}.LICENSE" ]]; then
+        echo "OK: ${col1}'s license exists."
+      else
+        echo "Error: ${col1}'s license file exists, but is empty."
+        exit 1
+      fi
+    else
+      echo "Error: ${col1} package's license is missing."
+      echo "Please rerun ./license-download.sh locally and commit the result into the licenses folder."
+      exit 1
+    fi
     if [[ "${col3}" == *GPL* ]] || [[ "${col3}" =~ ^MPL ]]; then
      pip install -t "$2/source/${col1}" ${col1}
    fi
+  else
+    EXTRANEOUS+=("$col1")
  fi
done < $1
+
+if [ -n "${EXTRANEOUS}" ]; then
+  echo "Some libraries are listed in third_party_licenses.csv but not installed."
+ echo "Please remove them from third_party_licenses.csv:" + echo "${EXTRANEOUS[@]}" + exit 1 +fi diff --git a/backend/src/apiserver/visualization/requirements.txt b/backend/src/apiserver/visualization/requirements.txt index 16bfdf4b2e4..ae27d4dd064 100644 --- a/backend/src/apiserver/visualization/requirements.txt +++ b/backend/src/apiserver/visualization/requirements.txt @@ -3,11 +3,14 @@ gcsfs==0.2.3 google-api-python-client==1.7.9 itables==0.1.0 ipykernel==5.1.1 +ipython==7.12.0 jupyter_client==5.2.4 nbconvert==5.5.0 nbformat==4.4.0 pandas==0.24.2 +pyarrow==0.15.1 scikit_learn==0.21.2 -tensorflow-data-validation==0.13.1 -tensorflow-model-analysis==0.13.1 +tensorflow-metadata==0.21.1 +tensorflow-model-analysis==0.21.5 +tensorflow-data-validation==0.21.1 tornado==6.0.2 diff --git a/backend/src/apiserver/visualization/templates/full.tpl b/backend/src/apiserver/visualization/templates/full.tpl index 58107182f62..2aac4185414 100644 --- a/backend/src/apiserver/visualization/templates/full.tpl +++ b/backend/src/apiserver/visualization/templates/full.tpl @@ -15,32 +15,7 @@ {% block ipywidgets %} -{%- if "widgets" in nb.metadata -%} - -{%- endif -%} + {% endblock ipywidgets %} {% for css in resources.inlining.css -%} @@ -98,4 +73,4 @@ div#notebook-container{ {% block footer %} {{ super() }} -{% endblock footer %} \ No newline at end of file +{% endblock footer %} diff --git a/backend/src/apiserver/visualization/third_party_licenses.csv b/backend/src/apiserver/visualization/third_party_licenses.csv index b91b98f5e21..a8446b58527 100644 --- a/backend/src/apiserver/visualization/third_party_licenses.csv +++ b/backend/src/apiserver/visualization/third_party_licenses.csv @@ -1,118 +1,142 @@ -Jinja2,https://github.com/pallets/jinja/blob/master/LICENSE.rst,BSD-3 -Keras-Applications,https://github.com/keras-team/keras-applications/blob/master/LICENSE,MIT -Keras-Preprocessing,https://github.com/keras-team/keras-preprocessing/blob/master/LICENSE,MIT -Markdown,https://github.com/Python-Markdown/markdown/blob/master/LICENSE.md,BSD-3 -MarkupSafe,https://github.com/pallets/markupsafe/blob/master/LICENSE.rst,BSD-3 -Pillow,https://github.com/python-pillow/Pillow/blob/master/LICENSE,PIL -PyYAML,https://github.com/yaml/pyyaml/blob/master/LICENSE,MIT -Pygments,https://bitbucket.org/birkenfeld/pygments-main/src/default/LICENSE,BSD-2 -Send2Trash,https://github.com/hsoft/send2trash/blob/master/LICENSE,BSD-3 -Werkzeug,https://github.com/pallets/werkzeug/blob/master/LICENSE.rst,BSD-3 -absl-py,https://github.com/abseil/abseil-py/blob/master/LICENSE,Apache 2.0 -apache-beam,https://github.com/apache/beam/blob/master/LICENSE,Apache 2.0 -astor,https://github.com/berkerpeksag/astor/blob/master/LICENSE,BSD-3 -attrs,https://github.com/python-attrs/attrs/blob/master/LICENSE,MIT -avro-python3,https://github.com/apache/avro/blob/master/LICENSE.txt,Apache 2.0 -backcall,https://github.com/takluyver/backcall/blob/master/LICENSE,BSD-3 -bleach,https://github.com/mozilla/bleach/blob/master/LICENSE,Apache 2.0 -bokeh,https://github.com/bokeh/bokeh/blob/master/LICENSE.txt,BSD-3 -cachetools,https://github.com/tkem/cachetools/blob/master/LICENSE,MIT -certifi,https://github.com/certifi/python-certifi/blob/master/LICENSE,MPL 2.0 -chardet,https://github.com/chardet/chardet/blob/master/LICENSE,LGPL +Jinja2,https://raw.githubusercontent.com/pallets/jinja/master/LICENSE.rst,BSD-3 +Keras-Applications,https://raw.githubusercontent.com/keras-team/keras-applications/master/LICENSE,MIT 
+Keras-Preprocessing,https://raw.githubusercontent.com/keras-team/keras-preprocessing/master/LICENSE,MIT +Markdown,https://raw.githubusercontent.com/Python-Markdown/markdown/master/LICENSE.md,BSD-3 +MarkupSafe,https://raw.githubusercontent.com/pallets/markupsafe/master/LICENSE.rst,BSD-3 +Pillow,https://raw.githubusercontent.com/python-pillow/Pillow/master/LICENSE,PIL +PyYAML,https://raw.githubusercontent.com/yaml/pyyaml/master/LICENSE,MIT +Pygments,https://raw.githubusercontent.com/pygments/pygments/master/LICENSE,BSD-2 +Send2Trash,https://raw.githubusercontent.com/hsoft/send2trash/master/LICENSE,BSD-3 +Werkzeug,https://raw.githubusercontent.com/pallets/werkzeug/master/LICENSE.rst,BSD-3 +absl-py,https://raw.githubusercontent.com/abseil/abseil-py/master/LICENSE,Apache 2.0 +apache-beam,https://raw.githubusercontent.com/apache/beam/master/LICENSE,Apache 2.0 +astor,https://raw.githubusercontent.com/berkerpeksag/astor/master/LICENSE,BSD-3 +attrs,https://raw.githubusercontent.com/python-attrs/attrs/master/LICENSE,MIT +avro-python3,https://raw.githubusercontent.com/apache/avro/master/LICENSE.txt,Apache 2.0 +backcall,https://raw.githubusercontent.com/takluyver/backcall/master/LICENSE,BSD-3 +bleach,https://raw.githubusercontent.com/mozilla/bleach/master/LICENSE,Apache 2.0 +bokeh,https://raw.githubusercontent.com/bokeh/bokeh/master/LICENSE.txt,BSD-3 +cachetools,https://raw.githubusercontent.com/tkem/cachetools/master/LICENSE,MIT +certifi,https://raw.githubusercontent.com/certifi/python-certifi/master/LICENSE,MPL 2.0 +chardet,https://raw.githubusercontent.com/chardet/chardet/master/LICENSE,LGPL crcmod,http://crcmod.sourceforge.net/intro.html#license,MIT -decorator,https://github.com/micheles/decorator/blob/master/LICENSE.txt,BSD-2 -defusedxml,https://github.com/tiran/defusedxml/blob/master/LICENSE,PSF -dill,https://github.com/uqfoundation/dill/blob/master/LICENSE,BSD-3 -docopt,https://github.com/docopt/docopt/blob/master/LICENSE-MIT,MIT -entrypoints,https://github.com/takluyver/entrypoints/blob/master/LICENSE,MIT -fastavro,https://github.com/fastavro/fastavro/blob/master/LICENSE,MIT -fasteners,https://github.com/harlowja/fasteners/blob/master/LICENSE,Apache 2.0 +decorator,https://raw.githubusercontent.com/micheles/decorator/master/LICENSE.txt,BSD-2 +defusedxml,https://raw.githubusercontent.com/tiran/defusedxml/master/LICENSE,PSF +dill,https://raw.githubusercontent.com/uqfoundation/dill/master/LICENSE,BSD-3 +docopt,https://raw.githubusercontent.com/docopt/docopt/master/LICENSE-MIT,MIT +entrypoints,https://raw.githubusercontent.com/takluyver/entrypoints/master/LICENSE,MIT +fastavro,https://raw.githubusercontent.com/fastavro/fastavro/master/LICENSE,MIT +fasteners,https://raw.githubusercontent.com/harlowja/fasteners/master/LICENSE,Apache 2.0 future,https://python-future.org/credits.html#licence,MIT -gast,https://github.com/serge-sans-paille/gast/blob/master/LICENSE,BSD-3 -gcsfs,https://github.com/dask/gcsfs/blob/master/LICENSE.txt,BSD-3 -google-api-core,https://github.com/googleapis/google-cloud-python/blob/master/LICENSE,Apache 2.0 -google-api-python-client,https://github.com/googleapis/google-api-python-client/blob/master/LICENSE,Apache 2.0 -google-apitools,https://github.com/google/apitools/blob/master/LICENSE,Apache 2.0 -google-auth,https://github.com/googleapis/google-auth-library-python/blob/master/LICENSE,Apache 2.0 -google-auth-httplib2,https://github.com/GoogleCloudPlatform/google-auth-library-python-httplib2/blob/master/LICENSE,Apache 2.0 
-google-auth-oauthlib,https://github.com/googleapis/google-auth-library-python-oauthlib/blob/master/LICENSE,Apache 2.0 -google-cloud-bigquery,https://github.com/googleapis/google-cloud-python/blob/master/LICENSE,Apache 2.0 -google-cloud-bigtable,https://github.com/googleapis/google-cloud-python/blob/master/LICENSE,Apache 2.0 -google-cloud-core,https://github.com/googleapis/google-cloud-python/blob/master/LICENSE,Apache 2.0 -google-cloud-datastore,https://github.com/googleapis/google-cloud-python/blob/master/LICENSE,Apache 2.0 -google-cloud-pubsub,https://github.com/googleapis/google-cloud-python/blob/master/LICENSE,Apache 2.0 -google-resumable-media,https://github.com/googleapis/google-resumable-media-python/blob/master/LICENSE,Apache 2.0 -googleapis-common-protos,https://github.com/googleapis/api-common-protos/blob/master/LICENSE,Apache 2.0 -grpc-google-iam-v1,https://github.com/googleapis/googleapis/blob/master/LICENSE,Apache 2.0 -grpcio,https://github.com/grpc/grpc/blob/master/LICENSE,Apache 2.0 -h5py,https://github.com/h5py/h5py/blob/master/LICENSE,BSD-3 -hdfs,https://github.com/mtth/hdfs/blob/master/LICENSE,MIT -httplib2,https://github.com/httplib2/httplib2/blob/master/LICENSE,MIT -idna,https://github.com/kjd/idna/blob/master/LICENSE.rst,BSD-3 -ipykernel,https://github.com/ipython/ipykernel/blob/master/COPYING.md,BSD-3 -ipython,https://github.com/ipython/ipython/blob/master/LICENSE,BSD-3 -ipython-genutils,https://github.com/ipython/ipython_genutils/blob/master/COPYING.md,BSD-3 -ipywidgets,https://github.com/jupyter-widgets/ipywidgets/blob/master/LICENSE,BSD-3 -itables,https://github.com/mwouts/itables/blob/master/LICENSE,MIT -jedi,https://github.com/davidhalter/jedi/blob/master/LICENSE.txt,MIT -joblib,https://github.com/joblib/joblib/blob/master/LICENSE.txt,BSD-3 -jsonschema,https://github.com/Julian/jsonschema/blob/master/COPYING,MIT -jupyter,https://github.com/jupyter/jupyter/blob/master/LICENSE,BSD-3 -jupyter-client,https://github.com/jupyter/jupyter_client/blob/master/COPYING.md,BSD-3 -jupyter-console,https://github.com/jupyter/jupyter_console/blob/master/COPYING.md,BSD-3 -jupyter-core,https://github.com/jupyter/jupyter_core/blob/master/COPYING.md,BSD-3 -mistune,https://github.com/lepture/mistune/blob/master/LICENSE,BSD-3 -mock,https://github.com/testing-cabal/mock/blob/master/LICENSE.txt,BSD-2 -monotonic,https://github.com/atdt/monotonic/blob/master/LICENSE,Apache 2.0 -nbconvert,https://github.com/jupyter/nbconvert/blob/master/LICENSE,BSD-3 -nbformat,https://github.com/jupyter/nbformat/blob/master/COPYING.md,BSD-3 -notebook,https://github.com/jupyter/notebook/blob/master/COPYING.md,BSD-3 -numpy,https://github.com/numpy/numpy/blob/master/LICENSE.txt,BSD-3 -oauth2client,https://github.com/googleapis/oauth2client/blob/master/LICENSE,Apache 2.0 -oauthlib,https://github.com/oauthlib/oauthlib/blob/master/LICENSE,BSD-3 +gast,https://raw.githubusercontent.com/serge-sans-paille/gast/master/LICENSE,BSD-3 +gcsfs,https://raw.githubusercontent.com/dask/gcsfs/master/LICENSE.txt,BSD-3 +google-api-core,https://raw.githubusercontent.com/googleapis/google-cloud-python/master/LICENSE,Apache 2.0 +google-api-python-client,https://raw.githubusercontent.com/googleapis/google-api-python-client/master/LICENSE,Apache 2.0 +google-apitools,https://raw.githubusercontent.com/google/apitools/master/LICENSE,Apache 2.0 +google-auth,https://raw.githubusercontent.com/googleapis/google-auth-library-python/master/LICENSE,Apache 2.0 
+google-auth-httplib2,https://raw.githubusercontent.com/GoogleCloudPlatform/google-auth-library-python-httplib2/master/LICENSE,Apache 2.0 +google-auth-oauthlib,https://raw.githubusercontent.com/googleapis/google-auth-library-python-oauthlib/master/LICENSE,Apache 2.0 +google-cloud-bigquery,https://raw.githubusercontent.com/googleapis/google-cloud-python/master/LICENSE,Apache 2.0 +google-cloud-bigtable,https://raw.githubusercontent.com/googleapis/google-cloud-python/master/LICENSE,Apache 2.0 +google-cloud-dlp,https://raw.githubusercontent.com/googleapis/python-dlp/master/LICENSE,Apache 2.0 +google-cloud-core,https://raw.githubusercontent.com/googleapis/google-cloud-python/master/LICENSE,Apache 2.0 +google-cloud-datastore,https://raw.githubusercontent.com/googleapis/google-cloud-python/master/LICENSE,Apache 2.0 +google-cloud-language,https://raw.githubusercontent.com/googleapis/google-cloud-python/master/LICENSE,Apache 2.0 +google-cloud-pubsub,https://raw.githubusercontent.com/googleapis/google-cloud-python/master/LICENSE,Apache 2.0 +google-cloud-spanner,https://raw.githubusercontent.com/googleapis/python-spanner/master/LICENSE,Apache 2.0 +google-cloud-videointelligence,https://raw.githubusercontent.com/googleapis/python-videointelligence/master/LICENSE,Apache 2.0 +google-cloud-vision,https://raw.githubusercontent.com/googleapis/python-vision/master/LICENSE,Apache 2.0 +google-resumable-media,https://raw.githubusercontent.com/googleapis/google-resumable-media-python/master/LICENSE,Apache 2.0 +googleapis-common-protos,https://raw.githubusercontent.com/googleapis/api-common-protos/master/LICENSE,Apache 2.0 +grpc-google-iam-v1,https://raw.githubusercontent.com/googleapis/googleapis/master/LICENSE,Apache 2.0 +grpcio,https://raw.githubusercontent.com/grpc/grpc/master/LICENSE,Apache 2.0 +grpcio-gcp,https://raw.githubusercontent.com/GoogleCloudPlatform/grpc-gcp-python/master/LICENSE,Apache 2.0 +h5py,https://raw.githubusercontent.com/h5py/h5py/master/LICENSE,BSD-3 +hdfs,https://raw.githubusercontent.com/mtth/hdfs/master/LICENSE,MIT +httplib2,https://raw.githubusercontent.com/httplib2/httplib2/master/LICENSE,MIT +idna,https://raw.githubusercontent.com/kjd/idna/master/LICENSE.rst,BSD-3 +importlib-metadata,https://gitlab.com/python-devs/importlib_metadata/master/LICENSE,Apache 2.0 +ipykernel,https://raw.githubusercontent.com/ipython/ipykernel/master/COPYING.md,BSD-3 +ipython,https://raw.githubusercontent.com/ipython/ipython/master/LICENSE,BSD-3 +ipython-genutils,https://raw.githubusercontent.com/ipython/ipython_genutils/master/COPYING.md,BSD-3 +ipywidgets,https://raw.githubusercontent.com/jupyter-widgets/ipywidgets/master/LICENSE,BSD-3 +itables,https://raw.githubusercontent.com/mwouts/itables/master/LICENSE,MIT +jedi,https://raw.githubusercontent.com/davidhalter/jedi/master/LICENSE.txt,MIT +joblib,https://raw.githubusercontent.com/joblib/joblib/master/LICENSE.txt,BSD-3 +jsonschema,https://raw.githubusercontent.com/Julian/jsonschema/master/COPYING,MIT +jupyter,https://raw.githubusercontent.com/jupyter/jupyter/master/LICENSE,BSD-3 +jupyter-client,https://raw.githubusercontent.com/jupyter/jupyter_client/master/COPYING.md,BSD-3 +jupyter-console,https://raw.githubusercontent.com/jupyter/jupyter_console/master/COPYING.md,BSD-3 +jupyter-core,https://raw.githubusercontent.com/jupyter/jupyter_core/master/COPYING.md,BSD-3 +mistune,https://raw.githubusercontent.com/lepture/mistune/master/LICENSE,BSD-3 +mock,https://raw.githubusercontent.com/testing-cabal/mock/master/LICENSE.txt,BSD-2 
+monotonic,https://raw.githubusercontent.com/atdt/monotonic/master/LICENSE,Apache 2.0 +nbconvert,https://raw.githubusercontent.com/jupyter/nbconvert/master/LICENSE,BSD-3 +nbformat,https://raw.githubusercontent.com/jupyter/nbformat/master/COPYING.md,BSD-3 +notebook,https://raw.githubusercontent.com/jupyter/notebook/master/LICENSE,BSD-3 +numpy,https://raw.githubusercontent.com/numpy/numpy/master/LICENSE.txt,BSD-3 +oauth2client,https://raw.githubusercontent.com/googleapis/oauth2client/master/LICENSE,Apache 2.0 +oauthlib,https://raw.githubusercontent.com/oauthlib/oauthlib/master/LICENSE,BSD-3 packaging,https://creativecommons.org/licenses/by-sa/3.0/,CC by-SA 3 -pandas,https://github.com/pandas-dev/pandas/blob/master/LICENSE,BSD-3 -pandocfilters,https://github.com/jgm/pandocfilters/blob/master/LICENSE,BSD-3 -parso,https://github.com/davidhalter/parso/blob/master/LICENSE.txt,MIT -pbr,https://opendev.org/openstack/pbr/src/branch/master/LICENSE,Apache 2.0 -pexpect,https://github.com/pexpect/pexpect/blob/master/LICENSE,ISC -pickleshare,https://github.com/pickleshare/pickleshare/blob/master/LICENSE,MIT -prometheus-client,https://github.com/prometheus/client_python/blob/master/LICENSE,Apache 2.0 -prompt-toolkit,https://github.com/prompt-toolkit/python-prompt-toolkit/blob/master/LICENSE,BSD-3 -protobuf,https://github.com/protocolbuffers/protobuf/blob/master/LICENSE,BSD-3 -ptyprocess,https://github.com/pexpect/ptyprocess/blob/master/LICENSE,ISC -pyarrow,https://github.com/apache/arrow/blob/master/LICENSE.txt,Apache 2.0 -pyasn1,https://github.com/etingof/pyasn1/blob/master/LICENSE.rst,BSD-2 -pyasn1-modules,https://github.com/etingof/pyasn1-modules/blob/master/LICENSE.txt,BSD-2 -pydot,https://github.com/pydot/pydot/blob/master/LICENSE,MIT -pymongo,https://github.com/mongodb/mongo-python-driver/blob/master/LICENSE,Apache 2.0 -pyparsing,https://github.com/pyparsing/pyparsing/blob/master/LICENSE,MIT -pyrsistent,https://github.com/tobgu/pyrsistent/blob/master/LICENCE.mit,MIT -python-dateutil,https://github.com/dateutil/dateutil/blob/master/LICENSE,Apache 2.0 -pymongo,https://github.com/mongodb/mongo-python-driver/blob/master/LICENSE,Apache 2.0 +pandas,https://raw.githubusercontent.com/pandas-dev/pandas/master/LICENSE,BSD-3 +pandocfilters,https://raw.githubusercontent.com/jgm/pandocfilters/master/LICENSE,BSD-3 +parso,https://raw.githubusercontent.com/davidhalter/parso/master/LICENSE.txt,MIT +pbr,https://opendev.org/openstack/pbr/raw/branch/master/LICENSE,Apache 2.0 +pexpect,https://raw.githubusercontent.com/pexpect/pexpect/master/LICENSE,ISC +pickleshare,https://raw.githubusercontent.com/pickleshare/pickleshare/master/LICENSE,MIT +prometheus-client,https://raw.githubusercontent.com/prometheus/client_python/master/LICENSE,Apache 2.0 +prompt-toolkit,https://raw.githubusercontent.com/prompt-toolkit/python-prompt-toolkit/master/LICENSE,BSD-3 +protobuf,https://raw.githubusercontent.com/protocolbuffers/protobuf/master/LICENSE,BSD-3 +ptyprocess,https://raw.githubusercontent.com/pexpect/ptyprocess/master/LICENSE,ISC +pyarrow,https://raw.githubusercontent.com/apache/arrow/master/LICENSE.txt,Apache 2.0 +pyasn1,https://raw.githubusercontent.com/etingof/pyasn1/master/LICENSE.rst,BSD-2 +pyasn1-modules,https://raw.githubusercontent.com/etingof/pyasn1-modules/master/LICENSE.txt,BSD-2 +pydot,https://raw.githubusercontent.com/pydot/pydot/master/LICENSE,MIT +pymongo,https://raw.githubusercontent.com/mongodb/mongo-python-driver/master/LICENSE,Apache 2.0 
+pyparsing,https://raw.githubusercontent.com/pyparsing/pyparsing/master/LICENSE,MIT +pyrsistent,https://raw.githubusercontent.com/tobgu/pyrsistent/master/LICENCE.mit,MIT +python-dateutil,https://raw.githubusercontent.com/dateutil/dateutil/master/LICENSE,Apache 2.0 +pymongo,https://raw.githubusercontent.com/mongodb/mongo-python-driver/master/LICENSE,Apache 2.0 pytz,http://pytz.sourceforge.net/index.html#license,MIT -pyzmq,https://github.com/zeromq/pyzmq/blob/master/COPYING.BSD,BSD-3 -qtconsole,https://github.com/jupyter/qtconsole/blob/master/LICENSE,BSD-3 -requests,https://github.com/psf/requests/blob/master/LICENSE,Apache 2.0 -requests-oauthlib,https://github.com/requests/requests-oauthlib/blob/master/LICENSE,ISC -rsa,https://github.com/sybrenstuvel/python-rsa/blob/master/LICENSE,Apache 2.0 -scikit-learn,https://github.com/scikit-learn/scikit-learn/blob/master/COPYING,BSD-3 -scipy,https://github.com/scipy/scipy/blob/master/LICENSE.txt,BSD-3 -six,https://github.com/benjaminp/six/blob/master/LICENSE,MIT -tensorboard,https://github.com/tensorflow/tensorboard/blob/master/LICENSE,Apache 2.0 -tensorflow,https://github.com/tensorflow/tensorflow/blob/master/LICENSE,Apache 2.0 -tensorflow-data-validation,https://github.com/tensorflow/data-validation/blob/master/LICENSE,Apache 2.0 -tensorflow-estimator,https://github.com/tensorflow/estimator/blob/master/LICENSE,Apache 2.0 -tensorflow-metadata,https://github.com/tensorflow/metadata/blob/master/LICENSE,Apache 2.0 -tensorflow-model-analysis,https://github.com/tensorflow/model-analysis/blob/master/LICENSE,Apache 2.0 -tensorflow-transform,https://github.com/tensorflow/transform/blob/master/LICENSE,Apache 2.0 -termcolor,https://github.com/tartley/colorama/blob/master/LICENSE.txt,BSD-3 -terminado,https://github.com/jupyter/terminado/blob/master/LICENSE,BSD -testpath,https://github.com/jupyter/testpath/blob/master/LICENSE,BSD-3 -tornado,https://github.com/tornadoweb/tornado/blob/master/LICENSE,Apache 2.0 -traitlets,https://github.com/ipython/traitlets/blob/master/COPYING.md,BSD-3 -uritemplate,https://github.com/python-hyper/uritemplate/blob/master/LICENSE,Apache 2.0 & BSD -urllib3,https://github.com/urllib3/urllib3/blob/master/LICENSE.txt,MIT -wcwidth,https://github.com/jquast/wcwidth/blob/master/LICENSE.txt,MIT -webencodings,https://github.com/gsnedders/python-webencodings/blob/master/LICENSE,BSD-3 -widgetsnbextension,https://github.com/jupyter-widgets/ipywidgets/blob/master/widgetsnbextension/LICENSE,BSD-3 +pyzmq,https://raw.githubusercontent.com/zeromq/pyzmq/master/COPYING.BSD,BSD-3 +qtconsole,https://raw.githubusercontent.com/jupyter/qtconsole/master/LICENSE,BSD-3 +requests,https://raw.githubusercontent.com/psf/requests/master/LICENSE,Apache 2.0 +requests-oauthlib,https://raw.githubusercontent.com/requests/requests-oauthlib/master/LICENSE,ISC +rsa,https://raw.githubusercontent.com/sybrenstuvel/python-rsa/master/LICENSE,Apache 2.0 +scikit-learn,https://raw.githubusercontent.com/scikit-learn/scikit-learn/master/COPYING,BSD-3 +scipy,https://raw.githubusercontent.com/scipy/scipy/master/LICENSE.txt,BSD-3 +six,https://raw.githubusercontent.com/benjaminp/six/master/LICENSE,MIT +tensorboard,https://raw.githubusercontent.com/tensorflow/tensorboard/master/LICENSE,Apache 2.0 +tensorflow,https://raw.githubusercontent.com/tensorflow/tensorflow/master/LICENSE,Apache 2.0 +tensorflow-data-validation,https://raw.githubusercontent.com/tensorflow/data-validation/master/LICENSE,Apache 2.0 
+tensorflow-estimator,https://raw.githubusercontent.com/tensorflow/estimator/master/LICENSE,Apache 2.0 +tensorflow-metadata,https://raw.githubusercontent.com/tensorflow/metadata/master/LICENSE,Apache 2.0 +tensorflow-model-analysis,https://raw.githubusercontent.com/tensorflow/model-analysis/master/LICENSE,Apache 2.0 +tensorflow-transform,https://raw.githubusercontent.com/tensorflow/transform/master/LICENSE,Apache 2.0 +termcolor,https://raw.githubusercontent.com/tartley/colorama/master/LICENSE.txt,BSD-3 +terminado,https://raw.githubusercontent.com/jupyter/terminado/master/LICENSE,BSD +testpath,https://raw.githubusercontent.com/jupyter/testpath/master/LICENSE,BSD-3 +tornado,https://raw.githubusercontent.com/tornadoweb/tornado/master/LICENSE,Apache 2.0 +traitlets,https://raw.githubusercontent.com/ipython/traitlets/master/COPYING.md,BSD-3 +uritemplate,https://raw.githubusercontent.com/python-hyper/uritemplate/master/LICENSE,Apache 2.0 & BSD +urllib3,https://raw.githubusercontent.com/urllib3/urllib3/master/LICENSE.txt,MIT +wcwidth,https://raw.githubusercontent.com/jquast/wcwidth/master/LICENSE.txt,MIT +webencodings,https://raw.githubusercontent.com/gsnedders/python-webencodings/master/LICENSE,BSD-3 +widgetsnbextension,https://raw.githubusercontent.com/jupyter-widgets/ipywidgets/master/widgetsnbextension/LICENSE,BSD-3 +zipp,https://raw.githubusercontent.com/jaraco/zipp/master/LICENSE,MIT +google-pasta,https://raw.githubusercontent.com/google/pasta/master/LICENSE,Apache 2.0 +opt-einsum,https://raw.githubusercontent.com/dgasmith/opt_einsum/master/LICENSE,MIT +tensorflow-serving-api,https://raw.githubusercontent.com/tensorflow/serving/master/LICENSE,Apache 2.0 +tfx-bsl,https://raw.githubusercontent.com/tensorflow/tfx-bsl/master/LICENSE,Apache 2.0 +wrapt,https://raw.githubusercontent.com/GrahamDumpleton/wrapt/develop/LICENSE,2-Clause BSD +asn1crypto,https://raw.githubusercontent.com/wbond/asn1crypto/master/LICENSE,MIT +cryptography,https://raw.githubusercontent.com/pyca/cryptography/master/LICENSE,Apache or BSD +keyring,https://raw.githubusercontent.com/jaraco/keyring/master/LICENSE,MIT +keyrings.alt,https://raw.githubusercontent.com/jaraco/keyrings.alt/master/LICENSE,MIT +pycrypto,https://raw.githubusercontent.com/dlitz/pycrypto/master/COPYRIGHT,Public Domain +pygobject,https://raw.githubusercontent.com/GNOME/pygobject/mainline/COPYING,LGPL +pyxdg,https://cgit.freedesktop.org/xdg/pyxdg/plain/COPYING,LGPL +SecretStorage,https://raw.githubusercontent.com/mitya57/secretstorage/master/LICENSE,BSD-3 +Mako,https://raw.githubusercontent.com/sqlalchemy/mako/master/LICENSE,MIT +typing-extensions,https://raw.githubusercontent.com/python/typing/master/typing_extensions/LICENSE,Python Software Foundation License +QtPy,https://raw.githubusercontent.com/spyder-ide/qtpy/master/LICENSE.txt,MIT diff --git a/backend/src/apiserver/visualization/third_party_licenses/Jinja2.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/Jinja2.LICENSE new file mode 100644 index 00000000000..c37cae49ec7 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/Jinja2.LICENSE @@ -0,0 +1,28 @@ +Copyright 2007 Pallets + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. 
Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/backend/src/apiserver/visualization/third_party_licenses/Keras-Applications.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/Keras-Applications.LICENSE new file mode 100644 index 00000000000..3229a592c94 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/Keras-Applications.LICENSE @@ -0,0 +1,34 @@ +COPYRIGHT + +Copyright (c) 2016 - 2018, the respective contributors. +All rights reserved. + +Each contributor holds copyright over their respective contributions. +The project versioning (Git) records all such contribution source information. +The initial code of this repository came from https://github.com/keras-team/keras +(the Keras repository), hence, for author information regarding commits +that occured earlier than the first commit in the present repository, +please see the original Keras repository. + +LICENSE + +The MIT License (MIT) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + diff --git a/backend/src/apiserver/visualization/third_party_licenses/Keras-Preprocessing.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/Keras-Preprocessing.LICENSE new file mode 100644 index 00000000000..66aa081766c --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/Keras-Preprocessing.LICENSE @@ -0,0 +1,34 @@ +COPYRIGHT + +Copyright (c) 2015 - 2018, the respective contributors. 
+All rights reserved.
+
+Each contributor holds copyright over their respective contributions.
+The project versioning (Git) records all such contribution source information.
+The initial code of this repository came from https://github.com/keras-team/keras
+(the Keras repository), hence, for author information regarding commits
+that occured earlier than the first commit in the present repository,
+please see the original Keras repository.
+
+LICENSE
+
+The MIT License (MIT)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
diff --git a/backend/src/apiserver/visualization/third_party_licenses/Mako.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/Mako.LICENSE
new file mode 100644
index 00000000000..1f835e94a1c
--- /dev/null
+++ b/backend/src/apiserver/visualization/third_party_licenses/Mako.LICENSE
@@ -0,0 +1,19 @@
+Copyright 2006-2020 the Mako authors and contributors <see AUTHORS file>.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+of the Software, and to permit persons to whom the Software is furnished to do
+so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
\ No newline at end of file
diff --git a/backend/src/apiserver/visualization/third_party_licenses/Markdown.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/Markdown.LICENSE
new file mode 100644
index 00000000000..2652d97ad1b
--- /dev/null
+++ b/backend/src/apiserver/visualization/third_party_licenses/Markdown.LICENSE
@@ -0,0 +1,29 @@
+Copyright 2007, 2008 The Python Markdown Project (v. 1.7 and later)
+Copyright 2004, 2005, 2006 Yuri Takhteyev (v. 0.2-1.6b)
+Copyright 2004 Manfred Stienstra (the original version)
+
+All rights reserved.
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. +* Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. +* Neither the name of the Python Markdown Project nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE PYTHON MARKDOWN PROJECT ''AS IS'' AND ANY +EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL ANY CONTRIBUTORS TO THE PYTHON MARKDOWN PROJECT +BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. diff --git a/backend/src/apiserver/visualization/third_party_licenses/MarkupSafe.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/MarkupSafe.LICENSE new file mode 100644 index 00000000000..9d227a0cc43 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/MarkupSafe.LICENSE @@ -0,0 +1,28 @@ +Copyright 2010 Pallets + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/backend/src/apiserver/visualization/third_party_licenses/Pillow.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/Pillow.LICENSE new file mode 100644 index 00000000000..4aac532f486 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/Pillow.LICENSE @@ -0,0 +1,30 @@ +The Python Imaging Library (PIL) is + + Copyright © 1997-2011 by Secret Labs AB + Copyright © 1995-2011 by Fredrik Lundh + +Pillow is the friendly PIL fork. It is + + Copyright © 2010-2020 by Alex Clark and contributors + +Like PIL, Pillow is licensed under the open source PIL Software License: + +By obtaining, using, and/or copying this software and/or its associated +documentation, you agree that you have read, understood, and will comply +with the following terms and conditions: + +Permission to use, copy, modify, and distribute this software and its +associated documentation for any purpose and without fee is hereby granted, +provided that the above copyright notice appears in all copies, and that +both that copyright notice and this permission notice appear in supporting +documentation, and that the name of Secret Labs AB or the author not be +used in advertising or publicity pertaining to distribution of the software +without specific, written prior permission. + +SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS +SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. +IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR BE LIABLE FOR ANY SPECIAL, +INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. diff --git a/backend/src/apiserver/visualization/third_party_licenses/PyYAML.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/PyYAML.LICENSE new file mode 100644 index 00000000000..3d82c281ee8 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/PyYAML.LICENSE @@ -0,0 +1,20 @@ +Copyright (c) 2017-2020 Ingy döt Net +Copyright (c) 2006-2016 Kirill Simonov + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/backend/src/apiserver/visualization/third_party_licenses/Pygments.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/Pygments.LICENSE new file mode 100644 index 00000000000..13d1c74b49a --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/Pygments.LICENSE @@ -0,0 +1,25 @@ +Copyright (c) 2006-2019 by the respective authors (see AUTHORS file). +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +* Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/backend/src/apiserver/visualization/third_party_licenses/QtPy.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/QtPy.LICENSE new file mode 100644 index 00000000000..4a791817c0c --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/QtPy.LICENSE @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) The Spyder Development Team + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + diff --git a/backend/src/apiserver/visualization/third_party_licenses/SecretStorage.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/SecretStorage.LICENSE new file mode 100644 index 00000000000..a343a622c0f --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/SecretStorage.LICENSE @@ -0,0 +1,25 @@ +Copyright 2012-2018 Dmitry Shachnev +All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. +3. Neither the name of the University nor the names of its contributors may be + used to endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/backend/src/apiserver/visualization/third_party_licenses/Send2Trash.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/Send2Trash.LICENSE new file mode 100644 index 00000000000..221c8299a40 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/Send2Trash.LICENSE @@ -0,0 +1,10 @@ +Copyright (c) 2017, Virgil Dupras +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. + * Neither the name of Hardcoded Software Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/backend/src/apiserver/visualization/third_party_licenses/Werkzeug.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/Werkzeug.LICENSE new file mode 100644 index 00000000000..c37cae49ec7 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/Werkzeug.LICENSE @@ -0,0 +1,28 @@ +Copyright 2007 Pallets + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/backend/src/apiserver/visualization/third_party_licenses/absl-py.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/absl-py.LICENSE new file mode 100644 index 00000000000..d6456956733 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/absl-py.LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/backend/src/apiserver/visualization/third_party_licenses/apache-beam.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/apache-beam.LICENSE new file mode 100644 index 00000000000..44c35bb1a01 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/apache-beam.LICENSE @@ -0,0 +1,663 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ + A part of several convenience binary distributions of this software is licensed as follows: + + Google Protobuf: + Copyright 2008 Google Inc. All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following disclaimer + in the documentation and/or other materials provided with the + distribution. + * Neither the name of Google Inc. nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + Code generated by the Protocol Buffer compiler is owned by the owner + of the input file used when generating it. This code is not + standalone and requires a support library to be linked with it. This + support library is itself covered by the above license. + + jsr-305: + Copyright (c) 2007-2009, JSR305 expert group + All rights reserved. + + https://opensource.org/licenses/BSD-3-Clause + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + * Neither the name of the JSR305 expert group nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, + THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE + LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + POSSIBILITY OF SUCH DAMAGE. 
+ + janino-compiler: + Janino - An embedded Java[TM] compiler + + Copyright (c) 2001-2016, Arno Unkrig + Copyright (c) 2015-2016 TIBCO Software Inc. + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions + are met: + + 1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + 2. Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials + provided with the distribution. + 3. Neither the name of JANINO nor the names of its contributors + may be used to endorse or promote products derived from this + software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE + LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER + IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR + OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN + IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + jline: + Copyright (c) 2002-2016, the original author or authors. + All rights reserved. + + http://www.opensource.org/licenses/bsd-license.php + + Redistribution and use in source and binary forms, with or + without modification, are permitted provided that the following + conditions are met: + + Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer + in the documentation and/or other materials provided with + the distribution. + + Neither the name of JLine nor the names of its contributors + may be used to endorse or promote products derived from this + software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, + BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY + AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO + EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, + OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, + PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED + AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT + LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING + IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED + OF THE POSSIBILITY OF SUCH DAMAGE. + + sqlline: + SQLLine - Shell for issuing SQL to relational databases via JDBC + + Copyright (c) 2002,2003,2004,2005,2006,2007 Marc Prud'hommeaux + Copyright (c) 2004-2010 The Eigenbase Project + Copyright (c) 2013-2017 Julian Hyde + All rights reserved. 
+ + =============================================================================== + + Licensed under the Modified BSD License (the "License"); you may not + use this file except in compliance with the License. You may obtain a + copy of the License at: + + http://opensource.org/licenses/BSD-3-Clause + + Redistribution and use in source and binary forms, + with or without modification, are permitted provided + that the following conditions are met: + + (1) Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + (2) Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the + distribution. + + (3) The name of the author may not be used to endorse or promote + products derived from this software without specific prior written + permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + slf4j: + Copyright (c) 2004-2017 QOS.ch + All rights reserved. + + Permission is hereby granted, free of charge, to any person obtaining + a copy of this software and associated documentation files (the + "Software"), to deal in the Software without restriction, including + without limitation the rights to use, copy, modify, merge, publish, + distribute, sublicense, and/or sell copies of the Software, and to + permit persons to whom the Software is furnished to do so, subject to + the following conditions: + + The above copyright notice and this permission notice shall be + included in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE + LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION + OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION + WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE + +################################################################################ +CPython LICENSE. Source: +https://github.com/python/cpython/blob/81574b80e92554adf75c13fa42415beb8be383cb/LICENSE + +A. HISTORY OF THE SOFTWARE +========================== + +Python was created in the early 1990s by Guido van Rossum at Stichting +Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands +as a successor of a language called ABC. Guido remains Python's +principal author, although it includes many contributions from others. 
+ +In 1995, Guido continued his work on Python at the Corporation for +National Research Initiatives (CNRI, see http://www.cnri.reston.va.us) +in Reston, Virginia where he released several versions of the +software. + +In May 2000, Guido and the Python core development team moved to +BeOpen.com to form the BeOpen PythonLabs team. In October of the same +year, the PythonLabs team moved to Digital Creations, which became +Zope Corporation. In 2001, the Python Software Foundation (PSF, see +https://www.python.org/psf/) was formed, a non-profit organization +created specifically to own Python-related Intellectual Property. +Zope Corporation was a sponsoring member of the PSF. + +All Python releases are Open Source (see http://www.opensource.org for +the Open Source Definition). Historically, most, but not all, Python +releases have also been GPL-compatible; the table below summarizes +the various releases. + + Release Derived Year Owner GPL- + from compatible? (1) + + 0.9.0 thru 1.2 1991-1995 CWI yes + 1.3 thru 1.5.2 1.2 1995-1999 CNRI yes + 1.6 1.5.2 2000 CNRI no + 2.0 1.6 2000 BeOpen.com no + 1.6.1 1.6 2001 CNRI yes (2) + 2.1 2.0+1.6.1 2001 PSF no + 2.0.1 2.0+1.6.1 2001 PSF yes + 2.1.1 2.1+2.0.1 2001 PSF yes + 2.1.2 2.1.1 2002 PSF yes + 2.1.3 2.1.2 2002 PSF yes + 2.2 and above 2.1.1 2001-now PSF yes + +Footnotes: + +(1) GPL-compatible doesn't mean that we're distributing Python under + the GPL. All Python licenses, unlike the GPL, let you distribute + a modified version without making your changes open source. The + GPL-compatible licenses make it possible to combine Python with + other software that is released under the GPL; the others don't. + +(2) According to Richard Stallman, 1.6.1 is not GPL-compatible, + because its license has a choice of law clause. According to + CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1 + is "not incompatible" with the GPL. + +Thanks to the many outside volunteers who have worked under Guido's +direction to make these releases possible. + + +B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON +=============================================================== + +PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 +-------------------------------------------- + +1. This LICENSE AGREEMENT is between the Python Software Foundation +("PSF"), and the Individual or Organization ("Licensee") accessing and +otherwise using this software ("Python") in source or binary form and +its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, PSF hereby +grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, +analyze, test, perform and/or display publicly, prepare derivative works, +distribute, and otherwise use Python alone or in any derivative version, +provided, however, that PSF's License Agreement and PSF's notice of copyright, +i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, +2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018 Python Software Foundation; All +Rights Reserved" are retained in Python alone or in any derivative version +prepared by Licensee. + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python. + +4. PSF is making Python available to Licensee on an "AS IS" +basis. 
PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. Nothing in this License Agreement shall be deemed to create any +relationship of agency, partnership, or joint venture between PSF and +Licensee. This License Agreement does not grant permission to use PSF +trademarks or trade name in a trademark sense to endorse or promote +products or services of Licensee, or any third party. + +8. By copying, installing or otherwise using Python, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + + +BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0 +------------------------------------------- + +BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1 + +1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an +office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the +Individual or Organization ("Licensee") accessing and otherwise using +this software in source or binary form and its associated +documentation ("the Software"). + +2. Subject to the terms and conditions of this BeOpen Python License +Agreement, BeOpen hereby grants Licensee a non-exclusive, +royalty-free, world-wide license to reproduce, analyze, test, perform +and/or display publicly, prepare derivative works, distribute, and +otherwise use the Software alone or in any derivative version, +provided, however, that the BeOpen Python License is retained in the +Software, alone or in any derivative version prepared by Licensee. + +3. BeOpen is making the Software available to Licensee on an "AS IS" +basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE +SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS +AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY +DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +5. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +6. This License Agreement shall be governed by and interpreted in all +respects by the law of the State of California, excluding conflict of +law provisions. Nothing in this License Agreement shall be deemed to +create any relationship of agency, partnership, or joint venture +between BeOpen and Licensee. This License Agreement does not grant +permission to use BeOpen trademarks or trade names in a trademark +sense to endorse or promote products or services of Licensee, or any +third party. As an exception, the "BeOpen Python" logos available at +http://www.pythonlabs.com/logos.html may be used according to the +permissions granted on that web page. + +7. 
By copying, installing or otherwise using the software, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + + +CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1 +--------------------------------------- + +1. This LICENSE AGREEMENT is between the Corporation for National +Research Initiatives, having an office at 1895 Preston White Drive, +Reston, VA 20191 ("CNRI"), and the Individual or Organization +("Licensee") accessing and otherwise using Python 1.6.1 software in +source or binary form and its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, CNRI +hereby grants Licensee a nonexclusive, royalty-free, world-wide +license to reproduce, analyze, test, perform and/or display publicly, +prepare derivative works, distribute, and otherwise use Python 1.6.1 +alone or in any derivative version, provided, however, that CNRI's +License Agreement and CNRI's notice of copyright, i.e., "Copyright (c) +1995-2001 Corporation for National Research Initiatives; All Rights +Reserved" are retained in Python 1.6.1 alone or in any derivative +version prepared by Licensee. Alternately, in lieu of CNRI's License +Agreement, Licensee may substitute the following text (omitting the +quotes): "Python 1.6.1 is made available subject to the terms and +conditions in CNRI's License Agreement. This Agreement together with +Python 1.6.1 may be located on the Internet using the following +unique, persistent identifier (known as a handle): 1895.22/1013. This +Agreement may also be obtained from a proxy server on the Internet +using the following URL: http://hdl.handle.net/1895.22/1013". + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python 1.6.1 or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python 1.6.1. + +4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS" +basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. This License Agreement shall be governed by the federal +intellectual property law of the United States, including without +limitation the federal copyright law, and, to the extent such +U.S. federal law does not apply, by the law of the Commonwealth of +Virginia, excluding Virginia's conflict of law provisions. +Notwithstanding the foregoing, with regard to derivative works based +on Python 1.6.1 that incorporate non-separable material that was +previously distributed under the GNU General Public License (GPL), the +law of the Commonwealth of Virginia shall govern this License +Agreement only as to issues arising under or with respect to +Paragraphs 4, 5, and 7 of this License Agreement. 
Nothing in this +License Agreement shall be deemed to create any relationship of +agency, partnership, or joint venture between CNRI and Licensee. This +License Agreement does not grant permission to use CNRI trademarks or +trade name in a trademark sense to endorse or promote products or +services of Licensee, or any third party. + +8. By clicking on the "ACCEPT" button where indicated, or by copying, +installing or otherwise using Python 1.6.1, Licensee agrees to be +bound by the terms and conditions of this License Agreement. + + ACCEPT + + +CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2 +-------------------------------------------------- + +Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam, +The Netherlands. All rights reserved. + +Permission to use, copy, modify, and distribute this software and its +documentation for any purpose and without fee is hereby granted, +provided that the above copyright notice appear in all copies and that +both that copyright notice and this permission notice appear in +supporting documentation, and that the name of Stichting Mathematisch +Centrum or CWI not be used in advertising or publicity pertaining to +distribution of the software without specific, written prior +permission. + +STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO +THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE +FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/backend/src/apiserver/visualization/third_party_licenses/asn1crypto.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/asn1crypto.LICENSE new file mode 100644 index 00000000000..8038d9a71ff --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/asn1crypto.LICENSE @@ -0,0 +1,19 @@ +Copyright (c) 2015-2019 Will Bond + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/backend/src/apiserver/visualization/third_party_licenses/astor.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/astor.LICENSE new file mode 100644 index 00000000000..e3c940f0f61 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/astor.LICENSE @@ -0,0 +1,29 @@ +Copyright (c) 2012, Patrick Maupin +Copyright (c) 2013, Berker Peksag +Copyright (c) 2008, Armin Ronacher +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this +list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, +this list of conditions and the following disclaimer in the documentation and/or +other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its contributors +may be used to endorse or promote products derived from this software without +specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/backend/src/apiserver/visualization/third_party_licenses/attrs.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/attrs.LICENSE new file mode 100644 index 00000000000..7ae3df93097 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/attrs.LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015 Hynek Schlawack + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/backend/src/apiserver/visualization/third_party_licenses/avro-python3.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/avro-python3.LICENSE new file mode 100644 index 00000000000..7e159a69bc2 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/avro-python3.LICENSE @@ -0,0 +1,570 @@ + + Apache License + Version 2.0, January 2004 + https://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +---------------------------------------------------------------------- +License for the Jansson C JSON parser used in the C implementation: + +Copyright (c) 2009-2011 Petri Lehtinen + +Some files include an additional copyright notice: +* lang/c/jansson/src/pack_unpack.c + Copyright (c) 2011 Graeme Smecher +* lang/c/jansson/test/suites/api/test_unpack.c + Copyright (c) 2011 Graeme Smecher +* lang/c/jansson/src/memory.c + Copyright (c) 2011 Basile Starynkevitch + +| Permission is hereby granted, free of charge, to any person obtaining a copy +| of this software and associated documentation files (the "Software"), to deal +| in the Software without restriction, including without limitation the rights +| to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +| copies of the Software, and to permit persons to whom the Software is +| furnished to do so, subject to the following conditions: +| +| The above copyright notice and this permission notice shall be included in +| all copies or substantial portions of the Software. +| +| THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +| IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +| FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +| AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +| LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +| OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +| THE SOFTWARE. + +---------------------------------------------------------------------- +License for msinttypes.h and msstdint.h used in the C implementation: +Source from: +https://code.google.com/p/msinttypes/downloads/detail?name=msinttypes-r26.zip + +Copyright (c) 2006-2008 Alexander Chemeris + +| Redistribution and use in source and binary forms, with or without +| modification, are permitted provided that the following conditions are met: +| +| 1. Redistributions of source code must retain the above copyright notice, +| this list of conditions and the following disclaimer. +| +| 2. Redistributions in binary form must reproduce the above copyright +| notice, this list of conditions and the following disclaimer in the +| documentation and/or other materials provided with the distribution. +| +| 3. The name of the author may be used to endorse or promote products +| derived from this software without specific prior written permission. +| +| THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED +| WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +| MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO +| EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +| SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +| PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; +| OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +| WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR +| OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF +| ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License for st.c and st.h used in the C implementation: + +| This is a public domain general purpose hash table package written by +| Peter Moore @ UCB. + +---------------------------------------------------------------------- +License for Dirent API for Microsoft Visual Studio used in the C implementation: +Source from: +http://www.softagalleria.net/download/dirent/dirent-1.11.zip + +Copyright (C) 2006 Toni Ronkko + +| Permission is hereby granted, free of charge, to any person obtaining +| a copy of this software and associated documentation files (the +| ``Software''), to deal in the Software without restriction, including +| without limitation the rights to use, copy, modify, merge, publish, +| distribute, sublicense, and/or sell copies of the Software, and to +| permit persons to whom the Software is furnished to do so, subject to +| the following conditions: +| +| The above copyright notice and this permission notice shall be included +| in all copies or substantial portions of the Software. +| +| THE SOFTWARE IS PROVIDED ``AS IS'', WITHOUT WARRANTY OF ANY KIND, EXPRESS +| OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +| MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +| IN NO EVENT SHALL TONI RONKKO BE LIABLE FOR ANY CLAIM, DAMAGES OR +| OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, +| ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +| OTHER DEALINGS IN THE SOFTWARE. + +---------------------------------------------------------------------- +License for ivy-2.2.0.jar used in the python implementation: + +Apache License version 2.0 (see above) + +---------------------------------------------------------------------- +License for pyAntTasks-1.3.jar used in the python implementation: + +Apache License version 2.0 (see above) + +---------------------------------------------------------------------- +License for NUnit binary included with the C# implementation: +File: nunit.framework.dll + +| NUnit License +| +| Copyright © 2002-2015 Charlie Poole +| Copyright © 2002-2004 James W. Newkirk, Michael C. Two, Alexei A. Vorontsov +| Copyright © 2000-2002 Philip A. Craig +| +| This software is provided 'as-is', without any express or implied warranty. In +| no event will the authors be held liable for any damages arising from the use +| of this software. +| +| Permission is granted to anyone to use this software for any purpose, including +| commercial applications, and to alter it and redistribute it freely, subject to +| the following restrictions: +| +| The origin of this software must not be misrepresented; you must not claim that +| you wrote the original software. If you use this software in a product, an +| acknowledgment (see the following) in the product documentation is required. +| +| Portions Copyright © 2002-2012 Charlie Poole or Copyright © 2002-2004 James W. +| Newkirk, Michael C. Two, Alexei A. 
Vorontsov or Copyright © 2000-2002 Philip A. +| Craig +| +| Altered source versions must be plainly marked as such, and must not be +| misrepresented as being the original software. +| +| This notice may not be removed or altered from any source distribution. +| License Note +| +| This license is based on the open source zlib/libpng license. The idea was to +| keep the license as simple as possible to encourage use of NUnit in free and +| commercial applications and libraries, but to keep the source code together and +| to give credit to the NUnit contributors for their efforts. While this license +| allows shipping NUnit in source and binary form, if shipping a NUnit variant is +| the sole purpose of your product, please let us know. + +---------------------------------------------------------------------- +License for the Json.NET binary included with the C# implementation: +File: Newtonsoft.Json.dll + +Copyright (c) 2007 James Newton-King + +| Permission is hereby granted, free of charge, to any person obtaining +| a copy of this software and associated documentation files (the +| "Software"), to deal in the Software without restriction, including +| without limitation the rights to use, copy, modify, merge, publish, +| distribute, sublicense, and/or sell copies of the Software, and to +| permit persons to whom the Software is furnished to do so, subject to +| the following conditions: +| +| The above copyright notice and this permission notice shall be +| included in all copies or substantial portions of the Software. +| +| THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +| EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +| MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +| NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +| LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +| OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +| WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +---------------------------------------------------------------------- +License for the Castle Core binary included with the C# implementation: +File: Castle.Core.dll + +Copyright (c) 2004-2015 Castle Project + +License: Apache License version 2.0 (see above) +URL: https://opensource.org/licenses/Apache-2.0 + +---------------------------------------------------------------------- +License for the log4net binary included with the C# implementation: +File: log4net.dll + +Copyright 2004-2015 The Apache Software Foundation. 
+ +License: Apache License version 2.0 (see above) + +---------------------------------------------------------------------- +License for the m4 macros used by the C++ implementation: + +Files: +* lang/c++/m4/m4_ax_boost_system.m4 + Copyright (c) 2008 Thomas Porschberg + Copyright (c) 2008 Michael Tindal + Copyright (c) 2008 Daniel Casimiro +* lang/c++/m4/m4_ax_boost_asio.m4 + Copyright (c) 2008 Thomas Porschberg + Copyright (c) 2008 Pete Greenwell +* lang/c++/m4/m4_ax_boost_filesystem.m4 + Copyright (c) 2009 Thomas Porschberg + Copyright (c) 2009 Michael Tindal + Copyright (c) 2009 Roman Rybalko +* lang/c++/m4/m4_ax_boost_thread.m4 + Copyright (c) 2009 Thomas Porschberg + Copyright (c) 2009 Michael Tindal +* lang/c++/m4/m4_ax_boost_regex.m4 + Copyright (c) 2008 Thomas Porschberg + Copyright (c) 2008 Michael Tindal +* lang/c++/m4/m4_ax_boost_base.m4 + Copyright (c) 2008 Thomas Porschberg + +License text: +| Copying and distribution of this file, with or without modification, are +| permitted in any medium without royalty provided the copyright notice +| and this notice are preserved. This file is offered as-is, without any +| warranty. + +---------------------------------------------------------------------- +License for the AVRO_BOOT_NO_TRAIT code in the C++ implementation: +File: lang/c++/api/Boost.hh + +| Boost Software License - Version 1.0 - August 17th, 2003 +| +| Permission is hereby granted, free of charge, to any person or organization +| obtaining a copy of the software and accompanying documentation covered by +| this license (the "Software") to use, reproduce, display, distribute, +| execute, and transmit the Software, and to prepare derivative works of the +| Software, and to permit third-parties to whom the Software is furnished to +| do so, all subject to the following: +| +| The copyright notices in the Software and this entire statement, including +| the above license grant, this restriction and the following disclaimer, +| must be included in all copies of the Software, in whole or in part, and +| all derivative works of the Software, unless such copies or derivative +| works are solely in the form of machine-executable object code generated by +| a source language processor. +| +| THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +| IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +| FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT +| SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE +| FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, +| ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +| DEALINGS IN THE SOFTWARE. + +---------------------------------------------------------------------- +License for jquery.tipsy.js, tipsy.js, and tipsy.css used by the Java IPC implementation: + +Copyright (c) 2008 Jason Frame (jason@onehackoranother.com) + +| Permission is hereby granted, free of charge, to any person obtaining a copy +| of this software and associated documentation files (the "Software"), to deal +| in the Software without restriction, including without limitation the rights +| to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +| copies of the Software, and to permit persons to whom the Software is +| furnished to do so, subject to the following conditions: +| +| The above copyright notice and this permission notice shall be included in +| all copies or substantial portions of the Software. 
+| +| THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +| IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +| FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +| AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +| LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +| OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +| THE SOFTWARE. + +---------------------------------------------------------------------- +License for protovis-r3.2.js used by the Java IPC implementation: + +Copyright (c) 2010, Stanford Visualization Group +All rights reserved. + +| Redistribution and use in source and binary forms, with or without modification, +| are permitted provided that the following conditions are met: +| +| * Redistributions of source code must retain the above copyright notice, +| this list of conditions and the following disclaimer. +| +| * Redistributions in binary form must reproduce the above copyright notice, +| this list of conditions and the following disclaimer in the documentation +| and/or other materials provided with the distribution. +| +| * Neither the name of Stanford University nor the names of its contributors +| may be used to endorse or promote products derived from this software +| without specific prior written permission. +| +| THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +| ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +| WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +| DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +| ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +| (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +| LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +| ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +| (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +| SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License for g.Raphael 0.4.1 used by the Java IPC implementation: + +Copyright (c) 2009 Dmitry Baranovskiy (http://g.raphaeljs.com) +Licensed under the MIT (https://www.opensource.org/licenses/mit-license.php) license. + +---------------------------------------------------------------------- +License for jQuery v1.4.2 used by the Java IPC implementation: + +Copyright 2010, John Resig +Dual licensed under the MIT or GPL Version 2 licenses. +https://jquery.org/license + +jQuery includes Sizzle.js +https://sizzlejs.com/ +Copyright 2010, The Dojo Foundation +Released under the MIT, BSD, and GPL Licenses. + +Both are included under the terms of the MIT license: + +| Permission is hereby granted, free of charge, to any person obtaining a copy +| of this software and associated documentation files (the "Software"), to deal +| in the Software without restriction, including without limitation the rights +| to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +| copies of the Software, and to permit persons to whom the Software is +| furnished to do so, subject to the following conditions: +| +| The above copyright notice and this permission notice shall be included in +| all copies or substantial portions of the Software. 
+| +| THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +| IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +| FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +| AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +| LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +| OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +| THE SOFTWARE. + +---------------------------------------------------------------------- +License for portions of idl.jj in the Java compiler implementation: + +Portions of idl.jj were modeled after the example Java 1.5 +parser included with JavaCC. For those portions: + +Copyright (c) 2006, Sun Microsystems, Inc. +All rights reserved. + +| Redistribution and use in source and binary forms, with or without +| modification, are permitted provided that the following conditions are met: +| +| * Redistributions of source code must retain the above copyright notice, +| this list of conditions and the following disclaimer. +| * Redistributions in binary form must reproduce the above copyright +| notice, this list of conditions and the following disclaimer in the +| documentation and/or other materials provided with the distribution. +| * Neither the name of the Sun Microsystems, Inc. nor the names of its +| contributors may be used to endorse or promote products derived from +| this software without specific prior written permission. +| +| THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +| AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +| IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +| ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +| LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +| CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +| SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +| INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +| CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +| ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF +| THE POSSIBILITY OF SUCH DAMAGE. diff --git a/backend/src/apiserver/visualization/third_party_licenses/avro.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/avro.LICENSE new file mode 100644 index 00000000000..7e159a69bc2 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/avro.LICENSE @@ -0,0 +1,570 @@ + + Apache License + Version 2.0, January 2004 + https://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ +---------------------------------------------------------------------- +License for the Jansson C JSON parser used in the C implementation: + +Copyright (c) 2009-2011 Petri Lehtinen + +Some files include an additional copyright notice: +* lang/c/jansson/src/pack_unpack.c + Copyright (c) 2011 Graeme Smecher +* lang/c/jansson/test/suites/api/test_unpack.c + Copyright (c) 2011 Graeme Smecher +* lang/c/jansson/src/memory.c + Copyright (c) 2011 Basile Starynkevitch + +| Permission is hereby granted, free of charge, to any person obtaining a copy +| of this software and associated documentation files (the "Software"), to deal +| in the Software without restriction, including without limitation the rights +| to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +| copies of the Software, and to permit persons to whom the Software is +| furnished to do so, subject to the following conditions: +| +| The above copyright notice and this permission notice shall be included in +| all copies or substantial portions of the Software. +| +| THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +| IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +| FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +| AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +| LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +| OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +| THE SOFTWARE. + +---------------------------------------------------------------------- +License for msinttypes.h and msstdint.h used in the C implementation: +Source from: +https://code.google.com/p/msinttypes/downloads/detail?name=msinttypes-r26.zip + +Copyright (c) 2006-2008 Alexander Chemeris + +| Redistribution and use in source and binary forms, with or without +| modification, are permitted provided that the following conditions are met: +| +| 1. Redistributions of source code must retain the above copyright notice, +| this list of conditions and the following disclaimer. +| +| 2. Redistributions in binary form must reproduce the above copyright +| notice, this list of conditions and the following disclaimer in the +| documentation and/or other materials provided with the distribution. +| +| 3. The name of the author may be used to endorse or promote products +| derived from this software without specific prior written permission. +| +| THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED +| WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +| MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO +| EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +| SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +| PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; +| OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +| WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR +| OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF +| ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License for st.c and st.h used in the C implementation: + +| This is a public domain general purpose hash table package written by +| Peter Moore @ UCB. 
+ +---------------------------------------------------------------------- +License for Dirent API for Microsoft Visual Studio used in the C implementation: +Source from: +http://www.softagalleria.net/download/dirent/dirent-1.11.zip + +Copyright (C) 2006 Toni Ronkko + +| Permission is hereby granted, free of charge, to any person obtaining +| a copy of this software and associated documentation files (the +| ``Software''), to deal in the Software without restriction, including +| without limitation the rights to use, copy, modify, merge, publish, +| distribute, sublicense, and/or sell copies of the Software, and to +| permit persons to whom the Software is furnished to do so, subject to +| the following conditions: +| +| The above copyright notice and this permission notice shall be included +| in all copies or substantial portions of the Software. +| +| THE SOFTWARE IS PROVIDED ``AS IS'', WITHOUT WARRANTY OF ANY KIND, EXPRESS +| OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +| MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +| IN NO EVENT SHALL TONI RONKKO BE LIABLE FOR ANY CLAIM, DAMAGES OR +| OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, +| ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +| OTHER DEALINGS IN THE SOFTWARE. + +---------------------------------------------------------------------- +License for ivy-2.2.0.jar used in the python implementation: + +Apache License version 2.0 (see above) + +---------------------------------------------------------------------- +License for pyAntTasks-1.3.jar used in the python implementation: + +Apache License version 2.0 (see above) + +---------------------------------------------------------------------- +License for NUnit binary included with the C# implementation: +File: nunit.framework.dll + +| NUnit License +| +| Copyright © 2002-2015 Charlie Poole +| Copyright © 2002-2004 James W. Newkirk, Michael C. Two, Alexei A. Vorontsov +| Copyright © 2000-2002 Philip A. Craig +| +| This software is provided 'as-is', without any express or implied warranty. In +| no event will the authors be held liable for any damages arising from the use +| of this software. +| +| Permission is granted to anyone to use this software for any purpose, including +| commercial applications, and to alter it and redistribute it freely, subject to +| the following restrictions: +| +| The origin of this software must not be misrepresented; you must not claim that +| you wrote the original software. If you use this software in a product, an +| acknowledgment (see the following) in the product documentation is required. +| +| Portions Copyright © 2002-2012 Charlie Poole or Copyright © 2002-2004 James W. +| Newkirk, Michael C. Two, Alexei A. Vorontsov or Copyright © 2000-2002 Philip A. +| Craig +| +| Altered source versions must be plainly marked as such, and must not be +| misrepresented as being the original software. +| +| This notice may not be removed or altered from any source distribution. +| License Note +| +| This license is based on the open source zlib/libpng license. The idea was to +| keep the license as simple as possible to encourage use of NUnit in free and +| commercial applications and libraries, but to keep the source code together and +| to give credit to the NUnit contributors for their efforts. While this license +| allows shipping NUnit in source and binary form, if shipping a NUnit variant is +| the sole purpose of your product, please let us know. 
+ +---------------------------------------------------------------------- +License for the Json.NET binary included with the C# implementation: +File: Newtonsoft.Json.dll + +Copyright (c) 2007 James Newton-King + +| Permission is hereby granted, free of charge, to any person obtaining +| a copy of this software and associated documentation files (the +| "Software"), to deal in the Software without restriction, including +| without limitation the rights to use, copy, modify, merge, publish, +| distribute, sublicense, and/or sell copies of the Software, and to +| permit persons to whom the Software is furnished to do so, subject to +| the following conditions: +| +| The above copyright notice and this permission notice shall be +| included in all copies or substantial portions of the Software. +| +| THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +| EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +| MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +| NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +| LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +| OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +| WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +---------------------------------------------------------------------- +License for the Castle Core binary included with the C# implementation: +File: Castle.Core.dll + +Copyright (c) 2004-2015 Castle Project + +License: Apache License version 2.0 (see above) +URL: https://opensource.org/licenses/Apache-2.0 + +---------------------------------------------------------------------- +License for the log4net binary included with the C# implementation: +File: log4net.dll + +Copyright 2004-2015 The Apache Software Foundation. + +License: Apache License version 2.0 (see above) + +---------------------------------------------------------------------- +License for the m4 macros used by the C++ implementation: + +Files: +* lang/c++/m4/m4_ax_boost_system.m4 + Copyright (c) 2008 Thomas Porschberg + Copyright (c) 2008 Michael Tindal + Copyright (c) 2008 Daniel Casimiro +* lang/c++/m4/m4_ax_boost_asio.m4 + Copyright (c) 2008 Thomas Porschberg + Copyright (c) 2008 Pete Greenwell +* lang/c++/m4/m4_ax_boost_filesystem.m4 + Copyright (c) 2009 Thomas Porschberg + Copyright (c) 2009 Michael Tindal + Copyright (c) 2009 Roman Rybalko +* lang/c++/m4/m4_ax_boost_thread.m4 + Copyright (c) 2009 Thomas Porschberg + Copyright (c) 2009 Michael Tindal +* lang/c++/m4/m4_ax_boost_regex.m4 + Copyright (c) 2008 Thomas Porschberg + Copyright (c) 2008 Michael Tindal +* lang/c++/m4/m4_ax_boost_base.m4 + Copyright (c) 2008 Thomas Porschberg + +License text: +| Copying and distribution of this file, with or without modification, are +| permitted in any medium without royalty provided the copyright notice +| and this notice are preserved. This file is offered as-is, without any +| warranty. 
+ +---------------------------------------------------------------------- +License for the AVRO_BOOT_NO_TRAIT code in the C++ implementation: +File: lang/c++/api/Boost.hh + +| Boost Software License - Version 1.0 - August 17th, 2003 +| +| Permission is hereby granted, free of charge, to any person or organization +| obtaining a copy of the software and accompanying documentation covered by +| this license (the "Software") to use, reproduce, display, distribute, +| execute, and transmit the Software, and to prepare derivative works of the +| Software, and to permit third-parties to whom the Software is furnished to +| do so, all subject to the following: +| +| The copyright notices in the Software and this entire statement, including +| the above license grant, this restriction and the following disclaimer, +| must be included in all copies of the Software, in whole or in part, and +| all derivative works of the Software, unless such copies or derivative +| works are solely in the form of machine-executable object code generated by +| a source language processor. +| +| THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +| IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +| FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT +| SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE +| FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, +| ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +| DEALINGS IN THE SOFTWARE. + +---------------------------------------------------------------------- +License for jquery.tipsy.js, tipsy.js, and tipsy.css used by the Java IPC implementation: + +Copyright (c) 2008 Jason Frame (jason@onehackoranother.com) + +| Permission is hereby granted, free of charge, to any person obtaining a copy +| of this software and associated documentation files (the "Software"), to deal +| in the Software without restriction, including without limitation the rights +| to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +| copies of the Software, and to permit persons to whom the Software is +| furnished to do so, subject to the following conditions: +| +| The above copyright notice and this permission notice shall be included in +| all copies or substantial portions of the Software. +| +| THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +| IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +| FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +| AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +| LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +| OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +| THE SOFTWARE. + +---------------------------------------------------------------------- +License for protovis-r3.2.js used by the Java IPC implementation: + +Copyright (c) 2010, Stanford Visualization Group +All rights reserved. + +| Redistribution and use in source and binary forms, with or without modification, +| are permitted provided that the following conditions are met: +| +| * Redistributions of source code must retain the above copyright notice, +| this list of conditions and the following disclaimer. 
+| +| * Redistributions in binary form must reproduce the above copyright notice, +| this list of conditions and the following disclaimer in the documentation +| and/or other materials provided with the distribution. +| +| * Neither the name of Stanford University nor the names of its contributors +| may be used to endorse or promote products derived from this software +| without specific prior written permission. +| +| THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +| ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +| WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +| DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +| ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +| (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +| LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +| ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +| (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +| SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +---------------------------------------------------------------------- +License for g.Raphael 0.4.1 used by the Java IPC implementation: + +Copyright (c) 2009 Dmitry Baranovskiy (http://g.raphaeljs.com) +Licensed under the MIT (https://www.opensource.org/licenses/mit-license.php) license. + +---------------------------------------------------------------------- +License for jQuery v1.4.2 used by the Java IPC implementation: + +Copyright 2010, John Resig +Dual licensed under the MIT or GPL Version 2 licenses. +https://jquery.org/license + +jQuery includes Sizzle.js +https://sizzlejs.com/ +Copyright 2010, The Dojo Foundation +Released under the MIT, BSD, and GPL Licenses. + +Both are included under the terms of the MIT license: + +| Permission is hereby granted, free of charge, to any person obtaining a copy +| of this software and associated documentation files (the "Software"), to deal +| in the Software without restriction, including without limitation the rights +| to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +| copies of the Software, and to permit persons to whom the Software is +| furnished to do so, subject to the following conditions: +| +| The above copyright notice and this permission notice shall be included in +| all copies or substantial portions of the Software. +| +| THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +| IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +| FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +| AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +| LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +| OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +| THE SOFTWARE. + +---------------------------------------------------------------------- +License for portions of idl.jj in the Java compiler implementation: + +Portions of idl.jj were modeled after the example Java 1.5 +parser included with JavaCC. For those portions: + +Copyright (c) 2006, Sun Microsystems, Inc. +All rights reserved. 
+ +| Redistribution and use in source and binary forms, with or without +| modification, are permitted provided that the following conditions are met: +| +| * Redistributions of source code must retain the above copyright notice, +| this list of conditions and the following disclaimer. +| * Redistributions in binary form must reproduce the above copyright +| notice, this list of conditions and the following disclaimer in the +| documentation and/or other materials provided with the distribution. +| * Neither the name of the Sun Microsystems, Inc. nor the names of its +| contributors may be used to endorse or promote products derived from +| this software without specific prior written permission. +| +| THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +| AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +| IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +| ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +| LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +| CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +| SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +| INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +| CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +| ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF +| THE POSSIBILITY OF SUCH DAMAGE. diff --git a/backend/src/apiserver/visualization/third_party_licenses/backcall.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/backcall.LICENSE new file mode 100644 index 00000000000..75f097ee3ad --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/backcall.LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2014, Thomas Kluyver +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, this + list of conditions and the following disclaimer in the documentation and/or + other materials provided with the distribution. + +* Neither the name of the {organization} nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/backend/src/apiserver/visualization/third_party_licenses/bleach.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/bleach.LICENSE new file mode 100644 index 00000000000..467c38e4986 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/bleach.LICENSE @@ -0,0 +1,13 @@ +Copyright (c) 2014-2017, Mozilla Foundation + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/backend/src/apiserver/visualization/third_party_licenses/bokeh.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/bokeh.LICENSE new file mode 100644 index 00000000000..758436e88be --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/bokeh.LICENSE @@ -0,0 +1,28 @@ +Copyright (c) 2012 - 2020, Anaconda, Inc., and Bokeh Contributors +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + +Redistributions of source code must retain the above copyright notice, +this list of conditions and the following disclaimer. + +Redistributions in binary form must reproduce the above copyright notice, +this list of conditions and the following disclaimer in the documentation +and/or other materials provided with the distribution. + +Neither the name of Anaconda nor the names of any contributors +may be used to endorse or promote products derived from this software +without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF +THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/backend/src/apiserver/visualization/third_party_licenses/cachetools.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/cachetools.LICENSE new file mode 100644 index 00000000000..7da84f4c637 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/cachetools.LICENSE @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright (c) 2014-2019 Thomas Kemmer + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/backend/src/apiserver/visualization/third_party_licenses/certifi.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/certifi.LICENSE new file mode 100644 index 00000000000..802b53ff11e --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/certifi.LICENSE @@ -0,0 +1,21 @@ +This packge contains a modified version of ca-bundle.crt: + +ca-bundle.crt -- Bundle of CA Root Certificates + +Certificate data from Mozilla as of: Thu Nov 3 19:04:19 2011# +This is a bundle of X.509 certificates of public Certificate Authorities +(CA). These were automatically extracted from Mozilla's root certificates +file (certdata.txt). This file can be found in the mozilla source tree: +http://mxr.mozilla.org/mozilla/source/security/nss/lib/ckfw/builtins/certdata.txt?raw=1# +It contains the certificates in PEM format and therefore +can be directly used with curl / libcurl / php_curl, or with +an Apache+mod_ssl webserver for SSL client authentication. +Just configure this file as the SSLCACertificateFile.# + +***** BEGIN LICENSE BLOCK ***** +This Source Code Form is subject to the terms of the Mozilla Public License, +v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain +one at http://mozilla.org/MPL/2.0/. + +***** END LICENSE BLOCK ***** +@(#) $RCSfile: certdata.txt,v $ $Revision: 1.80 $ $Date: 2011/11/03 15:11:58 $ diff --git a/backend/src/apiserver/visualization/third_party_licenses/chardet.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/chardet.LICENSE new file mode 100644 index 00000000000..8add30ad590 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/chardet.LICENSE @@ -0,0 +1,504 @@ + GNU LESSER GENERAL PUBLIC LICENSE + Version 2.1, February 1999 + + Copyright (C) 1991, 1999 Free Software Foundation, Inc. + 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + +[This is the first released version of the Lesser GPL. 
It also counts + as the successor of the GNU Library Public License, version 2, hence + the version number 2.1.] + + Preamble + + The licenses for most software are designed to take away your +freedom to share and change it. By contrast, the GNU General Public +Licenses are intended to guarantee your freedom to share and change +free software--to make sure the software is free for all its users. + + This license, the Lesser General Public License, applies to some +specially designated software packages--typically libraries--of the +Free Software Foundation and other authors who decide to use it. You +can use it too, but we suggest you first think carefully about whether +this license or the ordinary General Public License is the better +strategy to use in any particular case, based on the explanations below. + + When we speak of free software, we are referring to freedom of use, +not price. Our General Public Licenses are designed to make sure that +you have the freedom to distribute copies of free software (and charge +for this service if you wish); that you receive source code or can get +it if you want it; that you can change the software and use pieces of +it in new free programs; and that you are informed that you can do +these things. + + To protect your rights, we need to make restrictions that forbid +distributors to deny you these rights or to ask you to surrender these +rights. These restrictions translate to certain responsibilities for +you if you distribute copies of the library or if you modify it. + + For example, if you distribute copies of the library, whether gratis +or for a fee, you must give the recipients all the rights that we gave +you. You must make sure that they, too, receive or can get the source +code. If you link other code with the library, you must provide +complete object files to the recipients, so that they can relink them +with the library after making changes to the library and recompiling +it. And you must show them these terms so they know their rights. + + We protect your rights with a two-step method: (1) we copyright the +library, and (2) we offer you this license, which gives you legal +permission to copy, distribute and/or modify the library. + + To protect each distributor, we want to make it very clear that +there is no warranty for the free library. Also, if the library is +modified by someone else and passed on, the recipients should know +that what they have is not the original version, so that the original +author's reputation will not be affected by problems that might be +introduced by others. + + Finally, software patents pose a constant threat to the existence of +any free program. We wish to make sure that a company cannot +effectively restrict the users of a free program by obtaining a +restrictive license from a patent holder. Therefore, we insist that +any patent license obtained for a version of the library must be +consistent with the full freedom of use specified in this license. + + Most GNU software, including some libraries, is covered by the +ordinary GNU General Public License. This license, the GNU Lesser +General Public License, applies to certain designated libraries, and +is quite different from the ordinary General Public License. We use +this license for certain libraries in order to permit linking those +libraries into non-free programs. 
+ + When a program is linked with a library, whether statically or using +a shared library, the combination of the two is legally speaking a +combined work, a derivative of the original library. The ordinary +General Public License therefore permits such linking only if the +entire combination fits its criteria of freedom. The Lesser General +Public License permits more lax criteria for linking other code with +the library. + + We call this license the "Lesser" General Public License because it +does Less to protect the user's freedom than the ordinary General +Public License. It also provides other free software developers Less +of an advantage over competing non-free programs. These disadvantages +are the reason we use the ordinary General Public License for many +libraries. However, the Lesser license provides advantages in certain +special circumstances. + + For example, on rare occasions, there may be a special need to +encourage the widest possible use of a certain library, so that it becomes +a de-facto standard. To achieve this, non-free programs must be +allowed to use the library. A more frequent case is that a free +library does the same job as widely used non-free libraries. In this +case, there is little to gain by limiting the free library to free +software only, so we use the Lesser General Public License. + + In other cases, permission to use a particular library in non-free +programs enables a greater number of people to use a large body of +free software. For example, permission to use the GNU C Library in +non-free programs enables many more people to use the whole GNU +operating system, as well as its variant, the GNU/Linux operating +system. + + Although the Lesser General Public License is Less protective of the +users' freedom, it does ensure that the user of a program that is +linked with the Library has the freedom and the wherewithal to run +that program using a modified version of the Library. + + The precise terms and conditions for copying, distribution and +modification follow. Pay close attention to the difference between a +"work based on the library" and a "work that uses the library". The +former contains code derived from the library, whereas the latter must +be combined with the library in order to run. + + GNU LESSER GENERAL PUBLIC LICENSE + TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + + 0. This License Agreement applies to any software library or other +program which contains a notice placed by the copyright holder or +other authorized party saying it may be distributed under the terms of +this Lesser General Public License (also called "this License"). +Each licensee is addressed as "you". + + A "library" means a collection of software functions and/or data +prepared so as to be conveniently linked with application programs +(which use some of those functions and data) to form executables. + + The "Library", below, refers to any such software library or work +which has been distributed under these terms. A "work based on the +Library" means either the Library or any derivative work under +copyright law: that is to say, a work containing the Library or a +portion of it, either verbatim or with modifications and/or translated +straightforwardly into another language. (Hereinafter, translation is +included without limitation in the term "modification".) + + "Source code" for a work means the preferred form of the work for +making modifications to it. 
For a library, complete source code means +all the source code for all modules it contains, plus any associated +interface definition files, plus the scripts used to control compilation +and installation of the library. + + Activities other than copying, distribution and modification are not +covered by this License; they are outside its scope. The act of +running a program using the Library is not restricted, and output from +such a program is covered only if its contents constitute a work based +on the Library (independent of the use of the Library in a tool for +writing it). Whether that is true depends on what the Library does +and what the program that uses the Library does. + + 1. You may copy and distribute verbatim copies of the Library's +complete source code as you receive it, in any medium, provided that +you conspicuously and appropriately publish on each copy an +appropriate copyright notice and disclaimer of warranty; keep intact +all the notices that refer to this License and to the absence of any +warranty; and distribute a copy of this License along with the +Library. + + You may charge a fee for the physical act of transferring a copy, +and you may at your option offer warranty protection in exchange for a +fee. + + 2. You may modify your copy or copies of the Library or any portion +of it, thus forming a work based on the Library, and copy and +distribute such modifications or work under the terms of Section 1 +above, provided that you also meet all of these conditions: + + a) The modified work must itself be a software library. + + b) You must cause the files modified to carry prominent notices + stating that you changed the files and the date of any change. + + c) You must cause the whole of the work to be licensed at no + charge to all third parties under the terms of this License. + + d) If a facility in the modified Library refers to a function or a + table of data to be supplied by an application program that uses + the facility, other than as an argument passed when the facility + is invoked, then you must make a good faith effort to ensure that, + in the event an application does not supply such function or + table, the facility still operates, and performs whatever part of + its purpose remains meaningful. + + (For example, a function in a library to compute square roots has + a purpose that is entirely well-defined independent of the + application. Therefore, Subsection 2d requires that any + application-supplied function or table used by this function must + be optional: if the application does not supply it, the square + root function must still compute square roots.) + +These requirements apply to the modified work as a whole. If +identifiable sections of that work are not derived from the Library, +and can be reasonably considered independent and separate works in +themselves, then this License, and its terms, do not apply to those +sections when you distribute them as separate works. But when you +distribute the same sections as part of a whole which is a work based +on the Library, the distribution of the whole must be on the terms of +this License, whose permissions for other licensees extend to the +entire whole, and thus to each and every part regardless of who wrote +it. + +Thus, it is not the intent of this section to claim rights or contest +your rights to work written entirely by you; rather, the intent is to +exercise the right to control the distribution of derivative or +collective works based on the Library. 
+ +In addition, mere aggregation of another work not based on the Library +with the Library (or with a work based on the Library) on a volume of +a storage or distribution medium does not bring the other work under +the scope of this License. + + 3. You may opt to apply the terms of the ordinary GNU General Public +License instead of this License to a given copy of the Library. To do +this, you must alter all the notices that refer to this License, so +that they refer to the ordinary GNU General Public License, version 2, +instead of to this License. (If a newer version than version 2 of the +ordinary GNU General Public License has appeared, then you can specify +that version instead if you wish.) Do not make any other change in +these notices. + + Once this change is made in a given copy, it is irreversible for +that copy, so the ordinary GNU General Public License applies to all +subsequent copies and derivative works made from that copy. + + This option is useful when you wish to copy part of the code of +the Library into a program that is not a library. + + 4. You may copy and distribute the Library (or a portion or +derivative of it, under Section 2) in object code or executable form +under the terms of Sections 1 and 2 above provided that you accompany +it with the complete corresponding machine-readable source code, which +must be distributed under the terms of Sections 1 and 2 above on a +medium customarily used for software interchange. + + If distribution of object code is made by offering access to copy +from a designated place, then offering equivalent access to copy the +source code from the same place satisfies the requirement to +distribute the source code, even though third parties are not +compelled to copy the source along with the object code. + + 5. A program that contains no derivative of any portion of the +Library, but is designed to work with the Library by being compiled or +linked with it, is called a "work that uses the Library". Such a +work, in isolation, is not a derivative work of the Library, and +therefore falls outside the scope of this License. + + However, linking a "work that uses the Library" with the Library +creates an executable that is a derivative of the Library (because it +contains portions of the Library), rather than a "work that uses the +library". The executable is therefore covered by this License. +Section 6 states terms for distribution of such executables. + + When a "work that uses the Library" uses material from a header file +that is part of the Library, the object code for the work may be a +derivative work of the Library even though the source code is not. +Whether this is true is especially significant if the work can be +linked without the Library, or if the work is itself a library. The +threshold for this to be true is not precisely defined by law. + + If such an object file uses only numerical parameters, data +structure layouts and accessors, and small macros and small inline +functions (ten lines or less in length), then the use of the object +file is unrestricted, regardless of whether it is legally a derivative +work. (Executables containing this object code plus portions of the +Library will still fall under Section 6.) + + Otherwise, if the work is a derivative of the Library, you may +distribute the object code for the work under the terms of Section 6. +Any executables containing that work also fall under Section 6, +whether or not they are linked directly with the Library itself. + + 6. 
As an exception to the Sections above, you may also combine or +link a "work that uses the Library" with the Library to produce a +work containing portions of the Library, and distribute that work +under terms of your choice, provided that the terms permit +modification of the work for the customer's own use and reverse +engineering for debugging such modifications. + + You must give prominent notice with each copy of the work that the +Library is used in it and that the Library and its use are covered by +this License. You must supply a copy of this License. If the work +during execution displays copyright notices, you must include the +copyright notice for the Library among them, as well as a reference +directing the user to the copy of this License. Also, you must do one +of these things: + + a) Accompany the work with the complete corresponding + machine-readable source code for the Library including whatever + changes were used in the work (which must be distributed under + Sections 1 and 2 above); and, if the work is an executable linked + with the Library, with the complete machine-readable "work that + uses the Library", as object code and/or source code, so that the + user can modify the Library and then relink to produce a modified + executable containing the modified Library. (It is understood + that the user who changes the contents of definitions files in the + Library will not necessarily be able to recompile the application + to use the modified definitions.) + + b) Use a suitable shared library mechanism for linking with the + Library. A suitable mechanism is one that (1) uses at run time a + copy of the library already present on the user's computer system, + rather than copying library functions into the executable, and (2) + will operate properly with a modified version of the library, if + the user installs one, as long as the modified version is + interface-compatible with the version that the work was made with. + + c) Accompany the work with a written offer, valid for at + least three years, to give the same user the materials + specified in Subsection 6a, above, for a charge no more + than the cost of performing this distribution. + + d) If distribution of the work is made by offering access to copy + from a designated place, offer equivalent access to copy the above + specified materials from the same place. + + e) Verify that the user has already received a copy of these + materials or that you have already sent this user a copy. + + For an executable, the required form of the "work that uses the +Library" must include any data and utility programs needed for +reproducing the executable from it. However, as a special exception, +the materials to be distributed need not include anything that is +normally distributed (in either source or binary form) with the major +components (compiler, kernel, and so on) of the operating system on +which the executable runs, unless that component itself accompanies +the executable. + + It may happen that this requirement contradicts the license +restrictions of other proprietary libraries that do not normally +accompany the operating system. Such a contradiction means you cannot +use both them and the Library together in an executable that you +distribute. + + 7. 
You may place library facilities that are a work based on the +Library side-by-side in a single library together with other library +facilities not covered by this License, and distribute such a combined +library, provided that the separate distribution of the work based on +the Library and of the other library facilities is otherwise +permitted, and provided that you do these two things: + + a) Accompany the combined library with a copy of the same work + based on the Library, uncombined with any other library + facilities. This must be distributed under the terms of the + Sections above. + + b) Give prominent notice with the combined library of the fact + that part of it is a work based on the Library, and explaining + where to find the accompanying uncombined form of the same work. + + 8. You may not copy, modify, sublicense, link with, or distribute +the Library except as expressly provided under this License. Any +attempt otherwise to copy, modify, sublicense, link with, or +distribute the Library is void, and will automatically terminate your +rights under this License. However, parties who have received copies, +or rights, from you under this License will not have their licenses +terminated so long as such parties remain in full compliance. + + 9. You are not required to accept this License, since you have not +signed it. However, nothing else grants you permission to modify or +distribute the Library or its derivative works. These actions are +prohibited by law if you do not accept this License. Therefore, by +modifying or distributing the Library (or any work based on the +Library), you indicate your acceptance of this License to do so, and +all its terms and conditions for copying, distributing or modifying +the Library or works based on it. + + 10. Each time you redistribute the Library (or any work based on the +Library), the recipient automatically receives a license from the +original licensor to copy, distribute, link with or modify the Library +subject to these terms and conditions. You may not impose any further +restrictions on the recipients' exercise of the rights granted herein. +You are not responsible for enforcing compliance by third parties with +this License. + + 11. If, as a consequence of a court judgment or allegation of patent +infringement or for any other reason (not limited to patent issues), +conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot +distribute so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you +may not distribute the Library at all. For example, if a patent +license would not permit royalty-free redistribution of the Library by +all those who receive copies directly or indirectly through you, then +the only way you could satisfy both it and this License would be to +refrain entirely from distribution of the Library. + +If any portion of this section is held invalid or unenforceable under any +particular circumstance, the balance of the section is intended to apply, +and the section as a whole is intended to apply in other circumstances. 
+ +It is not the purpose of this section to induce you to infringe any +patents or other property right claims or to contest validity of any +such claims; this section has the sole purpose of protecting the +integrity of the free software distribution system which is +implemented by public license practices. Many people have made +generous contributions to the wide range of software distributed +through that system in reliance on consistent application of that +system; it is up to the author/donor to decide if he or she is willing +to distribute software through any other system and a licensee cannot +impose that choice. + +This section is intended to make thoroughly clear what is believed to +be a consequence of the rest of this License. + + 12. If the distribution and/or use of the Library is restricted in +certain countries either by patents or by copyrighted interfaces, the +original copyright holder who places the Library under this License may add +an explicit geographical distribution limitation excluding those countries, +so that distribution is permitted only in or among countries not thus +excluded. In such case, this License incorporates the limitation as if +written in the body of this License. + + 13. The Free Software Foundation may publish revised and/or new +versions of the Lesser General Public License from time to time. +Such new versions will be similar in spirit to the present version, +but may differ in detail to address new problems or concerns. + +Each version is given a distinguishing version number. If the Library +specifies a version number of this License which applies to it and +"any later version", you have the option of following the terms and +conditions either of that version or of any later version published by +the Free Software Foundation. If the Library does not specify a +license version number, you may choose any version ever published by +the Free Software Foundation. + + 14. If you wish to incorporate parts of the Library into other free +programs whose distribution conditions are incompatible with these, +write to the author to ask for permission. For software which is +copyrighted by the Free Software Foundation, write to the Free +Software Foundation; we sometimes make exceptions for this. Our +decision will be guided by the two goals of preserving the free status +of all derivatives of our free software and of promoting the sharing +and reuse of software generally. + + NO WARRANTY + + 15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO +WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. +EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR +OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY +KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE +LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME +THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. 
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN +WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY +AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU +FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR +CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE +LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING +RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A +FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF +SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH +DAMAGES. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Libraries + + If you develop a new library, and you want it to be of the greatest +possible use to the public, we recommend making it free software that +everyone can redistribute and change. You can do so by permitting +redistribution under these terms (or, alternatively, under the terms of the +ordinary General Public License). + + To apply these terms, attach the following notices to the library. It is +safest to attach them to the start of each source file to most effectively +convey the exclusion of warranty; and each file should have at least the +"copyright" line and a pointer to where the full notice is found. + + + Copyright (C) + + This library is free software; you can redistribute it and/or + modify it under the terms of the GNU Lesser General Public + License as published by the Free Software Foundation; either + version 2.1 of the License, or (at your option) any later version. + + This library is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + Lesser General Public License for more details. + + You should have received a copy of the GNU Lesser General Public + License along with this library; if not, write to the Free Software + Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + +Also add information on how to contact you by electronic and paper mail. + +You should also get your employer (if you work as a programmer) or your +school, if any, to sign a "copyright disclaimer" for the library, if +necessary. Here is a sample; alter the names: + + Yoyodyne, Inc., hereby disclaims all copyright interest in the + library `Frob' (a library for tweaking knobs) written by James Random Hacker. + + , 1 April 1990 + Ty Coon, President of Vice + +That's all there is to it! + + diff --git a/backend/src/apiserver/visualization/third_party_licenses/crcmod.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/crcmod.LICENSE new file mode 100644 index 00000000000..ee9e2810a40 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/crcmod.LICENSE @@ -0,0 +1,246 @@ + + + + + + + Introduction — crcmod v1.7 documentation + + + + + + + + + + + + +
+Introduction
+
+The software in this package is a Python module for generating objects that
+compute the Cyclic Redundancy Check (CRC). It includes an (optional) C
+extension for fast calculation, as well as a pure Python implementation.
+
+There is no attempt in this package to explain how the CRC works. There are a
+number of resources on the web that give a good explanation of the algorithms.
+Just do a Google search for “crc calculation” and browse till you find what you
+need. Another resource can be found in chapter 20 of the book “Numerical
+Recipes in C” by Press et al.
+
+This package allows the use of any 8, 16, 24, 32, or 64 bit CRC. You can
+generate a Python function for the selected polynomial or an instance of the
+crcmod.Crc class, which provides the same interface as the hashlib, md5 and
+sha modules from the Python standard library. A crcmod.Crc class instance
+can also generate C/C++ source code that can be used in another application.
+
+Guidelines
+
+Documentation is available here as well as from the doc strings.
+
+It is up to you to decide what polynomials to use in your application. Some
+common CRC algorithms are predefined in crcmod.predefined. If someone has
+not specified the polynomials to use, you will need to do some research to
+find one suitable for your application. Examples are available in the unit
+test script test.py.
+
+If you need to generate code for another language, I suggest you subclass the
+crcmod.Crc class and replace the method crcmod.Crc.generateCode(). Use
+crcmod.Crc.generateCode() as a model for the new version.
+
+Dependencies
+
+Python Version
+
+The package has separate code to support the 2.x and 3.x Python series.
+
+For the 2.x versions of Python, these versions have been tested: 2.4, 2.5,
+2.6, and 2.7. It may still work on earlier versions of Python 2.x, but these
+have not been recently tested.
+
+For the 3.x versions of Python, version 3.1 has been tested.
+
+Building C extension
+
+To build the C extension, the appropriate compiler tools for your platform
+must be installed. Refer to the Python documentation for building C
+extensions for details.
+
+Installation
+
+The crcmod package is installed using distutils. Run the following command:
+
+    python setup.py install
+
+If the extension module builds, it will be installed. Otherwise, the
+installation will include the pure Python version. This will run significantly
+slower than the extension module but will allow the package to be used.
+
+For Windows users who want to use the mingw32 compiler, run this command:
+
+    python setup.py build --compiler=mingw32 install
+
+For Python 3.x, the install process is the same but you need to use the 3.x
+interpreter.
+
+Unit Testing
+
+The crcmod package has a module crcmod.test, which contains unit tests for
+both crcmod and crcmod.predefined.
+
+When you first install crcmod, you should run the unit tests to make sure
+everything is installed properly. The test script performs a number of tests,
+including a comparison to the direct method, which uses a class implementing
+polynomials over the integers mod 2.
+
+To run the unit tests on Python >=2.5:
+
+    python -m crcmod.test
+
+Alternatively, in the test directory run:
+
+    python test_crcmod.py
+
+Code Generation
+
+The crcmod package is capable of generating C functions that can be compiled
+with a C or C++ compiler. In the test directory, there is an examples.py
+script that demonstrates how to use the code generator. The result of this is
+written out to the file examples.c. The generated code was checked to make
+sure it compiles with the GCC compiler.
+
+License
+
+The crcmod package is released under the MIT license. See the LICENSE file
+for details.
+
+References
+
+See also:
+
+* binascii.crc32() function from the binascii module: CRC-32 implementation
+* zlib.crc32() function from the zlib module: CRC-32 implementation
+* Module hashlib: secure hash and message digest algorithms
+* Module md5: RSA’s MD5 message digest algorithm
+* Module sha: NIST’s secure hash algorithm, SHA
+* Module hmac: keyed-hashing for message authentication
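
The API the page above describes is small; a minimal usage sketch for
illustration follows (it assumes the crcmod package is installed; the value
0xcbf43926 is the standard CRC-32 check result for the bytes "123456789"):

    import crcmod
    import crcmod.predefined

    # Generate a CRC function from an explicit polynomial (CRC-32 here).
    # The leading bit of the 33-bit polynomial selects the CRC width.
    crc32_fun = crcmod.mkCrcFun(0x104c11db7, initCrc=0, xorOut=0xFFFFFFFF)
    assert hex(crc32_fun(b'123456789')) == '0xcbf43926'

    # Or pick a predefined algorithm by name and use the hashlib-style
    # class interface the documentation mentions.
    crc16 = crcmod.predefined.Crc('crc-16')
    crc16.update(b'123456789')
    assert crc16.hexdigest() == 'BB3D'
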
+ + + + \ No newline at end of file diff --git a/backend/src/apiserver/visualization/third_party_licenses/cryptography.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/cryptography.LICENSE new file mode 100644 index 00000000000..fe5af51408c --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/cryptography.LICENSE @@ -0,0 +1,6 @@ +This software is made available under the terms of *either* of the licenses +found in LICENSE.APACHE or LICENSE.BSD. Contributions to cryptography are made +under the terms of *both* these licenses. + +The code used in the OpenSSL locking callback and OS random engine is derived +from CPython, and is licensed under the terms of the PSF License Agreement. diff --git a/backend/src/apiserver/visualization/third_party_licenses/decorator.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/decorator.LICENSE new file mode 100644 index 00000000000..b0ade0487e9 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/decorator.LICENSE @@ -0,0 +1,26 @@ +Copyright (c) 2005-2018, Michele Simionato +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + Redistributions in bytecode form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS +OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR +TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE +USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH +DAMAGE. diff --git a/backend/src/apiserver/visualization/third_party_licenses/defusedxml.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/defusedxml.LICENSE new file mode 100644 index 00000000000..311690c68fe --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/defusedxml.LICENSE @@ -0,0 +1,49 @@ +PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 +-------------------------------------------- + +1. This LICENSE AGREEMENT is between the Python Software Foundation +("PSF"), and the Individual or Organization ("Licensee") accessing and +otherwise using this software ("Python") in source or binary form and +its associated documentation. + +2. 
Subject to the terms and conditions of this License Agreement, PSF +hereby grants Licensee a nonexclusive, royalty-free, world-wide +license to reproduce, analyze, test, perform and/or display publicly, +prepare derivative works, distribute, and otherwise use Python +alone or in any derivative version, provided, however, that PSF's +License Agreement and PSF's notice of copyright, i.e., "Copyright (c) +2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 Python Software Foundation; +All Rights Reserved" are retained in Python alone or in any derivative +version prepared by Licensee. + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python. + +4. PSF is making Python available to Licensee on an "AS IS" +basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. Nothing in this License Agreement shall be deemed to create any +relationship of agency, partnership, or joint venture between PSF and +Licensee. This License Agreement does not grant permission to use PSF +trademarks or trade name in a trademark sense to endorse or promote +products or services of Licensee, or any third party. + +8. By copying, installing or otherwise using Python, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + diff --git a/backend/src/apiserver/visualization/third_party_licenses/dill.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/dill.LICENSE new file mode 100644 index 00000000000..6a5598c090e --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/dill.LICENSE @@ -0,0 +1,35 @@ +Copyright (c) 2004-2016 California Institute of Technology. +Copyright (c) 2016-2020 The Uncertainty Quantification Foundation. +All rights reserved. + +This software is available subject to the conditions and terms laid +out below. By downloading and using this software you are agreeing +to the following conditions. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met:: + + - Redistribution of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + - Redistribution in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentations and/or other materials provided with the distribution. + + - Neither the names of the copyright holders nor the names of any of + the contributors may be used to endorse or promote products derived + from this software without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED +TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR +CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; +OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR +OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF +ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + diff --git a/backend/src/apiserver/visualization/third_party_licenses/docopt.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/docopt.LICENSE new file mode 100644 index 00000000000..58ff1bc8e7a --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/docopt.LICENSE @@ -0,0 +1,23 @@ +Copyright (c) 2012 Vladimir Keleshev, + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated +documentation files (the "Software"), to deal in the Software +without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to +whom the Software is furnished to do so, subject to the +following conditions: + +The above copyright notice and this permission notice shall +be included in all copies or substantial portions of the +Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR +PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/backend/src/apiserver/visualization/third_party_licenses/entrypoints.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/entrypoints.LICENSE new file mode 100644 index 00000000000..11be2a75530 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/entrypoints.LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015 Thomas Kluyver and contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/backend/src/apiserver/visualization/third_party_licenses/fastavro.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/fastavro.LICENSE new file mode 100644 index 00000000000..9e380df0335 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/fastavro.LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2011 Miki Tebeka + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/backend/src/apiserver/visualization/third_party_licenses/fasteners.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/fasteners.LICENSE new file mode 100644 index 00000000000..c332405d76b --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/fasteners.LICENSE @@ -0,0 +1,176 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
diff --git a/backend/src/apiserver/visualization/third_party_licenses/future.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/future.LICENSE
new file mode 100644
index 00000000000..fa2e48006e3
--- /dev/null
+++ b/backend/src/apiserver/visualization/third_party_licenses/future.LICENSE
@@ -0,0 +1,663 @@
+Licensing and credits — Python-Future documentation
+
+Licence
+
+The software is distributed under an MIT licence. The text is as follows
+(from LICENSE.txt):
+
Copyright (c) 2013-2016 Python Charmers Pty Ltd, Australia
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+
+Authors
+
+Python-Future is written and maintained by Ed Schofield with the help of
+various contributors:
+
+Development Lead
+
+  • Ed Schofield
+
+Patches
+
+  • Jacob Beck
+  • Denis Cornehl
+  • Nicolas Delaby
+  • Corey Farwell
+  • Eric Firing
+  • Michael Joseph
+  • Waldemar Kornewald
+  • Alexey Kotlyarov
+  • Lion Krischer
+  • Marcin Kuzminski
+  • Joshua Landau
+  • German Larrain
+  • Chris Lasher
+  • Elliott Sales de Andrade
+  • Tim Shaffer
+  • Daniel Szoska
+  • Jeff Tratner
+  • Tim Tröndle
+  • Brad Walker
+  • Mystic-Mirage (GitHub)
+  • str4d (GitHub)
+  • 9seconds (GitHub)
+  • Varriount (GitHub)
+
+Suggestions and Feedback
+
+  • Chris Adams
+  • Martijn Faassen
+  • Lion Krischer
+  • Danielle Madeley
+  • Val Markovic
+  • wluebbe (GitHub)
+
+Other Credits
+
+  • The backported super() and range() functions are derived from Ryan
+    Kelly’s magicsuper module and Dan Crosta’s xrange module.
+  • The futurize and pasteurize scripts use lib2to3, lib3to2, and
+    parts of Armin Ronacher’s python-modernize code.
+  • The python_2_unicode_compatible decorator is from Django. The
+    implements_iterator and with_metaclass decorators are from Jinja2.
+  • The exec_ function and some others in future.utils are from the
+    six module by Benjamin Peterson.
+  • The raise_ and raise_with_traceback functions were contributed by
+    Jeff Tratner.
+  • A working version of raise_from was contributed by Varriount (GitHub).
+  • Documentation is generated with Sphinx using the
+    sphinx-bootstrap theme.
+  • past.translation is inspired by and borrows some code from Sanjay Vinip’s
+    uprefix module.
+
+© Copyright 2013-2016, Python Charmers Pty Ltd, Australia.
+ + + + \ No newline at end of file diff --git a/backend/src/apiserver/visualization/third_party_licenses/gast.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/gast.LICENSE new file mode 100644 index 00000000000..317cc08453d --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/gast.LICENSE @@ -0,0 +1,29 @@ +Copyright (c) 2016, Serge Guelton +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + + Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + + Neither the name of HPCProject, Serge Guelton nor the names of its + contributors may be used to endorse or promote products derived from this + software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + diff --git a/backend/src/apiserver/visualization/third_party_licenses/gcsfs.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/gcsfs.LICENSE new file mode 100644 index 00000000000..a91fe2b2fd1 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/gcsfs.LICENSE @@ -0,0 +1,28 @@ +Copyright (c) 2017, Continuum Analytics, Inc. and contributors +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + +Redistributions of source code must retain the above copyright notice, +this list of conditions and the following disclaimer. + +Redistributions in binary form must reproduce the above copyright notice, +this list of conditions and the following disclaimer in the documentation +and/or other materials provided with the distribution. + +Neither the name of Continuum Analytics nor the names of any contributors +may be used to endorse or promote products derived from this software +without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF +THE POSSIBILITY OF SUCH DAMAGE. diff --git a/backend/src/apiserver/visualization/third_party_licenses/google-api-core.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/google-api-core.LICENSE new file mode 100644 index 00000000000..d6456956733 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/google-api-core.LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/backend/src/apiserver/visualization/third_party_licenses/google-api-python-client.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/google-api-python-client.LICENSE new file mode 100644 index 00000000000..2987b3b95e9 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/google-api-python-client.LICENSE @@ -0,0 +1,22 @@ + Copyright 2014 Google Inc. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +Dependent Modules +================= + +This code has the following dependencies +above and beyond the Python standard library: + +uritemplates - Apache License 2.0 +httplib2 - MIT License diff --git a/backend/src/apiserver/visualization/third_party_licenses/google-apitools.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/google-apitools.LICENSE new file mode 100644 index 00000000000..d6456956733 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/google-apitools.LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/backend/src/apiserver/visualization/third_party_licenses/google-auth-httplib2.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/google-auth-httplib2.LICENSE new file mode 100644 index 00000000000..261eeb9e9f8 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/google-auth-httplib2.LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/backend/src/apiserver/visualization/third_party_licenses/google-auth-oauthlib.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/google-auth-oauthlib.LICENSE new file mode 100644 index 00000000000..261eeb9e9f8 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/google-auth-oauthlib.LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/backend/src/apiserver/visualization/third_party_licenses/google-auth.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/google-auth.LICENSE new file mode 100644 index 00000000000..261eeb9e9f8 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/google-auth.LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/backend/src/apiserver/visualization/third_party_licenses/google-cloud-bigquery.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/google-cloud-bigquery.LICENSE new file mode 100644 index 00000000000..d6456956733 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/google-cloud-bigquery.LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/backend/src/apiserver/visualization/third_party_licenses/google-cloud-bigtable.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/google-cloud-bigtable.LICENSE new file mode 100644 index 00000000000..d6456956733 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/google-cloud-bigtable.LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/backend/src/apiserver/visualization/third_party_licenses/google-cloud-core.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/google-cloud-core.LICENSE new file mode 100644 index 00000000000..d6456956733 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/google-cloud-core.LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/backend/src/apiserver/visualization/third_party_licenses/google-cloud-datastore.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/google-cloud-datastore.LICENSE new file mode 100644 index 00000000000..d6456956733 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/google-cloud-datastore.LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/backend/src/apiserver/visualization/third_party_licenses/google-cloud-dlp.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/google-cloud-dlp.LICENSE new file mode 100644 index 00000000000..a8ee855de2a --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/google-cloud-dlp.LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + https://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/backend/src/apiserver/visualization/third_party_licenses/google-cloud-language.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/google-cloud-language.LICENSE new file mode 100644 index 00000000000..d6456956733 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/google-cloud-language.LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/backend/src/apiserver/visualization/third_party_licenses/google-cloud-pubsub.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/google-cloud-pubsub.LICENSE new file mode 100644 index 00000000000..d6456956733 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/google-cloud-pubsub.LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/backend/src/apiserver/visualization/third_party_licenses/google-cloud-spanner.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/google-cloud-spanner.LICENSE new file mode 100644 index 00000000000..a8ee855de2a --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/google-cloud-spanner.LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + https://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/backend/src/apiserver/visualization/third_party_licenses/google-cloud-videointelligence.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/google-cloud-videointelligence.LICENSE new file mode 100644 index 00000000000..a8ee855de2a --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/google-cloud-videointelligence.LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + https://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/backend/src/apiserver/visualization/third_party_licenses/google-cloud-vision.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/google-cloud-vision.LICENSE new file mode 100644 index 00000000000..a8ee855de2a --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/google-cloud-vision.LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + https://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/backend/src/apiserver/visualization/third_party_licenses/google-pasta.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/google-pasta.LICENSE new file mode 100644 index 00000000000..261eeb9e9f8 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/google-pasta.LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/backend/src/apiserver/visualization/third_party_licenses/google-resumable-media.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/google-resumable-media.LICENSE new file mode 100644 index 00000000000..d6456956733 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/google-resumable-media.LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/backend/src/apiserver/visualization/third_party_licenses/googleapis-common-protos.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/googleapis-common-protos.LICENSE new file mode 100644 index 00000000000..261eeb9e9f8 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/googleapis-common-protos.LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/backend/src/apiserver/visualization/third_party_licenses/grpc-google-iam-v1.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/grpc-google-iam-v1.LICENSE new file mode 100644 index 00000000000..261eeb9e9f8 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/grpc-google-iam-v1.LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/backend/src/apiserver/visualization/third_party_licenses/grpcio-gcp.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/grpcio-gcp.LICENSE new file mode 100644 index 00000000000..d6456956733 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/grpcio-gcp.LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/backend/src/apiserver/visualization/third_party_licenses/grpcio.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/grpcio.LICENSE new file mode 100644 index 00000000000..d6456956733 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/grpcio.LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/backend/src/apiserver/visualization/third_party_licenses/h5py.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/h5py.LICENSE new file mode 100644 index 00000000000..28ca56277f0 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/h5py.LICENSE @@ -0,0 +1,30 @@ +Copyright (c) 2008 Andrew Collette and contributors +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the + distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/backend/src/apiserver/visualization/third_party_licenses/hdfs.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/hdfs.LICENSE new file mode 100644 index 00000000000..7fdea0a686a --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/hdfs.LICENSE @@ -0,0 +1,19 @@ +Copyright (c) 2013, Matthieu Monsch. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/backend/src/apiserver/visualization/third_party_licenses/httplib2.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/httplib2.LICENSE new file mode 100644 index 00000000000..ae382866939 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/httplib2.LICENSE @@ -0,0 +1,23 @@ +Httplib2 Software License + +Copyright (c) 2006 by Joe Gregorio + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without restriction, +including without limitation the rights to use, copy, modify, merge, +publish, distribute, sublicense, and/or sell copies of the Software, +and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/backend/src/apiserver/visualization/third_party_licenses/idna.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/idna.LICENSE new file mode 100644 index 00000000000..63664b82e7a --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/idna.LICENSE @@ -0,0 +1,34 @@ +License +------- + +License: bsd-3-clause + +Copyright (c) 2013-2020, Kim Davies. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +#. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +#. Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided with + the distribution. + +#. 
Neither the name of the copyright holder nor the names of the
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+#. THIS SOFTWARE IS PROVIDED BY THE CONTRIBUTORS "AS IS" AND ANY
+ EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR
+ CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
+ USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
+ DAMAGE.
diff --git a/backend/src/apiserver/visualization/third_party_licenses/importlib-metadata.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/importlib-metadata.LICENSE
new file mode 100644
index 00000000000..52b16d9a1a5
--- /dev/null
+++ b/backend/src/apiserver/visualization/third_party_licenses/importlib-metadata.LICENSE
@@ -0,0 +1,293 @@
+Sign in · GitLab
+ + diff --git a/backend/src/apiserver/visualization/third_party_licenses/ipykernel.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/ipykernel.LICENSE new file mode 100644 index 00000000000..93f45a894c6 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/ipykernel.LICENSE @@ -0,0 +1,59 @@ +# Licensing terms + +This project is licensed under the terms of the Modified BSD License +(also known as New or Revised or 3-Clause BSD), as follows: + +- Copyright (c) 2015, IPython Development Team + +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +Redistributions of source code must retain the above copyright notice, this +list of conditions and the following disclaimer. + +Redistributions in binary form must reproduce the above copyright notice, this +list of conditions and the following disclaimer in the documentation and/or +other materials provided with the distribution. + +Neither the name of the IPython Development Team nor the names of its +contributors may be used to endorse or promote products derived from this +software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +## About the IPython Development Team + +The IPython Development Team is the set of all contributors to the IPython project. +This includes all of the IPython subprojects. + +The core team that coordinates development on GitHub can be found here: +https://github.com/ipython/. + +## Our Copyright Policy + +IPython uses a shared copyright model. Each contributor maintains copyright +over their contributions to IPython. But, it is important to note that these +contributions are typically only changes to the repositories. Thus, the IPython +source code, in its entirety is not the copyright of any single person or +institution. Instead, it is the collective copyright of the entire IPython +Development Team. If individual contributors want to maintain a record of what +changes/contributions they have specific copyright on, they should indicate +their copyright in the commit message of the change, when they commit the +change to one of the IPython repositories. + +With this in mind, the following banner should be used in any source code file +to indicate the copyright and license terms: + + # Copyright (c) IPython Development Team. + # Distributed under the terms of the Modified BSD License. 
diff --git a/backend/src/apiserver/visualization/third_party_licenses/ipython-genutils.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/ipython-genutils.LICENSE new file mode 100644 index 00000000000..87b47806b1a --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/ipython-genutils.LICENSE @@ -0,0 +1,59 @@ +# Licensing terms + +This project is licensed under the terms of the Modified BSD License +(also known as New or Revised or 3-Clause BSD), as follows: + +- Copyright (c) 2001-, IPython Development Team + +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +Redistributions of source code must retain the above copyright notice, this +list of conditions and the following disclaimer. + +Redistributions in binary form must reproduce the above copyright notice, this +list of conditions and the following disclaimer in the documentation and/or +other materials provided with the distribution. + +Neither the name of the IPython Development Team nor the names of its +contributors may be used to endorse or promote products derived from this +software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +## About the IPython Development Team + +The IPython Development Team is the set of all contributors to the IPython project. +This includes all of the IPython subprojects. + +The core team that coordinates development on GitHub can be found here: +https://github.com/jupyter/. + +## Our Copyright Policy + +IPython uses a shared copyright model. Each contributor maintains copyright +over their contributions to IPython. But, it is important to note that these +contributions are typically only changes to the repositories. Thus, the IPython +source code, in its entirety is not the copyright of any single person or +institution. Instead, it is the collective copyright of the entire IPython +Development Team. If individual contributors want to maintain a record of what +changes/contributions they have specific copyright on, they should indicate +their copyright in the commit message of the change, when they commit the +change to one of the IPython repositories. + +With this in mind, the following banner should be used in any source code file +to indicate the copyright and license terms: + + # Copyright (c) IPython Development Team. + # Distributed under the terms of the Modified BSD License. 
diff --git a/backend/src/apiserver/visualization/third_party_licenses/ipython.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/ipython.LICENSE new file mode 100644 index 00000000000..d4bb8d39dfe --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/ipython.LICENSE @@ -0,0 +1,33 @@ +BSD 3-Clause License + +- Copyright (c) 2008-Present, IPython Development Team +- Copyright (c) 2001-2007, Fernando Perez +- Copyright (c) 2001, Janko Hauser +- Copyright (c) 2001, Nathaniel Gray + +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/backend/src/apiserver/visualization/third_party_licenses/ipywidgets.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/ipywidgets.LICENSE new file mode 100644 index 00000000000..deb2c38c8ec --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/ipywidgets.LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2015 Project Jupyter Contributors +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/backend/src/apiserver/visualization/third_party_licenses/itables.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/itables.LICENSE new file mode 100644 index 00000000000..2ebfbf7dfc4 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/itables.LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2019 Marc Wouts + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/backend/src/apiserver/visualization/third_party_licenses/jedi.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/jedi.LICENSE new file mode 100644 index 00000000000..94f954567b1 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/jedi.LICENSE @@ -0,0 +1,24 @@ +All contributions towards Jedi are MIT licensed. + +------------------------------------------------------------------------------- +The MIT License (MIT) + +Copyright (c) <2013> + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/backend/src/apiserver/visualization/third_party_licenses/joblib.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/joblib.LICENSE new file mode 100644 index 00000000000..0f469af8252 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/joblib.LICENSE @@ -0,0 +1,29 @@ +BSD 3-Clause License + +Copyright (c) 2008-2016, The joblib developers. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/backend/src/apiserver/visualization/third_party_licenses/jsonschema.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/jsonschema.LICENSE new file mode 100644 index 00000000000..af9cfbdb134 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/jsonschema.LICENSE @@ -0,0 +1,19 @@ +Copyright (c) 2013 Julian Berman + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/backend/src/apiserver/visualization/third_party_licenses/jupyter-client.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/jupyter-client.LICENSE new file mode 100644 index 00000000000..bd6397d4582 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/jupyter-client.LICENSE @@ -0,0 +1,60 @@ +# Licensing terms + +This project is licensed under the terms of the Modified BSD License +(also known as New or Revised or 3-Clause BSD), as follows: + +- Copyright (c) 2001-2015, IPython Development Team +- Copyright (c) 2015-, Jupyter Development Team + +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +Redistributions of source code must retain the above copyright notice, this +list of conditions and the following disclaimer. + +Redistributions in binary form must reproduce the above copyright notice, this +list of conditions and the following disclaimer in the documentation and/or +other materials provided with the distribution. + +Neither the name of the Jupyter Development Team nor the names of its +contributors may be used to endorse or promote products derived from this +software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +## About the Jupyter Development Team + +The Jupyter Development Team is the set of all contributors to the Jupyter project. +This includes all of the Jupyter subprojects. + +The core team that coordinates development on GitHub can be found here: +https://github.com/jupyter/. + +## Our Copyright Policy + +Jupyter uses a shared copyright model. Each contributor maintains copyright +over their contributions to Jupyter. But, it is important to note that these +contributions are typically only changes to the repositories. Thus, the Jupyter +source code, in its entirety is not the copyright of any single person or +institution. Instead, it is the collective copyright of the entire Jupyter +Development Team. If individual contributors want to maintain a record of what +changes/contributions they have specific copyright on, they should indicate +their copyright in the commit message of the change, when they commit the +change to one of the Jupyter repositories. + +With this in mind, the following banner should be used in any source code file +to indicate the copyright and license terms: + + # Copyright (c) Jupyter Development Team. + # Distributed under the terms of the Modified BSD License. 
diff --git a/backend/src/apiserver/visualization/third_party_licenses/jupyter-console.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/jupyter-console.LICENSE new file mode 100644 index 00000000000..bd6397d4582 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/jupyter-console.LICENSE @@ -0,0 +1,60 @@ +# Licensing terms + +This project is licensed under the terms of the Modified BSD License +(also known as New or Revised or 3-Clause BSD), as follows: + +- Copyright (c) 2001-2015, IPython Development Team +- Copyright (c) 2015-, Jupyter Development Team + +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +Redistributions of source code must retain the above copyright notice, this +list of conditions and the following disclaimer. + +Redistributions in binary form must reproduce the above copyright notice, this +list of conditions and the following disclaimer in the documentation and/or +other materials provided with the distribution. + +Neither the name of the Jupyter Development Team nor the names of its +contributors may be used to endorse or promote products derived from this +software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +## About the Jupyter Development Team + +The Jupyter Development Team is the set of all contributors to the Jupyter project. +This includes all of the Jupyter subprojects. + +The core team that coordinates development on GitHub can be found here: +https://github.com/jupyter/. + +## Our Copyright Policy + +Jupyter uses a shared copyright model. Each contributor maintains copyright +over their contributions to Jupyter. But, it is important to note that these +contributions are typically only changes to the repositories. Thus, the Jupyter +source code, in its entirety is not the copyright of any single person or +institution. Instead, it is the collective copyright of the entire Jupyter +Development Team. If individual contributors want to maintain a record of what +changes/contributions they have specific copyright on, they should indicate +their copyright in the commit message of the change, when they commit the +change to one of the Jupyter repositories. + +With this in mind, the following banner should be used in any source code file +to indicate the copyright and license terms: + + # Copyright (c) Jupyter Development Team. + # Distributed under the terms of the Modified BSD License. 
diff --git a/backend/src/apiserver/visualization/third_party_licenses/jupyter-core.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/jupyter-core.LICENSE new file mode 100644 index 00000000000..e5ae344d920 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/jupyter-core.LICENSE @@ -0,0 +1,61 @@ +# The Jupyter licensing terms + +Jupyter is licensed under the terms of the Modified BSD License (also known as +New or Revised or 3-Clause BSD), as follows: + +- Copyright (c) 2015-, Jupyter Development Team + +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +Redistributions of source code must retain the above copyright notice, this +list of conditions and the following disclaimer. + +Redistributions in binary form must reproduce the above copyright notice, this +list of conditions and the following disclaimer in the documentation and/or +other materials provided with the distribution. + +Neither the name of the Jupyter Development Team nor the names of its +contributors may be used to endorse or promote products derived from this +software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +## About the Jupyter Development Team + +The Jupyter Development Team is the set of all contributors to the Jupyter +project. This includes all of the Jupyter subprojects. A full list with +details is kept in the documentation directory, in the file +`about/credits.txt`. + +The core team that coordinates development on GitHub can be found here: +https://github.com/ipython/. + +## Our Copyright Policy + +Jupyter uses a shared copyright model. Each contributor maintains copyright +over their contributions to Jupyter. It is important to note that these +contributions are typically only changes to the repositories. Thus, the Jupyter +source code in its entirety is not the copyright of any single person or +institution. Instead, it is the collective copyright of the entire Jupyter +Development Team. If individual contributors want to maintain a record of what +changes/contributions they have specific copyright on, they should indicate +their copyright in the commit message of the change, when they commit the +change to one of the Jupyter repositories. + +With this in mind, the following banner should be used in any source code file +to indicate the copyright and license terms: + + # Copyright (c) Jupyter Development Team. + # Distributed under the terms of the Modified BSD License. 
diff --git a/backend/src/apiserver/visualization/third_party_licenses/jupyter.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/jupyter.LICENSE new file mode 100644 index 00000000000..295746a56ef --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/jupyter.LICENSE @@ -0,0 +1,29 @@ +BSD 3-Clause License + +Copyright (c) 2017, Project Jupyter Contributors +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. \ No newline at end of file diff --git a/backend/src/apiserver/visualization/third_party_licenses/keyring.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/keyring.LICENSE new file mode 100644 index 00000000000..353924be0e5 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/keyring.LICENSE @@ -0,0 +1,19 @@ +Copyright Jason R. Coombs + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. 
diff --git a/backend/src/apiserver/visualization/third_party_licenses/keyrings.alt.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/keyrings.alt.LICENSE new file mode 100644 index 00000000000..5e795a61f30 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/keyrings.alt.LICENSE @@ -0,0 +1,7 @@ +Copyright Jason R. Coombs + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/backend/src/apiserver/visualization/third_party_licenses/mistune.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/mistune.LICENSE new file mode 100644 index 00000000000..b141cdb9f3c --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/mistune.LICENSE @@ -0,0 +1,14 @@ +Copyright (c) 2014, Hsiaoming Yang + +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. + +* Neither the name of the creator nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. + + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/backend/src/apiserver/visualization/third_party_licenses/mock.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/mock.LICENSE new file mode 100644 index 00000000000..c820924387d --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/mock.LICENSE @@ -0,0 +1,26 @@ +Copyright (c) 2003-2013, Michael Foord & the mock team +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/backend/src/apiserver/visualization/third_party_licenses/monotonic.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/monotonic.LICENSE new file mode 100644 index 00000000000..e06d2081865 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/monotonic.LICENSE @@ -0,0 +1,202 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + diff --git a/backend/src/apiserver/visualization/third_party_licenses/nbconvert.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/nbconvert.LICENSE new file mode 100644 index 00000000000..bd6397d4582 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/nbconvert.LICENSE @@ -0,0 +1,60 @@ +# Licensing terms + +This project is licensed under the terms of the Modified BSD License +(also known as New or Revised or 3-Clause BSD), as follows: + +- Copyright (c) 2001-2015, IPython Development Team +- Copyright (c) 2015-, Jupyter Development Team + +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +Redistributions of source code must retain the above copyright notice, this +list of conditions and the following disclaimer. 
+ +Redistributions in binary form must reproduce the above copyright notice, this +list of conditions and the following disclaimer in the documentation and/or +other materials provided with the distribution. + +Neither the name of the Jupyter Development Team nor the names of its +contributors may be used to endorse or promote products derived from this +software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +## About the Jupyter Development Team + +The Jupyter Development Team is the set of all contributors to the Jupyter project. +This includes all of the Jupyter subprojects. + +The core team that coordinates development on GitHub can be found here: +https://github.com/jupyter/. + +## Our Copyright Policy + +Jupyter uses a shared copyright model. Each contributor maintains copyright +over their contributions to Jupyter. But, it is important to note that these +contributions are typically only changes to the repositories. Thus, the Jupyter +source code, in its entirety is not the copyright of any single person or +institution. Instead, it is the collective copyright of the entire Jupyter +Development Team. If individual contributors want to maintain a record of what +changes/contributions they have specific copyright on, they should indicate +their copyright in the commit message of the change, when they commit the +change to one of the Jupyter repositories. + +With this in mind, the following banner should be used in any source code file +to indicate the copyright and license terms: + + # Copyright (c) Jupyter Development Team. + # Distributed under the terms of the Modified BSD License. diff --git a/backend/src/apiserver/visualization/third_party_licenses/nbformat.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/nbformat.LICENSE new file mode 100644 index 00000000000..bd6397d4582 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/nbformat.LICENSE @@ -0,0 +1,60 @@ +# Licensing terms + +This project is licensed under the terms of the Modified BSD License +(also known as New or Revised or 3-Clause BSD), as follows: + +- Copyright (c) 2001-2015, IPython Development Team +- Copyright (c) 2015-, Jupyter Development Team + +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +Redistributions of source code must retain the above copyright notice, this +list of conditions and the following disclaimer. + +Redistributions in binary form must reproduce the above copyright notice, this +list of conditions and the following disclaimer in the documentation and/or +other materials provided with the distribution. 
+ +Neither the name of the Jupyter Development Team nor the names of its +contributors may be used to endorse or promote products derived from this +software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +## About the Jupyter Development Team + +The Jupyter Development Team is the set of all contributors to the Jupyter project. +This includes all of the Jupyter subprojects. + +The core team that coordinates development on GitHub can be found here: +https://github.com/jupyter/. + +## Our Copyright Policy + +Jupyter uses a shared copyright model. Each contributor maintains copyright +over their contributions to Jupyter. But, it is important to note that these +contributions are typically only changes to the repositories. Thus, the Jupyter +source code, in its entirety is not the copyright of any single person or +institution. Instead, it is the collective copyright of the entire Jupyter +Development Team. If individual contributors want to maintain a record of what +changes/contributions they have specific copyright on, they should indicate +their copyright in the commit message of the change, when they commit the +change to one of the Jupyter repositories. + +With this in mind, the following banner should be used in any source code file +to indicate the copyright and license terms: + + # Copyright (c) Jupyter Development Team. + # Distributed under the terms of the Modified BSD License. diff --git a/backend/src/apiserver/visualization/third_party_licenses/notebook.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/notebook.LICENSE new file mode 100644 index 00000000000..d8478fc3779 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/notebook.LICENSE @@ -0,0 +1,58 @@ +This project is licensed under the terms of the Modified BSD License +(also known as New or Revised or 3-Clause BSD), as follows: + +- Copyright (c) 2001-2015, IPython Development Team +- Copyright (c) 2015-, Jupyter Development Team + +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +Redistributions of source code must retain the above copyright notice, this +list of conditions and the following disclaimer. + +Redistributions in binary form must reproduce the above copyright notice, this +list of conditions and the following disclaimer in the documentation and/or +other materials provided with the distribution. + +Neither the name of the Jupyter Development Team nor the names of its +contributors may be used to endorse or promote products derived from this +software without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +## About the Jupyter Development Team + +The Jupyter Development Team is the set of all contributors to the Jupyter project. +This includes all of the Jupyter subprojects. + +The core team that coordinates development on GitHub can be found here: +https://github.com/jupyter/. + +## Our Copyright Policy + +Jupyter uses a shared copyright model. Each contributor maintains copyright +over their contributions to Jupyter. But, it is important to note that these +contributions are typically only changes to the repositories. Thus, the Jupyter +source code, in its entirety is not the copyright of any single person or +institution. Instead, it is the collective copyright of the entire Jupyter +Development Team. If individual contributors want to maintain a record of what +changes/contributions they have specific copyright on, they should indicate +their copyright in the commit message of the change, when they commit the +change to one of the Jupyter repositories. + +With this in mind, the following banner should be used in any source code file +to indicate the copyright and license terms: + + # Copyright (c) Jupyter Development Team. + # Distributed under the terms of the Modified BSD License. diff --git a/backend/src/apiserver/visualization/third_party_licenses/numpy.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/numpy.LICENSE new file mode 100644 index 00000000000..8ce64521862 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/numpy.LICENSE @@ -0,0 +1,30 @@ +Copyright (c) 2005-2020, NumPy Developers. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + + * Neither the name of the NumPy Developers nor the names of any + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/backend/src/apiserver/visualization/third_party_licenses/oauth2client.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/oauth2client.LICENSE new file mode 100644 index 00000000000..c8d76dfc541 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/oauth2client.LICENSE @@ -0,0 +1,210 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +Dependent Modules +================= + +This code has the following dependencies +above and beyond the Python standard library: + +httplib2 - MIT License diff --git a/backend/src/apiserver/visualization/third_party_licenses/oauthlib.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/oauthlib.LICENSE new file mode 100644 index 00000000000..d5a9e9acd07 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/oauthlib.LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2019 The OAuthlib Community +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + 3. Neither the name of this project nor the names of its contributors may + be used to endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/backend/src/apiserver/visualization/third_party_licenses/opt-einsum.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/opt-einsum.LICENSE new file mode 100644 index 00000000000..e3d77df3e14 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/opt-einsum.LICENSE @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) 2014 Daniel Smith + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + diff --git a/backend/src/apiserver/visualization/third_party_licenses/packaging.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/packaging.LICENSE new file mode 100644 index 00000000000..b740c9e82be --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/packaging.LICENSE @@ -0,0 +1,724 @@ + + + + + +Creative Commons — Attribution-ShareAlike 3.0 Unported + — CC BY-SA 3.0 + + + + + + + + + + + + + + + + + + + + +
+Creative Commons License Deed
+
+Attribution-ShareAlike 3.0 Unported (CC BY-SA 3.0)
+
+This is a human-readable summary of (and not a substitute for) the license.
+
+You are free to:
+
+  * Share — copy and redistribute the material in any medium or format
+
+  * Adapt — remix, transform, and build upon the material
+
+for any purpose, even commercially.
+
+The licensor cannot revoke these freedoms as long as you follow the license terms.
+
+This license is acceptable for Free Cultural Works.
+
+Under the following terms:
+
+  * Attribution — You must give appropriate credit, provide a link to the license, and
+    indicate if changes were made. You may do so in any reasonable manner, but not in
+    any way that suggests the licensor endorses you or your use.
+
+  * ShareAlike — If you remix, transform, or build upon the material, you must
+    distribute your contributions under the same license as the original.
+
+  * No additional restrictions — You may not apply legal terms or technological
+    measures that legally restrict others from doing anything the license permits.
+
+Notices:
+
+  * You do not have to comply with the license for elements of the material in the
+    public domain or where your use is permitted by an applicable exception or
+    limitation.
+
+  * No warranties are given. The license may not give you all of the permissions
+    necessary for your intended use. For example, other rights such as publicity,
+    privacy, or moral rights may limit how you use the material.
+ + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/backend/src/apiserver/visualization/third_party_licenses/pandas.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/pandas.LICENSE new file mode 100644 index 00000000000..76954a5a339 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/pandas.LICENSE @@ -0,0 +1,31 @@ +BSD 3-Clause License + +Copyright (c) 2008-2011, AQR Capital Management, LLC, Lambda Foundry, Inc. and PyData Development Team +All rights reserved. + +Copyright (c) 2011-2020, Open source contributors. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/backend/src/apiserver/visualization/third_party_licenses/pandocfilters.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/pandocfilters.LICENSE new file mode 100644 index 00000000000..330134ec3d1 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/pandocfilters.LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2013, John MacFarlane +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + + - Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + - Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + + - Neither the name of John Macfarlane nor the names of its contributors may + be used to endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/backend/src/apiserver/visualization/third_party_licenses/parso.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/parso.LICENSE new file mode 100644 index 00000000000..08c41db014e --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/parso.LICENSE @@ -0,0 +1,86 @@ +All contributions towards parso are MIT licensed. + +Some Python files have been taken from the standard library and are therefore +PSF licensed. Modifications on these files are dual licensed (both MIT and +PSF). These files are: + +- parso/pgen2/* +- parso/tokenize.py +- parso/token.py +- test/test_pgen2.py + +Also some test files under test/normalizer_issue_files have been copied from +https://github.com/PyCQA/pycodestyle (Expat License == MIT License). + +------------------------------------------------------------------------------- +The MIT License (MIT) + +Copyright (c) <2013-2017> + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + +------------------------------------------------------------------------------- + +PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 +-------------------------------------------- + +1. This LICENSE AGREEMENT is between the Python Software Foundation +("PSF"), and the Individual or Organization ("Licensee") accessing and +otherwise using this software ("Python") in source or binary form and +its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, PSF hereby +grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, +analyze, test, perform and/or display publicly, prepare derivative works, +distribute, and otherwise use Python alone or in any derivative version, +provided, however, that PSF's License Agreement and PSF's notice of copyright, +i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, +2011, 2012, 2013, 2014, 2015 Python Software Foundation; All Rights Reserved" +are retained in Python alone or in any derivative version prepared by Licensee. + +3. 
In the event Licensee prepares a derivative work that is based on +or incorporates Python or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python. + +4. PSF is making Python available to Licensee on an "AS IS" +basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. Nothing in this License Agreement shall be deemed to create any +relationship of agency, partnership, or joint venture between PSF and +Licensee. This License Agreement does not grant permission to use PSF +trademarks or trade name in a trademark sense to endorse or promote +products or services of Licensee, or any third party. + +8. By copying, installing or otherwise using Python, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. diff --git a/backend/src/apiserver/visualization/third_party_licenses/pbr.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/pbr.LICENSE new file mode 100644 index 00000000000..68c771a0999 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/pbr.LICENSE @@ -0,0 +1,176 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + diff --git a/backend/src/apiserver/visualization/third_party_licenses/pexpect.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/pexpect.LICENSE new file mode 100644 index 00000000000..754db5afcb8 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/pexpect.LICENSE @@ -0,0 +1,20 @@ +ISC LICENSE + + This license is approved by the OSI and FSF as GPL-compatible. + http://opensource.org/licenses/isc-license.txt + + Copyright (c) 2013-2014, Pexpect development team + Copyright (c) 2012, Noah Spurrier + + Permission to use, copy, modify, and/or distribute this software for any + purpose with or without fee is hereby granted, provided that the above + copyright notice and this permission notice appear in all copies. + + THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR + ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF + OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ diff --git a/backend/src/apiserver/visualization/third_party_licenses/pickleshare.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/pickleshare.LICENSE new file mode 100644 index 00000000000..53afb18c9ef --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/pickleshare.LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2016 Ville Vainio + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/backend/src/apiserver/visualization/third_party_licenses/prometheus-client.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/prometheus-client.LICENSE new file mode 100644 index 00000000000..261eeb9e9f8 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/prometheus-client.LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/backend/src/apiserver/visualization/third_party_licenses/prompt-toolkit.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/prompt-toolkit.LICENSE new file mode 100644 index 00000000000..e1720e0fb70 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/prompt-toolkit.LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2014, Jonathan Slenders +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, this + list of conditions and the following disclaimer in the documentation and/or + other materials provided with the distribution. 
+ +* Neither the name of the {organization} nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/backend/src/apiserver/visualization/third_party_licenses/protobuf.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/protobuf.LICENSE new file mode 100644 index 00000000000..19b305b0006 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/protobuf.LICENSE @@ -0,0 +1,32 @@ +Copyright 2008 Google Inc. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +Code generated by the Protocol Buffer compiler is owned by the owner +of the input file used when generating it. This code is not +standalone and requires a support library to be linked with it. This +support library is itself covered by the above license. diff --git a/backend/src/apiserver/visualization/third_party_licenses/ptyprocess.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/ptyprocess.LICENSE new file mode 100644 index 00000000000..9c772742de9 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/ptyprocess.LICENSE @@ -0,0 +1,16 @@ +Ptyprocess is under the ISC license, as code derived from Pexpect. 
+ http://opensource.org/licenses/ISC + +Copyright (c) 2013-2014, Pexpect development team +Copyright (c) 2012, Noah Spurrier + +PERMISSION TO USE, COPY, MODIFY, AND/OR DISTRIBUTE THIS SOFTWARE FOR ANY PURPOSE +WITH OR WITHOUT FEE IS HEREBY GRANTED, PROVIDED THAT THE ABOVE COPYRIGHT NOTICE +AND THIS PERMISSION NOTICE APPEAR IN ALL COPIES. THE SOFTWARE IS PROVIDED +"AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE +INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT +SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL +DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING +OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + diff --git a/backend/src/apiserver/visualization/third_party_licenses/pyarrow.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/pyarrow.LICENSE new file mode 100644 index 00000000000..a078f5e6b80 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/pyarrow.LICENSE @@ -0,0 +1,1988 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +-------------------------------------------------------------------------------- + +src/plasma/fling.cc and src/plasma/fling.h: Apache 2.0 + +Copyright 2013 Sharvil Nanavati + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +-------------------------------------------------------------------------------- + +src/plasma/thirdparty/ae: Modified / 3-Clause BSD + +Copyright (c) 2006-2010, Salvatore Sanfilippo +All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Redis nor the names of its contributors may be used + to endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. + +-------------------------------------------------------------------------------- + +src/plasma/thirdparty/dlmalloc.c: CC0 + +This is a version (aka dlmalloc) of malloc/free/realloc written by +Doug Lea and released to the public domain, as explained at +http://creativecommons.org/publicdomain/zero/1.0/ Send questions, +comments, complaints, performance data, etc to dl@cs.oswego.edu + +-------------------------------------------------------------------------------- + +src/plasma/common.cc (some portions) + +Copyright (c) Austin Appleby (aappleby (AT) gmail) + +Some portions of this file are derived from code in the MurmurHash project + +All code is released to the public domain. For business purposes, Murmurhash is +under the MIT license. + +https://sites.google.com/site/murmurhash/ + +-------------------------------------------------------------------------------- + +src/arrow/util (some portions): Apache 2.0, and 3-clause BSD + +Some portions of this module are derived from code in the Chromium project, +copyright (c) Google inc and (c) The Chromium Authors and licensed under the +Apache 2.0 License or the under the 3-clause BSD license: + + Copyright (c) 2013 The Chromium Authors. All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following disclaimer + in the documentation and/or other materials provided with the + distribution. + * Neither the name of Google Inc. nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. 
+ + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +-------------------------------------------------------------------------------- + +This project includes code from Daniel Lemire's FrameOfReference project. + +https://github.com/lemire/FrameOfReference/blob/6ccaf9e97160f9a3b299e23a8ef739e711ef0c71/src/bpacking.cpp + +Copyright: 2013 Daniel Lemire +Home page: http://lemire.me/en/ +Project page: https://github.com/lemire/FrameOfReference +License: Apache License Version 2.0 http://www.apache.org/licenses/LICENSE-2.0 + +-------------------------------------------------------------------------------- + +This project includes code from the TensorFlow project + +Copyright 2015 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +-------------------------------------------------------------------------------- + +This project includes code from the NumPy project. + +https://github.com/numpy/numpy/blob/e1f191c46f2eebd6cb892a4bfe14d9dd43a06c4e/numpy/core/src/multiarray/multiarraymodule.c#L2910 + +https://github.com/numpy/numpy/blob/68fd82271b9ea5a9e50d4e761061dfcca851382a/numpy/core/src/multiarray/datetime.c + +Copyright (c) 2005-2017, NumPy Developers. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + + * Neither the name of the NumPy Developers nor the names of any + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +-------------------------------------------------------------------------------- + +This project includes code from the Boost project + +Boost Software License - Version 1.0 - August 17th, 2003 + +Permission is hereby granted, free of charge, to any person or organization +obtaining a copy of the software and accompanying documentation covered by +this license (the "Software") to use, reproduce, display, distribute, +execute, and transmit the Software, and to prepare derivative works of the +Software, and to permit third-parties to whom the Software is furnished to +do so, all subject to the following: + +The copyright notices in the Software and this entire statement, including +the above license grant, this restriction and the following disclaimer, +must be included in all copies of the Software, in whole or in part, and +all derivative works of the Software, unless such copies or derivative +works are solely in the form of machine-executable object code generated by +a source language processor. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT +SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE +FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, +ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. + +-------------------------------------------------------------------------------- + +This project includes code from the FlatBuffers project + +Copyright 2014 Google Inc. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +-------------------------------------------------------------------------------- + +This project includes code from the tslib project + +Copyright 2015 Microsoft Corporation. All rights reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+
+--------------------------------------------------------------------------------
+
+This project includes code from the jemalloc project
+
+https://github.com/jemalloc/jemalloc
+
+Copyright (C) 2002-2017 Jason Evans <jasone@canonware.com>.
+All rights reserved.
+Copyright (C) 2007-2012 Mozilla Foundation.  All rights reserved.
+Copyright (C) 2009-2017 Facebook, Inc.  All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+1. Redistributions of source code must retain the above copyright notice(s),
+   this list of conditions and the following disclaimer.
+2. Redistributions in binary form must reproduce the above copyright notice(s),
+   this list of conditions and the following disclaimer in the documentation
+   and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) ``AS IS'' AND ANY EXPRESS
+OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+EVENT SHALL THE COPYRIGHT HOLDER(S) BE LIABLE FOR ANY DIRECT, INDIRECT,
+INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+--------------------------------------------------------------------------------
+
+This project includes code from the Go project, BSD 3-clause license + PATENTS
+weak patent termination clause
+(https://github.com/golang/go/blob/master/PATENTS).
+
+Copyright (c) 2009 The Go Authors. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+   * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+   * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+   * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ +-------------------------------------------------------------------------------- + +This project includes code from the hs2client + +https://github.com/cloudera/hs2client + +Copyright 2016 Cloudera Inc. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +-------------------------------------------------------------------------------- + +The script ci/scripts/util_wait_for_it.sh has the following license + +Copyright (c) 2016 Giles Hall + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +-------------------------------------------------------------------------------- + +The script r/configure has the following license (MIT) + +Copyright (c) 2017, Jeroen Ooms and Jim Hester + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +-------------------------------------------------------------------------------- + +cpp/src/arrow/util/logging.cc, cpp/src/arrow/util/logging.h and +cpp/src/arrow/util/logging-test.cc are adapted from +Ray Project (https://github.com/ray-project/ray) (Apache 2.0). 
+ +Copyright (c) 2016 Ray Project (https://github.com/ray-project/ray) + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +-------------------------------------------------------------------------------- +The files cpp/src/arrow/vendored/datetime/date.h, cpp/src/arrow/vendored/datetime/tz.h, +cpp/src/arrow/vendored/datetime/tz_private.h, cpp/src/arrow/vendored/datetime/ios.h, +cpp/src/arrow/vendored/datetime/tz.cpp are adapted from +Howard Hinnant's date library (https://github.com/HowardHinnant/date) +It is licensed under MIT license. + +The MIT License (MIT) +Copyright (c) 2015, 2016, 2017 Howard Hinnant +Copyright (c) 2016 Adrian Colomitchi +Copyright (c) 2017 Florian Dang +Copyright (c) 2017 Paul Thompson +Copyright (c) 2018 Tomasz Kamiński + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +-------------------------------------------------------------------------------- + +The file cpp/src/arrow/util/utf8.h includes code adapted from the page + https://bjoern.hoehrmann.de/utf-8/decoder/dfa/ +with the following license (MIT) + +Copyright (c) 2008-2009 Bjoern Hoehrmann + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +-------------------------------------------------------------------------------- + +The file cpp/src/arrow/vendored/string_view.hpp has the following license + +Boost Software License - Version 1.0 - August 17th, 2003 + +Permission is hereby granted, free of charge, to any person or organization +obtaining a copy of the software and accompanying documentation covered by +this license (the "Software") to use, reproduce, display, distribute, +execute, and transmit the Software, and to prepare derivative works of the +Software, and to permit third-parties to whom the Software is furnished to +do so, all subject to the following: + +The copyright notices in the Software and this entire statement, including +the above license grant, this restriction and the following disclaimer, +must be included in all copies of the Software, in whole or in part, and +all derivative works of the Software, unless such copies or derivative +works are solely in the form of machine-executable object code generated by +a source language processor. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT +SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE +FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, +ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. + +-------------------------------------------------------------------------------- + +The file cpp/src/arrow/vendored/variant.hpp has the following license + +Boost Software License - Version 1.0 - August 17th, 2003 + +Permission is hereby granted, free of charge, to any person or organization +obtaining a copy of the software and accompanying documentation covered by +this license (the "Software") to use, reproduce, display, distribute, +execute, and transmit the Software, and to prepare derivative works of the +Software, and to permit third-parties to whom the Software is furnished to +do so, all subject to the following: + +The copyright notices in the Software and this entire statement, including +the above license grant, this restriction and the following disclaimer, +must be included in all copies of the Software, in whole or in part, and +all derivative works of the Software, unless such copies or derivative +works are solely in the form of machine-executable object code generated by +a source language processor. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT +SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE +FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, +ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. 
+ +-------------------------------------------------------------------------------- + +The files in cpp/src/arrow/vendored/xxhash/ have the following license +(BSD 2-Clause License) + +xxHash Library +Copyright (c) 2012-2014, Yann Collet +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, this + list of conditions and the following disclaimer in the documentation and/or + other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +You can contact the author at : +- xxHash homepage: http://www.xxhash.com +- xxHash source repository : https://github.com/Cyan4973/xxHash + +-------------------------------------------------------------------------------- + +The files in cpp/src/arrow/vendored/double-conversion/ have the following license +(BSD 3-Clause License) + +Copyright 2006-2011, the V8 project authors. All rights reserved. +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + * Neither the name of Google Inc. nor the names of its + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+
+--------------------------------------------------------------------------------
+
+The files in cpp/src/arrow/vendored/uriparser/ have the following license
+(BSD 3-Clause License)
+
+uriparser - RFC 3986 URI parsing library
+
+Copyright (C) 2007, Weijia Song <songweijia@gmail.com>
+Copyright (C) 2007, Sebastian Pipping <sebastian@pipping.org>
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+
+    * Redistributions of source code must retain the above
+      copyright notice, this list of conditions and the following
+      disclaimer.
+
+    * Redistributions in binary form must reproduce the above
+      copyright notice, this list of conditions and the following
+      disclaimer in the documentation and/or other materials
+      provided with the distribution.
+
+    * Neither the name of the <ORGANIZATION> nor the names of its
+      contributors may be used to endorse or promote products
+      derived from this software without specific prior written
+      permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+OF THE POSSIBILITY OF SUCH DAMAGE.
+
+--------------------------------------------------------------------------------
+
+The files under dev/tasks/conda-recipes have the following license
+
+BSD 3-clause license
+Copyright (c) 2015-2018, conda-forge
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification,
+are permitted provided that the following conditions are met:
+
+1. Redistributions of source code must retain the above copyright notice, this
+   list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright notice,
+   this list of conditions and the following disclaimer in the documentation
+   and/or other materials provided with the distribution.
+
+3. Neither the name of the copyright holder nor the names of its contributors
+   may be used to endorse or promote products derived from this software without
+   specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
+TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ +-------------------------------------------------------------------------------- + +The files in cpp/src/arrow/vendored/utf8cpp/ have the following license + +Copyright 2006 Nemanja Trifunovic + +Permission is hereby granted, free of charge, to any person or organization +obtaining a copy of the software and accompanying documentation covered by +this license (the "Software") to use, reproduce, display, distribute, +execute, and transmit the Software, and to prepare derivative works of the +Software, and to permit third-parties to whom the Software is furnished to +do so, all subject to the following: + +The copyright notices in the Software and this entire statement, including +the above license grant, this restriction and the following disclaimer, +must be included in all copies of the Software, in whole or in part, and +all derivative works of the Software, unless such copies or derivative +works are solely in the form of machine-executable object code generated by +a source language processor. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT +SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE +FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, +ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. + +-------------------------------------------------------------------------------- + +This project includes code from Apache Kudu. + + * cpp/cmake_modules/CompilerInfo.cmake is based on Kudu's cmake_modules/CompilerInfo.cmake + +Copyright: 2016 The Apache Software Foundation. +Home page: https://kudu.apache.org/ +License: http://www.apache.org/licenses/LICENSE-2.0 + +-------------------------------------------------------------------------------- + +This project includes code from Apache Impala (incubating), formerly +Impala. The Impala code and rights were donated to the ASF as part of the +Incubator process after the initial code imports into Apache Parquet. + +Copyright: 2012 Cloudera, Inc. +Copyright: 2016 The Apache Software Foundation. +Home page: http://impala.apache.org/ +License: http://www.apache.org/licenses/LICENSE-2.0 + +-------------------------------------------------------------------------------- + +This project includes code from Apache Aurora. + +* dev/release/{release,changelog,release-candidate} are based on the scripts from + Apache Aurora + +Copyright: 2016 The Apache Software Foundation. +Home page: https://aurora.apache.org/ +License: http://www.apache.org/licenses/LICENSE-2.0 + +-------------------------------------------------------------------------------- + +This project includes code from the Google styleguide. + +* cpp/build-support/cpplint.py is based on the scripts from the Google styleguide. + +Copyright: 2009 Google Inc. All rights reserved. +Homepage: https://github.com/google/styleguide +License: 3-clause BSD + +-------------------------------------------------------------------------------- + +This project includes code from Snappy. + +* cpp/cmake_modules/{SnappyCMakeLists.txt,SnappyConfig.h} are based on code + from Google's Snappy project. + +Copyright: 2009 Google Inc. All rights reserved. 
+Homepage: https://github.com/google/snappy
+License: 3-clause BSD
+
+--------------------------------------------------------------------------------
+
+This project includes code from the manylinux project.
+
+* python/manylinux1/scripts/{build_python.sh,python-tag-abi-tag.py,
+  requirements.txt} are based on code from the manylinux project.
+
+Copyright: 2016 manylinux
+Homepage: https://github.com/pypa/manylinux
+License: The MIT License (MIT)
+
+--------------------------------------------------------------------------------
+
+This project includes code from the cymove project:
+
+* python/pyarrow/includes/common.pxd includes code from the cymove project
+
+The MIT License (MIT)
+Copyright (c) 2019 Omer Ozarslan
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
+OR OTHER DEALINGS IN THE SOFTWARE.
+
+--------------------------------------------------------------------------------
+
+This project includes code from the Ursabot project under the dev/archery
+directory.
+
+License: BSD 2-Clause
+
+Copyright 2019 RStudio, Inc.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+1. Redistributions of source code must retain the above copyright notice, this
+   list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright notice,
+   this list of conditions and the following disclaimer in the documentation
+   and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+--------------------------------------------------------------------------------
+
+This project includes code from CMake.
+
+* cpp/cmake_modules/FindGTest.cmake is based on code from CMake.
+
+Copyright: Copyright 2000-2019 Kitware, Inc. and Contributors
+Homepage: https://gitlab.kitware.com/cmake/cmake
+License: 3-clause BSD
+
+--------------------------------------------------------------------------------
+
+This project includes code from mingw-w64.
+
+* cpp/src/arrow/util/cpu-info.cc has a polyfill for mingw-w64 < 5
+
+Copyright (c) 2009 - 2013 by the mingw-w64 project
+Homepage: https://mingw-w64.org
+License: Zope Public License (ZPL) Version 2.1.
+
+--------------------------------------------------------------------------------
+
+This project includes code from Google's Asylo project.
+
+* cpp/src/arrow/result.h is based on status_or.h
+
+Copyright (c) 2017 Asylo authors
+Homepage: https://asylo.dev/
+License: Apache 2.0
+
+--------------------------------------------------------------------------------
+
+This project includes code from Google's protobuf project.
+
+* cpp/src/arrow/result.h ARROW_ASSIGN_OR_RAISE is based on ASSIGN_OR_RETURN
+
+Copyright 2008 Google Inc. All rights reserved.
+Homepage: https://developers.google.com/protocol-buffers/
+License:
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+    * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+    * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+Code generated by the Protocol Buffer compiler is owned by the owner
+of the input file used when generating it. This code is not
+standalone and requires a support library to be linked with it. This
+support library is itself covered by the above license.
+
+--------------------------------------------------------------------------------
+
+3rdparty dependency LLVM is statically linked in certain binary
+distributions. LLVM has the following license:
+
+==============================================================================
+LLVM Release License
+==============================================================================
+University of Illinois/NCSA
+Open Source License
+
+Copyright (c) 2003-2018 University of Illinois at Urbana-Champaign.
+All rights reserved.
+ +Developed by: + + LLVM Team + + University of Illinois at Urbana-Champaign + + http://llvm.org + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal with +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimers. + + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimers in the + documentation and/or other materials provided with the distribution. + + * Neither the names of the LLVM Team, University of Illinois at + Urbana-Champaign, nor the names of its contributors may be used to + endorse or promote products derived from this Software without specific + prior written permission. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +CONTRIBUTORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS WITH THE +SOFTWARE. + +============================================================================== +Copyrights and Licenses for Third Party Software Distributed with LLVM: +============================================================================== +The LLVM software contains code written by third parties. Such software will +have its own individual LICENSE.TXT file in the directory in which it appears. +This file will describe the copyrights, license, and restrictions which apply +to that code. + +The disclaimer of warranty in the University of Illinois Open Source License +applies to all code in the LLVM Distribution, and nothing in any of the +other licenses gives permission to use the names of the LLVM Team or the +University of Illinois to endorse or promote products derived from this +Software. + +The following pieces of software have additional or alternate copyrights, +licenses, and/or restrictions: + +Program Directory +------- --------- +Google Test llvm/utils/unittest/googletest +OpenBSD regex llvm/lib/Support/{reg*, COPYRIGHT.regex} +pyyaml tests llvm/test/YAMLParser/{*.data, LICENSE.TXT} +ARM contributions llvm/lib/Target/ARM/LICENSE.TXT +md5 contributions llvm/lib/Support/MD5.cpp llvm/include/llvm/Support/MD5.h + +-------------------------------------------------------------------------------- + +3rdparty dependency gRPC is statically linked in certain binary +distributions, like the python wheels. gRPC has the following license: + +Copyright 2014 gRPC authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. + +-------------------------------------------------------------------------------- + +3rdparty dependency Apache Thrift is statically linked in certain binary +distributions, like the python wheels. Apache Thrift has the following license: + +Apache Thrift +Copyright (C) 2006 - 2019, The Apache Software Foundation + +This product includes software developed at +The Apache Software Foundation (http://www.apache.org/). + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +-------------------------------------------------------------------------------- + +3rdparty dependency Apache ORC is statically linked in certain binary +distributions, like the python wheels. Apache ORC has the following license: + +Apache ORC +Copyright 2013-2019 The Apache Software Foundation + +This product includes software developed by The Apache Software +Foundation (http://www.apache.org/). + +This product includes software developed by Hewlett-Packard: +(c) Copyright [2014-2015] Hewlett-Packard Development Company, L.P + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +-------------------------------------------------------------------------------- + +3rdparty dependency zstd is statically linked in certain binary +distributions, like the python wheels. ZSTD has the following license: + +BSD License + +For Zstandard software + +Copyright (c) 2016-present, Facebook, Inc. All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + + * Neither the name Facebook nor the names of its contributors may be used to + endorse or promote products derived from this software without specific + prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +-------------------------------------------------------------------------------- + +3rdparty dependency lz4 is statically linked in certain binary +distributions, like the python wheels. lz4 has the following license: + +LZ4 Library +Copyright (c) 2011-2016, Yann Collet +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, this + list of conditions and the following disclaimer in the documentation and/or + other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +-------------------------------------------------------------------------------- + +3rdparty dependency Brotli is statically linked in certain binary +distributions, like the python wheels. Brotli has the following license: + +Copyright (c) 2009, 2010, 2013-2016 by the Brotli Authors. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
+ +-------------------------------------------------------------------------------- + +3rdparty dependency rapidjson is statically linked in certain binary +distributions, like the python wheels. rapidjson and its dependencies have the +following licenses: + +Tencent is pleased to support the open source community by making RapidJSON +available. + +Copyright (C) 2015 THL A29 Limited, a Tencent company, and Milo Yip. +All rights reserved. + +If you have downloaded a copy of the RapidJSON binary from Tencent, please note +that the RapidJSON binary is licensed under the MIT License. +If you have downloaded a copy of the RapidJSON source code from Tencent, please +note that RapidJSON source code is licensed under the MIT License, except for +the third-party components listed below which are subject to different license +terms. Your integration of RapidJSON into your own projects may require +compliance with the MIT License, as well as the other licenses applicable to +the third-party components included within RapidJSON. To avoid the problematic +JSON license in your own projects, it's sufficient to exclude the +bin/jsonchecker/ directory, as it's the only code under the JSON license. +A copy of the MIT License is included in this file. + +Other dependencies and licenses: + + Open Source Software Licensed Under the BSD License: + -------------------------------------------------------------------- + + The msinttypes r29 + Copyright (c) 2006-2013 Alexander Chemeris + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + * Neither the name of copyright holder nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND ANY + EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE REGENTS AND CONTRIBUTORS BE LIABLE FOR + ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT + LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY + OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH + DAMAGE. + + Open Source Software Licensed Under the JSON License: + -------------------------------------------------------------------- + + json.org + Copyright (c) 2002 JSON.org + All Rights Reserved. + + JSON_checker + Copyright (c) 2002 JSON.org + All Rights Reserved. 
+ + + Terms of the JSON License: + --------------------------------------------------- + + Permission is hereby granted, free of charge, to any person obtaining a + copy of this software and associated documentation files (the "Software"), + to deal in the Software without restriction, including without limitation + the rights to use, copy, modify, merge, publish, distribute, sublicense, + and/or sell copies of the Software, and to permit persons to whom the + Software is furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + The Software shall be used for Good, not Evil. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + DEALINGS IN THE SOFTWARE. + + + Terms of the MIT License: + -------------------------------------------------------------------- + + Permission is hereby granted, free of charge, to any person obtaining a + copy of this software and associated documentation files (the "Software"), + to deal in the Software without restriction, including without limitation + the rights to use, copy, modify, merge, publish, distribute, sublicense, + and/or sell copies of the Software, and to permit persons to whom the + Software is furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included + in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + DEALINGS IN THE SOFTWARE. + +-------------------------------------------------------------------------------- + +3rdparty dependency snappy is statically linked in certain binary +distributions, like the python wheels. snappy has the following license: + +Copyright 2011, Google Inc. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + * Neither the name of Google Inc. nor the names of its contributors may be + used to endorse or promote products derived from this software without + specific prior written permission. 
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+===
+
+Some of the benchmark data in testdata/ is licensed differently:
+
+ - fireworks.jpeg is Copyright 2013 Steinar H. Gunderson, and
+   is licensed under the Creative Commons Attribution 3.0 license
+   (CC-BY-3.0). See https://creativecommons.org/licenses/by/3.0/
+   for more information.
+
+ - kppkn.gtb is taken from the Gaviota chess tablebase set, and
+   is licensed under the MIT License. See
+   https://sites.google.com/site/gaviotachessengine/Home/endgame-tablebases-1
+   for more information.
+
+ - paper-100k.pdf is an excerpt (bytes 92160 to 194560) from the paper
+   “Combinatorial Modeling of Chromatin Features Quantitatively Predicts DNA
+   Replication Timing in _Drosophila_” by Federico Comoglio and Renato Paro,
+   which is licensed under the CC-BY license. See
+   http://www.ploscompbiol.org/static/license for more information.
+
+ - alice29.txt, asyoulik.txt, plrabn12.txt and lcet10.txt are from Project
+   Gutenberg. The first three have expired copyrights and are in the public
+   domain; the latter does not have expired copyright, but is still in the
+   public domain according to the license information
+   (http://www.gutenberg.org/ebooks/53).
+
+--------------------------------------------------------------------------------
+
+3rdparty dependency gflags is statically linked in certain binary
+distributions, like the python wheels. gflags has the following license:
+
+Copyright (c) 2006, Google Inc.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+    * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+    * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +-------------------------------------------------------------------------------- + +3rdparty dependency glog is statically linked in certain binary +distributions, like the python wheels. glog has the following license: + +Copyright (c) 2008, Google Inc. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +A function gettimeofday in utilities.cc is based on + +http://www.google.com/codesearch/p?hl=en#dR3YEbitojA/COPYING&q=GetSystemTimeAsFileTime%20license:bsd + +The license of this code is: + +Copyright (c) 2003-2008, Jouni Malinen and contributors +All Rights Reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. Neither the name(s) of the above-listed copyright holder(s) nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +-------------------------------------------------------------------------------- + +3rdparty dependency re2 is statically linked in certain binary +distributions, like the python wheels. re2 has the following license: + +Copyright (c) 2009 The RE2 Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + * Neither the name of Google Inc. nor the names of its contributors + may be used to endorse or promote products derived from this + software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +-------------------------------------------------------------------------------- + +3rdparty dependency c-ares is statically linked in certain binary +distributions, like the python wheels. c-ares has the following license: + +# c-ares license + +Copyright (c) 2007 - 2018, Daniel Stenberg with many contributors, see AUTHORS +file. + +Copyright 1998 by the Massachusetts Institute of Technology. + +Permission to use, copy, modify, and distribute this software and its +documentation for any purpose and without fee is hereby granted, provided that +the above copyright notice appear in all copies and that both that copyright +notice and this permission notice appear in supporting documentation, and that +the name of M.I.T. not be used in advertising or publicity pertaining to +distribution of the software without specific, written prior permission. +M.I.T. makes no representations about the suitability of this software for any +purpose. It is provided "as is" without express or implied warranty. + +-------------------------------------------------------------------------------- + +3rdparty dependency zlib is redistributed as a dynamically linked shared +library in certain binary distributions, like the python wheels. In the future +this will likely change to static linkage. 
zlib has the following license: + +zlib.h -- interface of the 'zlib' general purpose compression library + version 1.2.11, January 15th, 2017 + + Copyright (C) 1995-2017 Jean-loup Gailly and Mark Adler + + This software is provided 'as-is', without any express or implied + warranty. In no event will the authors be held liable for any damages + arising from the use of this software. + + Permission is granted to anyone to use this software for any purpose, + including commercial applications, and to alter it and redistribute it + freely, subject to the following restrictions: + + 1. The origin of this software must not be misrepresented; you must not + claim that you wrote the original software. If you use this software + in a product, an acknowledgment in the product documentation would be + appreciated but is not required. + 2. Altered source versions must be plainly marked as such, and must not be + misrepresented as being the original software. + 3. This notice may not be removed or altered from any source distribution. + + Jean-loup Gailly Mark Adler + jloup@gzip.org madler@alumni.caltech.edu + +-------------------------------------------------------------------------------- + +3rdparty dependency openssl is redistributed as a dynamically linked shared +library in certain binary distributions, like the python wheels. openssl +preceding version 3 has the following license: + + LICENSE ISSUES + ============== + + The OpenSSL toolkit stays under a double license, i.e. both the conditions of + the OpenSSL License and the original SSLeay license apply to the toolkit. + See below for the actual license texts. + + OpenSSL License + --------------- + +/* ==================================================================== + * Copyright (c) 1998-2019 The OpenSSL Project. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * + * 1. Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in + * the documentation and/or other materials provided with the + * distribution. + * + * 3. All advertising materials mentioning features or use of this + * software must display the following acknowledgment: + * "This product includes software developed by the OpenSSL Project + * for use in the OpenSSL Toolkit. (http://www.openssl.org/)" + * + * 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to + * endorse or promote products derived from this software without + * prior written permission. For written permission, please contact + * openssl-core@openssl.org. + * + * 5. Products derived from this software may not be called "OpenSSL" + * nor may "OpenSSL" appear in their names without prior written + * permission of the OpenSSL Project. + * + * 6. Redistributions of any form whatsoever must retain the following + * acknowledgment: + * "This product includes software developed by the OpenSSL Project + * for use in the OpenSSL Toolkit (http://www.openssl.org/)" + * + * THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY + * EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + * PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE OpenSSL PROJECT OR + * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT + * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; + * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, + * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED + * OF THE POSSIBILITY OF SUCH DAMAGE. + * ==================================================================== + * + * This product includes cryptographic software written by Eric Young + * (eay@cryptsoft.com). This product includes software written by Tim + * Hudson (tjh@cryptsoft.com). + * + */ + + Original SSLeay License + ----------------------- + +/* Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com) + * All rights reserved. + * + * This package is an SSL implementation written + * by Eric Young (eay@cryptsoft.com). + * The implementation was written so as to conform with Netscapes SSL. + * + * This library is free for commercial and non-commercial use as long as + * the following conditions are aheared to. The following conditions + * apply to all code found in this distribution, be it the RC4, RSA, + * lhash, DES, etc., code; not just the SSL code. The SSL documentation + * included with this distribution is covered by the same copyright terms + * except that the holder is Tim Hudson (tjh@cryptsoft.com). + * + * Copyright remains Eric Young's, and as such any Copyright notices in + * the code are not to be removed. + * If this package is used in a product, Eric Young should be given attribution + * as the author of the parts of the library used. + * This can be in the form of a textual message at program startup or + * in documentation (online or textual) provided with the package. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * 1. Redistributions of source code must retain the copyright + * notice, this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * 3. All advertising materials mentioning features or use of this software + * must display the following acknowledgement: + * "This product includes cryptographic software written by + * Eric Young (eay@cryptsoft.com)" + * The word 'cryptographic' can be left out if the rouines from the library + * being used are not cryptographic related :-). + * 4. If you include any Windows specific code (or a derivative thereof) from + * the apps directory (application code) you must include an acknowledgement: + * "This product includes software written by Tim Hudson (tjh@cryptsoft.com)" + * + * THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND + * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS + * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT + * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY + * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF + * SUCH DAMAGE. + * + * The licence and distribution terms for any publically available version or + * derivative of this code cannot be changed. i.e. this code cannot simply be + * copied and put under another distribution licence + * [including the GNU Public Licence.] + */ + +-------------------------------------------------------------------------------- + +This project includes code from the rtools-backports project. + +* ci/scripts/PKGBUILD and ci/scripts/r_windows_build.sh are based on code + from the rtools-backports project. + +Copyright: Copyright (c) 2013 - 2019, Алексей and Jeroen Ooms. +All rights reserved. +Homepage: https://github.com/r-windows/rtools-backports +License: 3-clause BSD + +-------------------------------------------------------------------------------- + +Some code from pandas has been adapted for the pyarrow codebase. pandas is +available under the 3-clause BSD license, which follows: + +pandas license +============== + +Copyright (c) 2011-2012, Lambda Foundry, Inc. and PyData Development Team +All rights reserved. + +Copyright (c) 2008-2011 AQR Capital Management, LLC +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + + * Neither the name of the copyright holder nor the names of any + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +-------------------------------------------------------------------------------- + +Some bits from DyND, in particular aspects of the build system, have been +adapted from libdynd and dynd-python under the terms of the BSD 2-clause +license + +The BSD 2-Clause License + + Copyright (C) 2011-12, Dynamic NDArray Developers + All rights reserved. 
+ + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +Dynamic NDArray Developers list: + + * Mark Wiebe + * Continuum Analytics + +-------------------------------------------------------------------------------- + +Some source code from Ibis (https://github.com/cloudera/ibis) has been adapted +for PyArrow. Ibis is released under the Apache License, Version 2.0. + +-------------------------------------------------------------------------------- + +This project includes code from the autobrew project. + +* r/tools/autobrew and dev/tasks/homebrew-formulae/autobrew/apache-arrow.rb + are based on code from the autobrew project. + +Copyright (c) 2019, Jeroen Ooms +License: MIT +Homepage: https://github.com/jeroen/autobrew + +-------------------------------------------------------------------------------- + +dev/tasks/homebrew-formulae/apache-arrow.rb has the following license: + +BSD 2-Clause License + +Copyright (c) 2009-present, Homebrew contributors +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +---------------------------------------------------------------------- + +cpp/src/arrow/vendored/base64.cpp has the following license + +ZLIB License + +Copyright (C) 2004-2017 René Nyffenegger + +This source code is provided 'as-is', without any express or implied +warranty. In no event will the author be held liable for any damages arising +from the use of this software. + +Permission is granted to anyone to use this software for any purpose, including +commercial applications, and to alter it and redistribute it freely, subject to +the following restrictions: + +1. The origin of this source code must not be misrepresented; you must not + claim that you wrote the original source code. If you use this source code + in a product, an acknowledgment in the product documentation would be + appreciated but is not required. + +2. Altered source versions must be plainly marked as such, and must not be + misrepresented as being the original source code. + +3. This notice may not be removed or altered from any source distribution. + +René Nyffenegger rene.nyffenegger@adp-gmbh.ch + +-------------------------------------------------------------------------------- + +The file cpp/src/arrow/vendored/optional.hpp has the following license + +Boost Software License - Version 1.0 - August 17th, 2003 + +Permission is hereby granted, free of charge, to any person or organization +obtaining a copy of the software and accompanying documentation covered by +this license (the "Software") to use, reproduce, display, distribute, +execute, and transmit the Software, and to prepare derivative works of the +Software, and to permit third-parties to whom the Software is furnished to +do so, all subject to the following: + +The copyright notices in the Software and this entire statement, including +the above license grant, this restriction and the following disclaimer, +must be included in all copies of the Software, in whole or in part, and +all derivative works of the Software, unless such copies or derivative +works are solely in the form of machine-executable object code generated by +a source language processor. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT +SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE +FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, +ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. diff --git a/backend/src/apiserver/visualization/third_party_licenses/pyasn1-modules.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/pyasn1-modules.LICENSE new file mode 100644 index 00000000000..598b8430eff --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/pyasn1-modules.LICENSE @@ -0,0 +1,24 @@ +Copyright (c) 2005-2020, Ilya Etingof +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. diff --git a/backend/src/apiserver/visualization/third_party_licenses/pyasn1.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/pyasn1.LICENSE new file mode 100644 index 00000000000..598b8430eff --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/pyasn1.LICENSE @@ -0,0 +1,24 @@ +Copyright (c) 2005-2020, Ilya Etingof +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. diff --git a/backend/src/apiserver/visualization/third_party_licenses/pycrypto.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/pycrypto.LICENSE new file mode 100644 index 00000000000..a5bd19ccbee --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/pycrypto.LICENSE @@ -0,0 +1,77 @@ +Copyright and licensing of the Python Cryptography Toolkit ("PyCrypto"): +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Previously, the copyright and/or licensing status of the Python +Cryptography Toolkit ("PyCrypto") had been somewhat ambiguous. The +original intention of Andrew M. Kuchling and other contributors has +been to dedicate PyCrypto to the public domain, but that intention was +not necessarily made clear in the original disclaimer (see +LEGAL/copy/LICENSE.orig). + +Additionally, some files within PyCrypto had specified their own +licenses that differed from the PyCrypto license itself. For example, +the original RIPEMD.c module simply had a copyright statement and +warranty disclaimer, without clearly specifying any license terms. 
+(An updated version on the author's website came with a license that +contained a GPL-incompatible advertising clause.) + +To rectify this situation for PyCrypto 2.1, the following steps have +been taken: + + 1. Obtaining explicit permission from the original contributors to + dedicate their contributions to the public domain if they have not + already done so. (See the "LEGAL/copy/stmts" directory for + contributors' statements.) + + 2. Replacing some modules with clearly-licensed code from other + sources (e.g. the DES and DES3 modules were replaced with new ones + based on Tom St. Denis's public-domain LibTomCrypt library.) + + 3. Replacing some modules with code written from scratch (e.g. the + RIPEMD and Blowfish modules were re-implemented from their + respective algorithm specifications without reference to the old + implementations). + + 4. Removing some modules altogether without replacing them. + +To the best of our knowledge, with the exceptions noted below or +within the files themselves, the files that constitute PyCrypto are in +the public domain. Most are distributed with the following notice: + + The contents of this file are dedicated to the public domain. To + the extent that dedication to the public domain is not available, + everyone is granted a worldwide, perpetual, royalty-free, + non-exclusive license to exercise all rights associated with the + contents of this file for any purpose whatsoever. + No rights are reserved. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS + BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN + ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN + CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE. + +Exceptions: + + - Portions of HMAC.py and setup.py are derived from Python 2.2, and + are therefore Copyright (c) 2001, 2002, 2003 Python Software + Foundation (All Rights Reserved). They are licensed by the PSF + under the terms of the Python 2.2 license. (See the file + LEGAL/copy/LICENSE.python-2.2 for details.) + + - The various GNU autotools (autoconf, automake, aclocal, etc.) are + used during the build process. This includes macros from + autoconf-archive, which are located in the m4/ directory. As is + customary, some files from the GNU autotools are included in the + source tree (in the root directory, and in the build-aux/ + directory). These files are merely part of the build process, and + are not included in binary builds of the software. + +EXPORT RESTRICTIONS: + +Note that the export or re-export of cryptographic software and/or +source code may be subject to regulation in your jurisdiction. 
+ diff --git a/backend/src/apiserver/visualization/third_party_licenses/pydot.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/pydot.LICENSE new file mode 100644 index 00000000000..b5c8ed93f27 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/pydot.LICENSE @@ -0,0 +1,21 @@ +Copyright (c) 2014 Carlos Jenkins +Copyright (c) 2014 Lance Helper +Copyright (c) 2004 Ero Carrera + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/backend/src/apiserver/visualization/third_party_licenses/pygobject.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/pygobject.LICENSE new file mode 100644 index 00000000000..ae23fcfda2d --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/pygobject.LICENSE @@ -0,0 +1,504 @@ + GNU LESSER GENERAL PUBLIC LICENSE + Version 2.1, February 1999 + + Copyright (C) 1991, 1999 Free Software Foundation, Inc. + 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + +[This is the first released version of the Lesser GPL. It also counts + as the successor of the GNU Library Public License, version 2, hence + the version number 2.1.] + + Preamble + + The licenses for most software are designed to take away your +freedom to share and change it. By contrast, the GNU General Public +Licenses are intended to guarantee your freedom to share and change +free software--to make sure the software is free for all its users. + + This license, the Lesser General Public License, applies to some +specially designated software packages--typically libraries--of the +Free Software Foundation and other authors who decide to use it. You +can use it too, but we suggest you first think carefully about whether +this license or the ordinary General Public License is the better +strategy to use in any particular case, based on the explanations below. + + When we speak of free software, we are referring to freedom of use, +not price. Our General Public Licenses are designed to make sure that +you have the freedom to distribute copies of free software (and charge +for this service if you wish); that you receive source code or can get +it if you want it; that you can change the software and use pieces of +it in new free programs; and that you are informed that you can do +these things. 
+ + To protect your rights, we need to make restrictions that forbid +distributors to deny you these rights or to ask you to surrender these +rights. These restrictions translate to certain responsibilities for +you if you distribute copies of the library or if you modify it. + + For example, if you distribute copies of the library, whether gratis +or for a fee, you must give the recipients all the rights that we gave +you. You must make sure that they, too, receive or can get the source +code. If you link other code with the library, you must provide +complete object files to the recipients, so that they can relink them +with the library after making changes to the library and recompiling +it. And you must show them these terms so they know their rights. + + We protect your rights with a two-step method: (1) we copyright the +library, and (2) we offer you this license, which gives you legal +permission to copy, distribute and/or modify the library. + + To protect each distributor, we want to make it very clear that +there is no warranty for the free library. Also, if the library is +modified by someone else and passed on, the recipients should know +that what they have is not the original version, so that the original +author's reputation will not be affected by problems that might be +introduced by others. + + Finally, software patents pose a constant threat to the existence of +any free program. We wish to make sure that a company cannot +effectively restrict the users of a free program by obtaining a +restrictive license from a patent holder. Therefore, we insist that +any patent license obtained for a version of the library must be +consistent with the full freedom of use specified in this license. + + Most GNU software, including some libraries, is covered by the +ordinary GNU General Public License. This license, the GNU Lesser +General Public License, applies to certain designated libraries, and +is quite different from the ordinary General Public License. We use +this license for certain libraries in order to permit linking those +libraries into non-free programs. + + When a program is linked with a library, whether statically or using +a shared library, the combination of the two is legally speaking a +combined work, a derivative of the original library. The ordinary +General Public License therefore permits such linking only if the +entire combination fits its criteria of freedom. The Lesser General +Public License permits more lax criteria for linking other code with +the library. + + We call this license the "Lesser" General Public License because it +does Less to protect the user's freedom than the ordinary General +Public License. It also provides other free software developers Less +of an advantage over competing non-free programs. These disadvantages +are the reason we use the ordinary General Public License for many +libraries. However, the Lesser license provides advantages in certain +special circumstances. + + For example, on rare occasions, there may be a special need to +encourage the widest possible use of a certain library, so that it becomes +a de-facto standard. To achieve this, non-free programs must be +allowed to use the library. A more frequent case is that a free +library does the same job as widely used non-free libraries. In this +case, there is little to gain by limiting the free library to free +software only, so we use the Lesser General Public License. 
+ + In other cases, permission to use a particular library in non-free +programs enables a greater number of people to use a large body of +free software. For example, permission to use the GNU C Library in +non-free programs enables many more people to use the whole GNU +operating system, as well as its variant, the GNU/Linux operating +system. + + Although the Lesser General Public License is Less protective of the +users' freedom, it does ensure that the user of a program that is +linked with the Library has the freedom and the wherewithal to run +that program using a modified version of the Library. + + The precise terms and conditions for copying, distribution and +modification follow. Pay close attention to the difference between a +"work based on the library" and a "work that uses the library". The +former contains code derived from the library, whereas the latter must +be combined with the library in order to run. + + GNU LESSER GENERAL PUBLIC LICENSE + TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + + 0. This License Agreement applies to any software library or other +program which contains a notice placed by the copyright holder or +other authorized party saying it may be distributed under the terms of +this Lesser General Public License (also called "this License"). +Each licensee is addressed as "you". + + A "library" means a collection of software functions and/or data +prepared so as to be conveniently linked with application programs +(which use some of those functions and data) to form executables. + + The "Library", below, refers to any such software library or work +which has been distributed under these terms. A "work based on the +Library" means either the Library or any derivative work under +copyright law: that is to say, a work containing the Library or a +portion of it, either verbatim or with modifications and/or translated +straightforwardly into another language. (Hereinafter, translation is +included without limitation in the term "modification".) + + "Source code" for a work means the preferred form of the work for +making modifications to it. For a library, complete source code means +all the source code for all modules it contains, plus any associated +interface definition files, plus the scripts used to control compilation +and installation of the library. + + Activities other than copying, distribution and modification are not +covered by this License; they are outside its scope. The act of +running a program using the Library is not restricted, and output from +such a program is covered only if its contents constitute a work based +on the Library (independent of the use of the Library in a tool for +writing it). Whether that is true depends on what the Library does +and what the program that uses the Library does. + + 1. You may copy and distribute verbatim copies of the Library's +complete source code as you receive it, in any medium, provided that +you conspicuously and appropriately publish on each copy an +appropriate copyright notice and disclaimer of warranty; keep intact +all the notices that refer to this License and to the absence of any +warranty; and distribute a copy of this License along with the +Library. + + You may charge a fee for the physical act of transferring a copy, +and you may at your option offer warranty protection in exchange for a +fee. + + 2. 
You may modify your copy or copies of the Library or any portion +of it, thus forming a work based on the Library, and copy and +distribute such modifications or work under the terms of Section 1 +above, provided that you also meet all of these conditions: + + a) The modified work must itself be a software library. + + b) You must cause the files modified to carry prominent notices + stating that you changed the files and the date of any change. + + c) You must cause the whole of the work to be licensed at no + charge to all third parties under the terms of this License. + + d) If a facility in the modified Library refers to a function or a + table of data to be supplied by an application program that uses + the facility, other than as an argument passed when the facility + is invoked, then you must make a good faith effort to ensure that, + in the event an application does not supply such function or + table, the facility still operates, and performs whatever part of + its purpose remains meaningful. + + (For example, a function in a library to compute square roots has + a purpose that is entirely well-defined independent of the + application. Therefore, Subsection 2d requires that any + application-supplied function or table used by this function must + be optional: if the application does not supply it, the square + root function must still compute square roots.) + +These requirements apply to the modified work as a whole. If +identifiable sections of that work are not derived from the Library, +and can be reasonably considered independent and separate works in +themselves, then this License, and its terms, do not apply to those +sections when you distribute them as separate works. But when you +distribute the same sections as part of a whole which is a work based +on the Library, the distribution of the whole must be on the terms of +this License, whose permissions for other licensees extend to the +entire whole, and thus to each and every part regardless of who wrote +it. + +Thus, it is not the intent of this section to claim rights or contest +your rights to work written entirely by you; rather, the intent is to +exercise the right to control the distribution of derivative or +collective works based on the Library. + +In addition, mere aggregation of another work not based on the Library +with the Library (or with a work based on the Library) on a volume of +a storage or distribution medium does not bring the other work under +the scope of this License. + + 3. You may opt to apply the terms of the ordinary GNU General Public +License instead of this License to a given copy of the Library. To do +this, you must alter all the notices that refer to this License, so +that they refer to the ordinary GNU General Public License, version 2, +instead of to this License. (If a newer version than version 2 of the +ordinary GNU General Public License has appeared, then you can specify +that version instead if you wish.) Do not make any other change in +these notices. + + Once this change is made in a given copy, it is irreversible for +that copy, so the ordinary GNU General Public License applies to all +subsequent copies and derivative works made from that copy. + + This option is useful when you wish to copy part of the code of +the Library into a program that is not a library. + + 4. 
You may copy and distribute the Library (or a portion or +derivative of it, under Section 2) in object code or executable form +under the terms of Sections 1 and 2 above provided that you accompany +it with the complete corresponding machine-readable source code, which +must be distributed under the terms of Sections 1 and 2 above on a +medium customarily used for software interchange. + + If distribution of object code is made by offering access to copy +from a designated place, then offering equivalent access to copy the +source code from the same place satisfies the requirement to +distribute the source code, even though third parties are not +compelled to copy the source along with the object code. + + 5. A program that contains no derivative of any portion of the +Library, but is designed to work with the Library by being compiled or +linked with it, is called a "work that uses the Library". Such a +work, in isolation, is not a derivative work of the Library, and +therefore falls outside the scope of this License. + + However, linking a "work that uses the Library" with the Library +creates an executable that is a derivative of the Library (because it +contains portions of the Library), rather than a "work that uses the +library". The executable is therefore covered by this License. +Section 6 states terms for distribution of such executables. + + When a "work that uses the Library" uses material from a header file +that is part of the Library, the object code for the work may be a +derivative work of the Library even though the source code is not. +Whether this is true is especially significant if the work can be +linked without the Library, or if the work is itself a library. The +threshold for this to be true is not precisely defined by law. + + If such an object file uses only numerical parameters, data +structure layouts and accessors, and small macros and small inline +functions (ten lines or less in length), then the use of the object +file is unrestricted, regardless of whether it is legally a derivative +work. (Executables containing this object code plus portions of the +Library will still fall under Section 6.) + + Otherwise, if the work is a derivative of the Library, you may +distribute the object code for the work under the terms of Section 6. +Any executables containing that work also fall under Section 6, +whether or not they are linked directly with the Library itself. + + 6. As an exception to the Sections above, you may also combine or +link a "work that uses the Library" with the Library to produce a +work containing portions of the Library, and distribute that work +under terms of your choice, provided that the terms permit +modification of the work for the customer's own use and reverse +engineering for debugging such modifications. + + You must give prominent notice with each copy of the work that the +Library is used in it and that the Library and its use are covered by +this License. You must supply a copy of this License. If the work +during execution displays copyright notices, you must include the +copyright notice for the Library among them, as well as a reference +directing the user to the copy of this License. 
Also, you must do one +of these things: + + a) Accompany the work with the complete corresponding + machine-readable source code for the Library including whatever + changes were used in the work (which must be distributed under + Sections 1 and 2 above); and, if the work is an executable linked + with the Library, with the complete machine-readable "work that + uses the Library", as object code and/or source code, so that the + user can modify the Library and then relink to produce a modified + executable containing the modified Library. (It is understood + that the user who changes the contents of definitions files in the + Library will not necessarily be able to recompile the application + to use the modified definitions.) + + b) Use a suitable shared library mechanism for linking with the + Library. A suitable mechanism is one that (1) uses at run time a + copy of the library already present on the user's computer system, + rather than copying library functions into the executable, and (2) + will operate properly with a modified version of the library, if + the user installs one, as long as the modified version is + interface-compatible with the version that the work was made with. + + c) Accompany the work with a written offer, valid for at + least three years, to give the same user the materials + specified in Subsection 6a, above, for a charge no more + than the cost of performing this distribution. + + d) If distribution of the work is made by offering access to copy + from a designated place, offer equivalent access to copy the above + specified materials from the same place. + + e) Verify that the user has already received a copy of these + materials or that you have already sent this user a copy. + + For an executable, the required form of the "work that uses the +Library" must include any data and utility programs needed for +reproducing the executable from it. However, as a special exception, +the materials to be distributed need not include anything that is +normally distributed (in either source or binary form) with the major +components (compiler, kernel, and so on) of the operating system on +which the executable runs, unless that component itself accompanies +the executable. + + It may happen that this requirement contradicts the license +restrictions of other proprietary libraries that do not normally +accompany the operating system. Such a contradiction means you cannot +use both them and the Library together in an executable that you +distribute. + + 7. You may place library facilities that are a work based on the +Library side-by-side in a single library together with other library +facilities not covered by this License, and distribute such a combined +library, provided that the separate distribution of the work based on +the Library and of the other library facilities is otherwise +permitted, and provided that you do these two things: + + a) Accompany the combined library with a copy of the same work + based on the Library, uncombined with any other library + facilities. This must be distributed under the terms of the + Sections above. + + b) Give prominent notice with the combined library of the fact + that part of it is a work based on the Library, and explaining + where to find the accompanying uncombined form of the same work. + + 8. You may not copy, modify, sublicense, link with, or distribute +the Library except as expressly provided under this License. 
Any +attempt otherwise to copy, modify, sublicense, link with, or +distribute the Library is void, and will automatically terminate your +rights under this License. However, parties who have received copies, +or rights, from you under this License will not have their licenses +terminated so long as such parties remain in full compliance. + + 9. You are not required to accept this License, since you have not +signed it. However, nothing else grants you permission to modify or +distribute the Library or its derivative works. These actions are +prohibited by law if you do not accept this License. Therefore, by +modifying or distributing the Library (or any work based on the +Library), you indicate your acceptance of this License to do so, and +all its terms and conditions for copying, distributing or modifying +the Library or works based on it. + + 10. Each time you redistribute the Library (or any work based on the +Library), the recipient automatically receives a license from the +original licensor to copy, distribute, link with or modify the Library +subject to these terms and conditions. You may not impose any further +restrictions on the recipients' exercise of the rights granted herein. +You are not responsible for enforcing compliance by third parties with +this License. + + 11. If, as a consequence of a court judgment or allegation of patent +infringement or for any other reason (not limited to patent issues), +conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot +distribute so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you +may not distribute the Library at all. For example, if a patent +license would not permit royalty-free redistribution of the Library by +all those who receive copies directly or indirectly through you, then +the only way you could satisfy both it and this License would be to +refrain entirely from distribution of the Library. + +If any portion of this section is held invalid or unenforceable under any +particular circumstance, the balance of the section is intended to apply, +and the section as a whole is intended to apply in other circumstances. + +It is not the purpose of this section to induce you to infringe any +patents or other property right claims or to contest validity of any +such claims; this section has the sole purpose of protecting the +integrity of the free software distribution system which is +implemented by public license practices. Many people have made +generous contributions to the wide range of software distributed +through that system in reliance on consistent application of that +system; it is up to the author/donor to decide if he or she is willing +to distribute software through any other system and a licensee cannot +impose that choice. + +This section is intended to make thoroughly clear what is believed to +be a consequence of the rest of this License. + + 12. If the distribution and/or use of the Library is restricted in +certain countries either by patents or by copyrighted interfaces, the +original copyright holder who places the Library under this License may add +an explicit geographical distribution limitation excluding those countries, +so that distribution is permitted only in or among countries not thus +excluded. 
In such case, this License incorporates the limitation as if +written in the body of this License. + + 13. The Free Software Foundation may publish revised and/or new +versions of the Lesser General Public License from time to time. +Such new versions will be similar in spirit to the present version, +but may differ in detail to address new problems or concerns. + +Each version is given a distinguishing version number. If the Library +specifies a version number of this License which applies to it and +"any later version", you have the option of following the terms and +conditions either of that version or of any later version published by +the Free Software Foundation. If the Library does not specify a +license version number, you may choose any version ever published by +the Free Software Foundation. + + 14. If you wish to incorporate parts of the Library into other free +programs whose distribution conditions are incompatible with these, +write to the author to ask for permission. For software which is +copyrighted by the Free Software Foundation, write to the Free +Software Foundation; we sometimes make exceptions for this. Our +decision will be guided by the two goals of preserving the free status +of all derivatives of our free software and of promoting the sharing +and reuse of software generally. + + NO WARRANTY + + 15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO +WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. +EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR +OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY +KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE +LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME +THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN +WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY +AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU +FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR +CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE +LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING +RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A +FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF +SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH +DAMAGES. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Libraries + + If you develop a new library, and you want it to be of the greatest +possible use to the public, we recommend making it free software that +everyone can redistribute and change. You can do so by permitting +redistribution under these terms (or, alternatively, under the terms of the +ordinary General Public License). + + To apply these terms, attach the following notices to the library. It is +safest to attach them to the start of each source file to most effectively +convey the exclusion of warranty; and each file should have at least the +"copyright" line and a pointer to where the full notice is found. 
+ + + Copyright (C) + + This library is free software; you can redistribute it and/or + modify it under the terms of the GNU Lesser General Public + License as published by the Free Software Foundation; either + version 2.1 of the License, or (at your option) any later version. + + This library is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + Lesser General Public License for more details. + + You should have received a copy of the GNU Lesser General Public + License along with this library; if not, write to the Free Software + Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + +Also add information on how to contact you by electronic and paper mail. + +You should also get your employer (if you work as a programmer) or your +school, if any, to sign a "copyright disclaimer" for the library, if +necessary. Here is a sample; alter the names: + + Yoyodyne, Inc., hereby disclaims all copyright interest in the + library `Frob' (a library for tweaking knobs) written by James Random Hacker. + + , 1 April 1990 + Ty Coon, President of Vice + +That's all there is to it! + + diff --git a/backend/src/apiserver/visualization/third_party_licenses/pymongo.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/pymongo.LICENSE new file mode 100644 index 00000000000..261eeb9e9f8 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/pymongo.LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/backend/src/apiserver/visualization/third_party_licenses/pyparsing.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/pyparsing.LICENSE new file mode 100644 index 00000000000..1bf98523e33 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/pyparsing.LICENSE @@ -0,0 +1,18 @@ +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/backend/src/apiserver/visualization/third_party_licenses/pyrsistent.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/pyrsistent.LICENSE new file mode 100644 index 00000000000..6609e4c05a3 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/pyrsistent.LICENSE @@ -0,0 +1,22 @@ +Copyright (c) 2019 Tobias Gustafsson + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/backend/src/apiserver/visualization/third_party_licenses/python-dateutil.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/python-dateutil.LICENSE new file mode 100644 index 00000000000..1e65815cf0b --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/python-dateutil.LICENSE @@ -0,0 +1,54 @@ +Copyright 2017- Paul Ganssle +Copyright 2017- dateutil contributors (see AUTHORS file) + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +The above license applies to all contributions after 2017-12-01, as well as +all contributions that have been re-licensed (see AUTHORS file for the list of +contributors who have re-licensed their code). +-------------------------------------------------------------------------------- +dateutil - Extensions to the standard Python datetime module. + +Copyright (c) 2003-2011 - Gustavo Niemeyer +Copyright (c) 2012-2014 - Tomi Pieviläinen +Copyright (c) 2014-2016 - Yaron de Leeuw +Copyright (c) 2015- - Paul Ganssle +Copyright (c) 2015- - dateutil contributors (see AUTHORS file) + +All rights reserved. 
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+    * Redistributions of source code must retain the above copyright notice,
+      this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above copyright notice,
+      this list of conditions and the following disclaimer in the documentation
+      and/or other materials provided with the distribution.
+    * Neither the name of the copyright holder nor the names of its
+      contributors may be used to endorse or promote products derived from
+      this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+The above BSD License Applies to all code, even that also covered by Apache 2.0.
\ No newline at end of file
diff --git a/backend/src/apiserver/visualization/third_party_licenses/pytz.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/pytz.LICENSE
new file mode 100644
index 00000000000..3d0df6b9c07
--- /dev/null
+++ b/backend/src/apiserver/visualization/third_party_licenses/pytz.LICENSE
@@ -0,0 +1,680 @@
+pytz - World Timezone Definitions for Python
+============================================
+
+Author: Stuart Bishop <stuart@stuartbishop.net>
+
+Introduction
+------------
+
+pytz brings the Olson tz database into Python. This library allows accurate and cross-platform timezone calculations using Python 2.4 or higher. It also solves the issue of ambiguous times at the end of daylight saving time, which you can read more about in the Python Library Reference (datetime.tzinfo).
+
+Almost all of the Olson timezones are supported.
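+
+(Illustrative aside, not part of the original pytz page: the zone lists can be inspected directly; a minimal sketch, assuming a standard pytz install:)
+
+>>> import pytz
+>>> 'Europe/Amsterdam' in pytz.all_timezones
+True
+>>> len(pytz.common_timezones) < len(pytz.all_timezones)
+True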
+
+Note
+
+This library differs from the documented Python API for tzinfo implementations; if you want to create local wallclock times you need to use the localize() method documented in this document. In addition, if you perform date arithmetic on local times that cross DST boundaries, the result may be in an incorrect timezone (i.e. subtract 1 minute from 2002-10-27 1:00 EST and you get 2002-10-27 0:59 EST instead of the correct 2002-10-27 1:59 EDT). A normalize() method is provided to correct this. Unfortunately these issues cannot be resolved without modifying the Python datetime implementation (see PEP-431).
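+
+(Illustrative aside, not part of the original pytz page: a minimal doctest-style sketch of the arithmetic pitfall described in the note above, and the normalize() fix:)
+
+>>> from datetime import datetime, timedelta
+>>> from pytz import timezone
+>>> eastern = timezone('US/Eastern')
+>>> fmt = '%Y-%m-%d %H:%M:%S %Z%z'
+>>> est = eastern.localize(datetime(2002, 10, 27, 1, 0, 0), is_dst=False)
+>>> (est - timedelta(minutes=1)).strftime(fmt)  # naive arithmetic keeps EST: wrong wall clock
+'2002-10-27 00:59:00 EST-0500'
+>>> eastern.normalize(est - timedelta(minutes=1)).strftime(fmt)
+'2002-10-27 01:59:00 EDT-0400'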
+
+Installation
+------------
+
+This package can either be installed from a .egg file using setuptools, or from the tarball using the standard Python distutils.
+
+If you are installing from a tarball, run the following command as an administrative user:
+
+    python setup.py install
+
+If you are installing using setuptools, you don't even need to download anything as the latest version will be downloaded for you from the Python package index:
+
+    easy_install --upgrade pytz
+
+If you already have the .egg file, you can use that too:
+
+    easy_install pytz-2008g-py2.6.egg
+
+Example & Usage
+---------------
+
+Localized times and date arithmetic
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+>>> from datetime import datetime, timedelta
+>>> from pytz import timezone
+>>> import pytz
+>>> utc = pytz.utc
+>>> utc.zone
+'UTC'
+>>> eastern = timezone('US/Eastern')
+>>> eastern.zone
+'US/Eastern'
+>>> amsterdam = timezone('Europe/Amsterdam')
+>>> fmt = '%Y-%m-%d %H:%M:%S %Z%z'
+
+This library only supports two ways of building a localized time. The first is to use the localize() method provided by the pytz library. This is used to localize a naive datetime (a datetime with no timezone information):
+
+>>> loc_dt = eastern.localize(datetime(2002, 10, 27, 6, 0, 0))
+>>> print(loc_dt.strftime(fmt))
+2002-10-27 06:00:00 EST-0500
+
+The second way of building a localized time is by converting an existing localized time using the standard astimezone() method:
+
+>>> ams_dt = loc_dt.astimezone(amsterdam)
+>>> ams_dt.strftime(fmt)
+'2002-10-27 12:00:00 CET+0100'
+
+Unfortunately, using the tzinfo argument of the standard datetime constructors "does not work" with pytz for many timezones.
+
+>>> datetime(2002, 10, 27, 12, 0, 0, tzinfo=amsterdam).strftime(fmt)
+'2002-10-27 12:00:00 LMT+0020'
+
+It is safe for timezones without daylight saving transitions though, such as UTC:
+
+>>> datetime(2002, 10, 27, 12, 0, 0, tzinfo=pytz.utc).strftime(fmt)
+'2002-10-27 12:00:00 UTC+0000'
+
+The preferred way of dealing with times is to always work in UTC, converting to localtime only when generating output to be read by humans.
+
+>>> utc_dt = datetime(2002, 10, 27, 6, 0, 0, tzinfo=utc)
+>>> loc_dt = utc_dt.astimezone(eastern)
+>>> loc_dt.strftime(fmt)
+'2002-10-27 01:00:00 EST-0500'
+
+This library also allows you to do date arithmetic using local times, although it is more complicated than working in UTC as you need to use the normalize() method to handle daylight saving time and other timezone transitions. In this example, loc_dt is set to the instant when daylight saving time ends in the US/Eastern timezone.
+
+>>> before = loc_dt - timedelta(minutes=10)
+>>> before.strftime(fmt)
+'2002-10-27 00:50:00 EST-0500'
+>>> eastern.normalize(before).strftime(fmt)
+'2002-10-27 01:50:00 EDT-0400'
+>>> after = eastern.normalize(before + timedelta(minutes=20))
+>>> after.strftime(fmt)
+'2002-10-27 01:10:00 EST-0500'
+
+Creating local times is also tricky, and is the reason why working with local times is not recommended. Unfortunately, you cannot just pass a tzinfo argument when constructing a datetime (see the next section for more details):
+
+>>> dt = datetime(2002, 10, 27, 1, 30, 0)
+>>> dt1 = eastern.localize(dt, is_dst=True)
+>>> dt1.strftime(fmt)
+'2002-10-27 01:30:00 EDT-0400'
+>>> dt2 = eastern.localize(dt, is_dst=False)
+>>> dt2.strftime(fmt)
+'2002-10-27 01:30:00 EST-0500'
+
+Converting between timezones is more easily done, using the standard astimezone method.
+
+>>> utc_dt = utc.localize(datetime.utcfromtimestamp(1143408899))
+>>> utc_dt.strftime(fmt)
+'2006-03-26 21:34:59 UTC+0000'
+>>> au_tz = timezone('Australia/Sydney')
+>>> au_dt = utc_dt.astimezone(au_tz)
+>>> au_dt.strftime(fmt)
+'2006-03-27 08:34:59 AEDT+1100'
+>>> utc_dt2 = au_dt.astimezone(utc)
+>>> utc_dt2.strftime(fmt)
+'2006-03-26 21:34:59 UTC+0000'
+>>> utc_dt == utc_dt2
+True
+
+You can take shortcuts when dealing with the UTC side of timezone conversions. normalize() and localize() are not really necessary when there are no daylight saving time transitions to deal with.
+
+>>> utc_dt = datetime.utcfromtimestamp(1143408899).replace(tzinfo=utc)
+>>> utc_dt.strftime(fmt)
+'2006-03-26 21:34:59 UTC+0000'
+>>> au_tz = timezone('Australia/Sydney')
+>>> au_dt = au_tz.normalize(utc_dt.astimezone(au_tz))
+>>> au_dt.strftime(fmt)
+'2006-03-27 08:34:59 AEDT+1100'
+>>> utc_dt2 = au_dt.astimezone(utc)
+>>> utc_dt2.strftime(fmt)
+'2006-03-26 21:34:59 UTC+0000'
+
+tzinfo API
+----------
+
+The tzinfo instances returned by the timezone() function have been extended to cope with ambiguous times by adding an is_dst parameter to the utcoffset(), dst() and tzname() methods.
+
+>>> tz = timezone('America/St_Johns')
+
+>>> normal = datetime(2009, 9, 1)
+>>> ambiguous = datetime(2009, 10, 31, 23, 30)
+
+The is_dst parameter is ignored for most timestamps. It is only used during DST transition ambiguous periods to resolve that ambiguity.
+
+>>> tz.utcoffset(normal, is_dst=True)
+datetime.timedelta(-1, 77400)
+>>> tz.dst(normal, is_dst=True)
+datetime.timedelta(0, 3600)
+>>> tz.tzname(normal, is_dst=True)
+'NDT'
+
+>>> tz.utcoffset(ambiguous, is_dst=True)
+datetime.timedelta(-1, 77400)
+>>> tz.dst(ambiguous, is_dst=True)
+datetime.timedelta(0, 3600)
+>>> tz.tzname(ambiguous, is_dst=True)
+'NDT'
+
+>>> tz.utcoffset(normal, is_dst=False)
+datetime.timedelta(-1, 77400)
+>>> tz.dst(normal, is_dst=False)
+datetime.timedelta(0, 3600)
+>>> tz.tzname(normal, is_dst=False)
+'NDT'
+
+>>> tz.utcoffset(ambiguous, is_dst=False)
+datetime.timedelta(-1, 73800)
+>>> tz.dst(ambiguous, is_dst=False)
+datetime.timedelta(0)
+>>> tz.tzname(ambiguous, is_dst=False)
+'NST'
+
+If is_dst is not specified, ambiguous timestamps will raise a pytz.exceptions.AmbiguousTimeError exception.
+
+>>> tz.utcoffset(normal)
+datetime.timedelta(-1, 77400)
+>>> tz.dst(normal)
+datetime.timedelta(0, 3600)
+>>> tz.tzname(normal)
+'NDT'
+
+>>> import pytz.exceptions
+>>> try:
+...     tz.utcoffset(ambiguous)
+... except pytz.exceptions.AmbiguousTimeError:
+...     print('pytz.exceptions.AmbiguousTimeError: %s' % ambiguous)
+pytz.exceptions.AmbiguousTimeError: 2009-10-31 23:30:00
+>>> try:
+...     tz.dst(ambiguous)
+... except pytz.exceptions.AmbiguousTimeError:
+...     print('pytz.exceptions.AmbiguousTimeError: %s' % ambiguous)
+pytz.exceptions.AmbiguousTimeError: 2009-10-31 23:30:00
+>>> try:
+...     tz.tzname(ambiguous)
+... except pytz.exceptions.AmbiguousTimeError:
+...     print('pytz.exceptions.AmbiguousTimeError: %s' % ambiguous)
+pytz.exceptions.AmbiguousTimeError: 2009-10-31 23:30:00
+
+Problems with Localtime
+-----------------------
+
+The major problem we have to deal with is that certain datetimes may occur twice in a year. For example, in the US/Eastern timezone on the last Sunday morning in October, the following sequence happens:
+
+- 01:00 EDT occurs
+- 1 hour later, instead of 2:00am, the clock is turned back 1 hour and 01:00 happens again (this time 01:00 EST)
+
+In fact, every instant between 01:00 and 02:00 occurs twice. This means that if you try to create a time in the 'US/Eastern' timezone using the standard datetime syntax, there is no way to specify whether you meant before or after the end-of-daylight-saving-time transition. Using the pytz custom syntax, the best you can do is make an educated guess:
+
+>>> loc_dt = eastern.localize(datetime(2002, 10, 27, 1, 30, 00))
+>>> loc_dt.strftime(fmt)
+'2002-10-27 01:30:00 EST-0500'
+
+
+

As you can see, the system has chosen one for you and there is a 50% +chance of it being out by one hour. For some applications, this does +not matter. However, if you are trying to schedule meetings with people +in different timezones or analyze log files, it is not acceptable.

+

The best and simplest solution is to stick with using UTC. The pytz +package encourages using UTC for internal timezone representation by +including a special UTC implementation based on the standard Python +reference implementation in the Python documentation.

+

The UTC timezone unpickles to be the same instance, and pickles to a +smaller size than other pytz tzinfo instances. The UTC implementation +can be obtained as pytz.utc, pytz.UTC, or pytz.timezone(‘UTC’).

+
>>> import pickle, pytz
+>>> dt = datetime(2005, 3, 1, 14, 13, 21, tzinfo=utc)
+>>> naive = dt.replace(tzinfo=None)
+>>> p = pickle.dumps(dt, 1)
+>>> naive_p = pickle.dumps(naive, 1)
+>>> len(p) - len(naive_p)
+17
+>>> new = pickle.loads(p)
+>>> new == dt
+True
+>>> new is dt
+False
+>>> new.tzinfo is dt.tzinfo
+True
+>>> pytz.utc is pytz.UTC is pytz.timezone('UTC')
+True
+
+
+

Note that some other timezones are commonly thought of as the same (GMT, +Greenwich, Universal, etc.). The definition of UTC is distinct from these +other timezones, and they are not equivalent. For this reason, they will +not compare the same in Python.

+
>>> utc == pytz.timezone('GMT')
+False
+
+
+

See the section What is UTC, below.

+

If you insist on working with local times, this library provides a +facility for constructing them unambiguously:

+
>>> loc_dt = datetime(2002, 10, 27, 1, 30, 00)
+>>> edt_dt = eastern.localize(loc_dt, is_dst=True)
+>>> est_dt = eastern.localize(loc_dt, is_dst=False)
+>>> print(edt_dt.strftime(fmt) + ' / ' + est_dt.strftime(fmt))
+2002-10-27 01:30:00 EDT-0400 / 2002-10-27 01:30:00 EST-0500
+
+
+

If you pass None as the is_dst flag to localize(), pytz will refuse to +guess and raise exceptions if you try to build ambiguous or non-existent +times.

+

For example, 1:30am on 27th Oct 2002 happened twice in the US/Eastern +timezone when the clocks were put back at the end of Daylight Saving +Time:

+
>>> dt = datetime(2002, 10, 27, 1, 30, 00)
+>>> try:
+...     eastern.localize(dt, is_dst=None)
+... except pytz.exceptions.AmbiguousTimeError:
+...     print('pytz.exceptions.AmbiguousTimeError: %s' % dt)
+pytz.exceptions.AmbiguousTimeError: 2002-10-27 01:30:00
+
+
+

Similarly, 2:30am on 7th April 2002 never happened at all in the +US/Eastern timezone, as the clocks were put forward at 2:00am, skipping +the entire hour:

+
>>> dt = datetime(2002, 4, 7, 2, 30, 00)
+>>> try:
+...     eastern.localize(dt, is_dst=None)
+... except pytz.exceptions.NonExistentTimeError:
+...     print('pytz.exceptions.NonExistentTimeError: %s' % dt)
+pytz.exceptions.NonExistentTimeError: 2002-04-07 02:30:00
+
+
+

Both of these exceptions share a common base class to make error handling +easier:

+
>>> isinstance(pytz.AmbiguousTimeError(), pytz.InvalidTimeError)
+True
+>>> isinstance(pytz.NonExistentTimeError(), pytz.InvalidTimeError)
+True
+
+
+
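
Since both exceptions derive from pytz.InvalidTimeError, a single except +clause can cover the ambiguous and the non-existent case together. Here is a +minimal sketch of that pattern (safe_localize is a hypothetical helper name +used purely for illustration, not part of pytz):

>>> def safe_localize(tz, dt):
+...     try:
+...         return tz.localize(dt, is_dst=None)
+...     except pytz.InvalidTimeError:  # ambiguous or non-existent
+...         return None
+>>> safe_localize(eastern, datetime(2002, 10, 27, 1, 30)) is None
+True
+>>> safe_localize(eastern, datetime(2002, 4, 7, 2, 30)) is None
+True
+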

A special case is where countries change their timezone definitions +with no daylight savings time switch. For example, in 1915 Warsaw +switched from Warsaw time to Central European time with no daylight savings +transition. So at the stroke of midnight on August 5th 1915 the clocks +were wound back 24 minutes, creating an ambiguous time period that cannot +be specified without referring to the timezone abbreviation or the +actual UTC offset. In this case midnight happened twice, neither time +during a daylight saving time period. pytz handles this transition by +treating the ambiguous period before the switch as daylight savings +time, and the ambiguous period after as standard time.

+
>>> warsaw = pytz.timezone('Europe/Warsaw')
+>>> amb_dt1 = warsaw.localize(datetime(1915, 8, 4, 23, 59, 59), is_dst=True)
+>>> amb_dt1.strftime(fmt)
+'1915-08-04 23:59:59 WMT+0124'
+>>> amb_dt2 = warsaw.localize(datetime(1915, 8, 4, 23, 59, 59), is_dst=False)
+>>> amb_dt2.strftime(fmt)
+'1915-08-04 23:59:59 CET+0100'
+>>> switch_dt = warsaw.localize(datetime(1915, 8, 5, 00, 00, 00), is_dst=False)
+>>> switch_dt.strftime(fmt)
+'1915-08-05 00:00:00 CET+0100'
+>>> str(switch_dt - amb_dt1)
+'0:24:01'
+>>> str(switch_dt - amb_dt2)
+'0:00:01'
+
+
+

The best way of creating a time during an ambiguous time period is +by converting from another timezone such as UTC:

+
>>> utc_dt = datetime(1915, 8, 4, 22, 36, tzinfo=pytz.utc)
+>>> utc_dt.astimezone(warsaw).strftime(fmt)
+'1915-08-04 23:36:00 CET+0100'
+
+
+

The standard Python way of handling all these ambiguities is not to +handle them, as demonstrated in this example using the US/Eastern +timezone definition from the Python documentation (note that this +implementation only works for dates between 1987 and 2006 - it is +included for tests only!):

+
>>> from pytz.reference import Eastern # pytz.reference only for tests
+>>> dt = datetime(2002, 10, 27, 0, 30, tzinfo=Eastern)
+>>> str(dt)
+'2002-10-27 00:30:00-04:00'
+>>> str(dt + timedelta(hours=1))
+'2002-10-27 01:30:00-05:00'
+>>> str(dt + timedelta(hours=2))
+'2002-10-27 02:30:00-05:00'
+>>> str(dt + timedelta(hours=3))
+'2002-10-27 03:30:00-05:00'
+
+
+

Notice the first two results? At first glance you might think they are +correct, but taking the UTC offset into account you find that they are +actually two hours apart instead of the 1 hour we asked for.

+
>>> from pytz.reference import UTC # pytz.reference only for tests
+>>> str(dt.astimezone(UTC))
+'2002-10-27 04:30:00+00:00'
+>>> str((dt + timedelta(hours=1)).astimezone(UTC))
+'2002-10-27 06:30:00+00:00'
+
+
+
+
+

Country Information

+

A mechanism is provided to access the timezones commonly in use +for a particular country, looked up using the ISO 3166 country code. +It returns a list of strings that can be used to retrieve the relevant +tzinfo instance using pytz.timezone():

+
>>> print(' '.join(pytz.country_timezones['nz']))
+Pacific/Auckland Pacific/Chatham
+
+
+
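
The names can be fed straight to pytz.timezone() to obtain the tzinfo +instances, as in this minimal added sketch (it relies on str() of a pytz +tzinfo returning its zone name):

>>> for name in pytz.country_timezones['nz']:
+...     print(pytz.timezone(name))
+Pacific/Auckland
+Pacific/Chatham
+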

The Olson database comes with an ISO 3166 country code to English country +name mapping that pytz exposes as a dictionary:

+
>>> print(pytz.country_names['nz'])
+New Zealand
+
+
+
+
+

What is UTC

+

‘UTC’ is Coordinated Universal Time. It is a successor to, but distinct +from, Greenwich Mean Time (GMT) and the various definitions of Universal +Time. UTC is now the worldwide standard for regulating clocks and time +measurement.

+

All other timezones are defined relative to UTC, and include offsets like +UTC+0800 - hours to add or subtract from UTC to derive the local time. No +daylight saving time occurs in UTC, making it a useful timezone to perform +date arithmetic without worrying about the confusion and ambiguities caused +by daylight saving time transitions, your country changing its timezone, or +mobile computers that roam through multiple timezones.
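
A minimal sketch of this pattern (arbitrary illustrative timestamps, and +reusing the fmt format string from the examples above): perform the +arithmetic on the UTC datetime and convert to a local zone only for +display, so the US/Eastern fall-back transition is handled correctly.

>>> from datetime import datetime, timedelta
+>>> import pytz
+>>> utc_dt = pytz.utc.localize(datetime(2002, 10, 27, 5, 30))
+>>> eastern = pytz.timezone('US/Eastern')
+>>> utc_dt.astimezone(eastern).strftime(fmt)
+'2002-10-27 01:30:00 EDT-0400'
+>>> (utc_dt + timedelta(hours=1)).astimezone(eastern).strftime(fmt)
+'2002-10-27 01:30:00 EST-0500'
+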

+
+
+

Helpers

+

There are two lists of timezones provided.

+

all_timezones is the exhaustive list of the timezone names that can +be used.

+
>>> from pytz import all_timezones
+>>> len(all_timezones) >= 500
+True
+>>> 'Etc/Greenwich' in all_timezones
+True
+
+
+

common_timezones is a list of useful, current timezones. It doesn’t +contain deprecated zones or historical zones, except for a few I’ve +deemed in common usage, such as US/Eastern (open a bug report if you +think other timezones are deserving of being included here). It is also +a sequence of strings.

+
>>> from pytz import common_timezones
+>>> len(common_timezones) < len(all_timezones)
+True
+>>> 'Etc/Greenwich' in common_timezones
+False
+>>> 'Australia/Melbourne' in common_timezones
+True
+>>> 'US/Eastern' in common_timezones
+True
+>>> 'Canada/Eastern' in common_timezones
+True
+>>> 'US/Pacific-New' in all_timezones
+True
+>>> 'US/Pacific-New' in common_timezones
+False
+
+
+

Both common_timezones and all_timezones are alphabetically +sorted:

+
>>> common_timezones_dupe = common_timezones[:]
+>>> common_timezones_dupe.sort()
+>>> common_timezones == common_timezones_dupe
+True
+>>> all_timezones_dupe = all_timezones[:]
+>>> all_timezones_dupe.sort()
+>>> all_timezones == all_timezones_dupe
+True
+
+
+

all_timezones and common_timezones are also available as sets.

+
>>> from pytz import all_timezones_set, common_timezones_set
+>>> 'US/Eastern' in all_timezones_set
+True
+>>> 'US/Eastern' in common_timezones_set
+True
+>>> 'Australia/Victoria' in common_timezones_set
+False
+
+
+

You can also retrieve lists of timezones used by particular countries +using the country_timezones() function. It requires an ISO-3166 +two letter country code.

+
>>> from pytz import country_timezones
+>>> print(' '.join(country_timezones('ch')))
+Europe/Zurich
+>>> print(' '.join(country_timezones('CH')))
+Europe/Zurich
+
+
+
+
+

Internationalization - i18n/l10n

+

Pytz is an interface to the IANA database, which uses ASCII names. The Unicode Consortium’s Unicode Locales (CLDR) +project provides translations. Thomas Khyn’s +l18n package can be used to access +these translations from Python.

+
+
+

License

+

MIT license.

+

This code is also available as part of Zope 3 under the Zope Public +License, Version 2.1 (ZPL).

+

I’m happy to relicense this code if necessary for inclusion in other +open source projects.

+
+
+

Latest Versions

+

This package will be updated after releases of the Olson timezone +database. The latest version can be downloaded from the Python Package +Index. The code that is used +to generate this distribution is hosted on launchpad.net and available +using the Bazaar version control system:

+
bzr branch lp:pytz
+
+
+

Announcements of new releases are made on +Launchpad, and via the Atom feed +hosted there.

+
+
+

Bugs, Feature Requests & Patches

+

Bugs can be reported using Launchpad.

+
+
+

Issues & Limitations

+
  • Offsets from UTC are rounded to the nearest whole minute, so timezones +such as Europe/Amsterdam pre 1937 will be up to 30 seconds out. This +is a limitation of the Python datetime library.
  • If you think a timezone definition is incorrect, I probably can’t fix +it. pytz is a direct translation of the Olson timezone database, and +changes to the timezone definitions need to be made to this source. +If you find errors they should be reported to the time zone mailing +list, linked from http://www.iana.org/time-zones.
+
+
+

Further Reading

+

More info than you want to know about timezones: +http://www.twinsun.com/tz/tz-link.htm

+
+
+

Contact

+

Stuart Bishop <stuart@stuartbishop.net>

+
+
+ + + + + + + \ No newline at end of file diff --git a/backend/src/apiserver/visualization/third_party_licenses/pyxdg.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/pyxdg.LICENSE new file mode 100644 index 00000000000..161a3d1d47b --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/pyxdg.LICENSE @@ -0,0 +1,482 @@ + GNU LIBRARY GENERAL PUBLIC LICENSE + Version 2, June 1991 + + Copyright (C) 1991 Free Software Foundation, Inc. + 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + +[This is the first released version of the library GPL. It is + numbered 2 because it goes with version 2 of the ordinary GPL.] + + Preamble + + The licenses for most software are designed to take away your +freedom to share and change it. By contrast, the GNU General Public +Licenses are intended to guarantee your freedom to share and change +free software--to make sure the software is free for all its users. + + This license, the Library General Public License, applies to some +specially designated Free Software Foundation software, and to any +other libraries whose authors decide to use it. You can use it for +your libraries, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +this service if you wish), that you receive source code or can get it +if you want it, that you can change the software or use pieces of it +in new free programs; and that you know you can do these things. + + To protect your rights, we need to make restrictions that forbid +anyone to deny you these rights or to ask you to surrender the rights. +These restrictions translate to certain responsibilities for you if +you distribute copies of the library, or if you modify it. + + For example, if you distribute copies of the library, whether gratis +or for a fee, you must give the recipients all the rights that we gave +you. You must make sure that they, too, receive or can get the source +code. If you link a program with the library, you must provide +complete object files to the recipients so that they can relink them +with the library, after making changes to the library and recompiling +it. And you must show them these terms so they know their rights. + + Our method of protecting your rights has two steps: (1) copyright +the library, and (2) offer you this license which gives you legal +permission to copy, distribute and/or modify the library. + + Also, for each distributor's protection, we want to make certain +that everyone understands that there is no warranty for this free +library. If the library is modified by someone else and passed on, we +want its recipients to know that what they have is not the original +version, so that any problems introduced by others will not reflect on +the original authors' reputations. + + Finally, any free program is threatened constantly by software +patents. We wish to avoid the danger that companies distributing free +software will individually obtain patent licenses, thus in effect +transforming the program into proprietary software. To prevent this, +we have made it clear that any patent must be licensed for everyone's +free use or not licensed at all. 
+ + Most GNU software, including some libraries, is covered by the ordinary +GNU General Public License, which was designed for utility programs. This +license, the GNU Library General Public License, applies to certain +designated libraries. This license is quite different from the ordinary +one; be sure to read it in full, and don't assume that anything in it is +the same as in the ordinary license. + + The reason we have a separate public license for some libraries is that +they blur the distinction we usually make between modifying or adding to a +program and simply using it. Linking a program with a library, without +changing the library, is in some sense simply using the library, and is +analogous to running a utility program or application program. However, in +a textual and legal sense, the linked executable is a combined work, a +derivative of the original library, and the ordinary General Public License +treats it as such. + + Because of this blurred distinction, using the ordinary General +Public License for libraries did not effectively promote software +sharing, because most developers did not use the libraries. We +concluded that weaker conditions might promote sharing better. + + However, unrestricted linking of non-free programs would deprive the +users of those programs of all benefit from the free status of the +libraries themselves. This Library General Public License is intended to +permit developers of non-free programs to use free libraries, while +preserving your freedom as a user of such programs to change the free +libraries that are incorporated in them. (We have not seen how to achieve +this as regards changes in header files, but we have achieved it as regards +changes in the actual functions of the Library.) The hope is that this +will lead to faster development of free libraries. + + The precise terms and conditions for copying, distribution and +modification follow. Pay close attention to the difference between a +"work based on the library" and a "work that uses the library". The +former contains code derived from the library, while the latter only +works together with the library. + + Note that it is possible for a library to be covered by the ordinary +General Public License rather than by this special one. + + GNU LIBRARY GENERAL PUBLIC LICENSE + TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + + 0. This License Agreement applies to any software library which +contains a notice placed by the copyright holder or other authorized +party saying it may be distributed under the terms of this Library +General Public License (also called "this License"). Each licensee is +addressed as "you". + + A "library" means a collection of software functions and/or data +prepared so as to be conveniently linked with application programs +(which use some of those functions and data) to form executables. + + The "Library", below, refers to any such software library or work +which has been distributed under these terms. A "work based on the +Library" means either the Library or any derivative work under +copyright law: that is to say, a work containing the Library or a +portion of it, either verbatim or with modifications and/or translated +straightforwardly into another language. (Hereinafter, translation is +included without limitation in the term "modification".) + + "Source code" for a work means the preferred form of the work for +making modifications to it. 
For a library, complete source code means +all the source code for all modules it contains, plus any associated +interface definition files, plus the scripts used to control compilation +and installation of the library. + + Activities other than copying, distribution and modification are not +covered by this License; they are outside its scope. The act of +running a program using the Library is not restricted, and output from +such a program is covered only if its contents constitute a work based +on the Library (independent of the use of the Library in a tool for +writing it). Whether that is true depends on what the Library does +and what the program that uses the Library does. + + 1. You may copy and distribute verbatim copies of the Library's +complete source code as you receive it, in any medium, provided that +you conspicuously and appropriately publish on each copy an +appropriate copyright notice and disclaimer of warranty; keep intact +all the notices that refer to this License and to the absence of any +warranty; and distribute a copy of this License along with the +Library. + + You may charge a fee for the physical act of transferring a copy, +and you may at your option offer warranty protection in exchange for a +fee. + + 2. You may modify your copy or copies of the Library or any portion +of it, thus forming a work based on the Library, and copy and +distribute such modifications or work under the terms of Section 1 +above, provided that you also meet all of these conditions: + + a) The modified work must itself be a software library. + + b) You must cause the files modified to carry prominent notices + stating that you changed the files and the date of any change. + + c) You must cause the whole of the work to be licensed at no + charge to all third parties under the terms of this License. + + d) If a facility in the modified Library refers to a function or a + table of data to be supplied by an application program that uses + the facility, other than as an argument passed when the facility + is invoked, then you must make a good faith effort to ensure that, + in the event an application does not supply such function or + table, the facility still operates, and performs whatever part of + its purpose remains meaningful. + + (For example, a function in a library to compute square roots has + a purpose that is entirely well-defined independent of the + application. Therefore, Subsection 2d requires that any + application-supplied function or table used by this function must + be optional: if the application does not supply it, the square + root function must still compute square roots.) + +These requirements apply to the modified work as a whole. If +identifiable sections of that work are not derived from the Library, +and can be reasonably considered independent and separate works in +themselves, then this License, and its terms, do not apply to those +sections when you distribute them as separate works. But when you +distribute the same sections as part of a whole which is a work based +on the Library, the distribution of the whole must be on the terms of +this License, whose permissions for other licensees extend to the +entire whole, and thus to each and every part regardless of who wrote +it. + +Thus, it is not the intent of this section to claim rights or contest +your rights to work written entirely by you; rather, the intent is to +exercise the right to control the distribution of derivative or +collective works based on the Library. 
+ +In addition, mere aggregation of another work not based on the Library +with the Library (or with a work based on the Library) on a volume of +a storage or distribution medium does not bring the other work under +the scope of this License. + + 3. You may opt to apply the terms of the ordinary GNU General Public +License instead of this License to a given copy of the Library. To do +this, you must alter all the notices that refer to this License, so +that they refer to the ordinary GNU General Public License, version 2, +instead of to this License. (If a newer version than version 2 of the +ordinary GNU General Public License has appeared, then you can specify +that version instead if you wish.) Do not make any other change in +these notices. + + Once this change is made in a given copy, it is irreversible for +that copy, so the ordinary GNU General Public License applies to all +subsequent copies and derivative works made from that copy. + + This option is useful when you wish to copy part of the code of +the Library into a program that is not a library. + + 4. You may copy and distribute the Library (or a portion or +derivative of it, under Section 2) in object code or executable form +under the terms of Sections 1 and 2 above provided that you accompany +it with the complete corresponding machine-readable source code, which +must be distributed under the terms of Sections 1 and 2 above on a +medium customarily used for software interchange. + + If distribution of object code is made by offering access to copy +from a designated place, then offering equivalent access to copy the +source code from the same place satisfies the requirement to +distribute the source code, even though third parties are not +compelled to copy the source along with the object code. + + 5. A program that contains no derivative of any portion of the +Library, but is designed to work with the Library by being compiled or +linked with it, is called a "work that uses the Library". Such a +work, in isolation, is not a derivative work of the Library, and +therefore falls outside the scope of this License. + + However, linking a "work that uses the Library" with the Library +creates an executable that is a derivative of the Library (because it +contains portions of the Library), rather than a "work that uses the +library". The executable is therefore covered by this License. +Section 6 states terms for distribution of such executables. + + When a "work that uses the Library" uses material from a header file +that is part of the Library, the object code for the work may be a +derivative work of the Library even though the source code is not. +Whether this is true is especially significant if the work can be +linked without the Library, or if the work is itself a library. The +threshold for this to be true is not precisely defined by law. + + If such an object file uses only numerical parameters, data +structure layouts and accessors, and small macros and small inline +functions (ten lines or less in length), then the use of the object +file is unrestricted, regardless of whether it is legally a derivative +work. (Executables containing this object code plus portions of the +Library will still fall under Section 6.) + + Otherwise, if the work is a derivative of the Library, you may +distribute the object code for the work under the terms of Section 6. +Any executables containing that work also fall under Section 6, +whether or not they are linked directly with the Library itself. + + 6. 
As an exception to the Sections above, you may also compile or +link a "work that uses the Library" with the Library to produce a +work containing portions of the Library, and distribute that work +under terms of your choice, provided that the terms permit +modification of the work for the customer's own use and reverse +engineering for debugging such modifications. + + You must give prominent notice with each copy of the work that the +Library is used in it and that the Library and its use are covered by +this License. You must supply a copy of this License. If the work +during execution displays copyright notices, you must include the +copyright notice for the Library among them, as well as a reference +directing the user to the copy of this License. Also, you must do one +of these things: + + a) Accompany the work with the complete corresponding + machine-readable source code for the Library including whatever + changes were used in the work (which must be distributed under + Sections 1 and 2 above); and, if the work is an executable linked + with the Library, with the complete machine-readable "work that + uses the Library", as object code and/or source code, so that the + user can modify the Library and then relink to produce a modified + executable containing the modified Library. (It is understood + that the user who changes the contents of definitions files in the + Library will not necessarily be able to recompile the application + to use the modified definitions.) + + b) Accompany the work with a written offer, valid for at + least three years, to give the same user the materials + specified in Subsection 6a, above, for a charge no more + than the cost of performing this distribution. + + c) If distribution of the work is made by offering access to copy + from a designated place, offer equivalent access to copy the above + specified materials from the same place. + + d) Verify that the user has already received a copy of these + materials or that you have already sent this user a copy. + + For an executable, the required form of the "work that uses the +Library" must include any data and utility programs needed for +reproducing the executable from it. However, as a special exception, +the source code distributed need not include anything that is normally +distributed (in either source or binary form) with the major +components (compiler, kernel, and so on) of the operating system on +which the executable runs, unless that component itself accompanies +the executable. + + It may happen that this requirement contradicts the license +restrictions of other proprietary libraries that do not normally +accompany the operating system. Such a contradiction means you cannot +use both them and the Library together in an executable that you +distribute. + + 7. You may place library facilities that are a work based on the +Library side-by-side in a single library together with other library +facilities not covered by this License, and distribute such a combined +library, provided that the separate distribution of the work based on +the Library and of the other library facilities is otherwise +permitted, and provided that you do these two things: + + a) Accompany the combined library with a copy of the same work + based on the Library, uncombined with any other library + facilities. This must be distributed under the terms of the + Sections above. 
+ + b) Give prominent notice with the combined library of the fact + that part of it is a work based on the Library, and explaining + where to find the accompanying uncombined form of the same work. + + 8. You may not copy, modify, sublicense, link with, or distribute +the Library except as expressly provided under this License. Any +attempt otherwise to copy, modify, sublicense, link with, or +distribute the Library is void, and will automatically terminate your +rights under this License. However, parties who have received copies, +or rights, from you under this License will not have their licenses +terminated so long as such parties remain in full compliance. + + 9. You are not required to accept this License, since you have not +signed it. However, nothing else grants you permission to modify or +distribute the Library or its derivative works. These actions are +prohibited by law if you do not accept this License. Therefore, by +modifying or distributing the Library (or any work based on the +Library), you indicate your acceptance of this License to do so, and +all its terms and conditions for copying, distributing or modifying +the Library or works based on it. + + 10. Each time you redistribute the Library (or any work based on the +Library), the recipient automatically receives a license from the +original licensor to copy, distribute, link with or modify the Library +subject to these terms and conditions. You may not impose any further +restrictions on the recipients' exercise of the rights granted herein. +You are not responsible for enforcing compliance by third parties to +this License. + + 11. If, as a consequence of a court judgment or allegation of patent +infringement or for any other reason (not limited to patent issues), +conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot +distribute so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you +may not distribute the Library at all. For example, if a patent +license would not permit royalty-free redistribution of the Library by +all those who receive copies directly or indirectly through you, then +the only way you could satisfy both it and this License would be to +refrain entirely from distribution of the Library. + +If any portion of this section is held invalid or unenforceable under any +particular circumstance, the balance of the section is intended to apply, +and the section as a whole is intended to apply in other circumstances. + +It is not the purpose of this section to induce you to infringe any +patents or other property right claims or to contest validity of any +such claims; this section has the sole purpose of protecting the +integrity of the free software distribution system which is +implemented by public license practices. Many people have made +generous contributions to the wide range of software distributed +through that system in reliance on consistent application of that +system; it is up to the author/donor to decide if he or she is willing +to distribute software through any other system and a licensee cannot +impose that choice. + +This section is intended to make thoroughly clear what is believed to +be a consequence of the rest of this License. + + 12. 
If the distribution and/or use of the Library is restricted in +certain countries either by patents or by copyrighted interfaces, the +original copyright holder who places the Library under this License may add +an explicit geographical distribution limitation excluding those countries, +so that distribution is permitted only in or among countries not thus +excluded. In such case, this License incorporates the limitation as if +written in the body of this License. + + 13. The Free Software Foundation may publish revised and/or new +versions of the Library General Public License from time to time. +Such new versions will be similar in spirit to the present version, +but may differ in detail to address new problems or concerns. + +Each version is given a distinguishing version number. If the Library +specifies a version number of this License which applies to it and +"any later version", you have the option of following the terms and +conditions either of that version or of any later version published by +the Free Software Foundation. If the Library does not specify a +license version number, you may choose any version ever published by +the Free Software Foundation. + + 14. If you wish to incorporate parts of the Library into other free +programs whose distribution conditions are incompatible with these, +write to the author to ask for permission. For software which is +copyrighted by the Free Software Foundation, write to the Free +Software Foundation; we sometimes make exceptions for this. Our +decision will be guided by the two goals of preserving the free status +of all derivatives of our free software and of promoting the sharing +and reuse of software generally. + + NO WARRANTY + + 15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO +WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. +EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR +OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY +KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE +LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME +THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN +WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY +AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU +FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR +CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE +LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING +RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A +FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF +SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH +DAMAGES. + + END OF TERMS AND CONDITIONS + + Appendix: How to Apply These Terms to Your New Libraries + + If you develop a new library, and you want it to be of the greatest +possible use to the public, we recommend making it free software that +everyone can redistribute and change. You can do so by permitting +redistribution under these terms (or, alternatively, under the terms of the +ordinary General Public License). + + To apply these terms, attach the following notices to the library. 
It is +safest to attach them to the start of each source file to most effectively +convey the exclusion of warranty; and each file should have at least the +"copyright" line and a pointer to where the full notice is found. + + + Copyright (C) + + This library is free software; you can redistribute it and/or + modify it under the terms of the GNU Library General Public + License as published by the Free Software Foundation; either + version 2 of the License, or (at your option) any later version. + + This library is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + Library General Public License for more details. + + You should have received a copy of the GNU Library General Public + License along with this library; if not, write to the Free + Software Foundation, Inc., 59 Temple Place - Suite 330, Boston, + MA 02111-1307, USA + +Also add information on how to contact you by electronic and paper mail. + +You should also get your employer (if you work as a programmer) or your +school, if any, to sign a "copyright disclaimer" for the library, if +necessary. Here is a sample; alter the names: + + Yoyodyne, Inc., hereby disclaims all copyright interest in the + library `Frob' (a library for tweaking knobs) written by James Random Hacker. + + , 1 April 1990 + Ty Coon, President of Vice + +That's all there is to it! diff --git a/backend/src/apiserver/visualization/third_party_licenses/pyzmq.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/pyzmq.LICENSE new file mode 100644 index 00000000000..a0a3790ba84 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/pyzmq.LICENSE @@ -0,0 +1,32 @@ +PyZMQ is licensed under the terms of the Modified BSD License (also known as +New or Revised BSD), as follows: + +Copyright (c) 2009-2012, Brian Granger, Min Ragan-Kelley + +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +Redistributions of source code must retain the above copyright notice, this +list of conditions and the following disclaimer. + +Redistributions in binary form must reproduce the above copyright notice, this +list of conditions and the following disclaimer in the documentation and/or +other materials provided with the distribution. + +Neither the name of PyZMQ nor the names of its contributors may be used to +endorse or promote products derived from this software without specific prior +written permission. + + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/backend/src/apiserver/visualization/third_party_licenses/qtconsole.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/qtconsole.LICENSE new file mode 100644 index 00000000000..0dc89813dc1 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/qtconsole.LICENSE @@ -0,0 +1,29 @@ +BSD 3-Clause License + +Copyright (c) 2017, Project Jupyter Contributors +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/backend/src/apiserver/visualization/third_party_licenses/requests-oauthlib.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/requests-oauthlib.LICENSE new file mode 100644 index 00000000000..de09f408cec --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/requests-oauthlib.LICENSE @@ -0,0 +1,15 @@ +ISC License + +Copyright (c) 2014 Kenneth Reitz. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/backend/src/apiserver/visualization/third_party_licenses/requests.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/requests.LICENSE new file mode 100644 index 00000000000..13d91ddc7a8 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/requests.LICENSE @@ -0,0 +1,13 @@ +Copyright 2019 Kenneth Reitz + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/backend/src/apiserver/visualization/third_party_licenses/rsa.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/rsa.LICENSE new file mode 100644 index 00000000000..67589cbb860 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/rsa.LICENSE @@ -0,0 +1,13 @@ +Copyright 2011 Sybren A. Stüvel + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/backend/src/apiserver/visualization/third_party_licenses/scikit-learn.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/scikit-learn.LICENSE new file mode 100644 index 00000000000..b98af187101 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/scikit-learn.LICENSE @@ -0,0 +1,32 @@ +New BSD License + +Copyright (c) 2007–2020 The scikit-learn developers. +All rights reserved. + + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + a. Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + b. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + c. Neither the name of the Scikit-learn Developers nor the names of + its contributors may be used to endorse or promote products + derived from this software without specific prior written + permission. + + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY +OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH +DAMAGE. + diff --git a/backend/src/apiserver/visualization/third_party_licenses/scipy.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/scipy.LICENSE new file mode 100644 index 00000000000..5a0a660d8ed --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/scipy.LICENSE @@ -0,0 +1,30 @@ +Copyright (c) 2001-2002 Enthought, Inc. 
2003-2019, SciPy Developers. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/backend/src/apiserver/visualization/third_party_licenses/six.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/six.LICENSE new file mode 100644 index 00000000000..de6633112c1 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/six.LICENSE @@ -0,0 +1,18 @@ +Copyright (c) 2010-2020 Benjamin Peterson + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/backend/src/apiserver/visualization/third_party_licenses/tensorboard.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/tensorboard.LICENSE new file mode 100644 index 00000000000..15ae4214045 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/tensorboard.LICENSE @@ -0,0 +1,203 @@ +Copyright 2017 The TensorFlow Authors. All rights reserved. + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2017, The TensorFlow Authors. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/backend/src/apiserver/visualization/third_party_licenses/tensorflow-data-validation.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/tensorflow-data-validation.LICENSE new file mode 100644 index 00000000000..a196a6a7772 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/tensorflow-data-validation.LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [2018] The Tensorflow Data Validation Authors + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/backend/src/apiserver/visualization/third_party_licenses/tensorflow-estimator.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/tensorflow-estimator.LICENSE new file mode 100644 index 00000000000..4862420c023 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/tensorflow-estimator.LICENSE @@ -0,0 +1,203 @@ +Copyright 2018 The TensorFlow Authors. All rights reserved. + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2017, The TensorFlow Authors. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/backend/src/apiserver/visualization/third_party_licenses/tensorflow-metadata.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/tensorflow-metadata.LICENSE new file mode 100644 index 00000000000..d3b7a94cecd --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/tensorflow-metadata.LICENSE @@ -0,0 +1,203 @@ +Copyright 2017 The tf.Metadata Authors. All rights reserved. + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2017, The tf.Metadata Authors. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/backend/src/apiserver/visualization/third_party_licenses/tensorflow-model-analysis.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/tensorflow-model-analysis.LICENSE new file mode 100644 index 00000000000..c6be8b741a9 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/tensorflow-model-analysis.LICENSE @@ -0,0 +1,255 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +-------------------------------------------------------------------------------- +MIT +The MIT License (MIT) + +Copyright (c) 2014-2015, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + + +-------------------------------------------------------------------------------- +BSD-3-Clause +Copyright (c) 2016, Daniel Wirtz All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +* Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. +* Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. +* Neither the name of its author, nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/backend/src/apiserver/visualization/third_party_licenses/tensorflow-serving-api.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/tensorflow-serving-api.LICENSE new file mode 100644 index 00000000000..afb00ed7ac3 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/tensorflow-serving-api.LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2016, The TensorFlow Serving Authors. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/backend/src/apiserver/visualization/third_party_licenses/tensorflow-transform.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/tensorflow-transform.LICENSE new file mode 100644 index 00000000000..b36afafdf70 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/tensorflow-transform.LICENSE @@ -0,0 +1,203 @@ +Copyright 2015 The TF.Transform Authors. All rights reserved. + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2016, The TF.Transform Authors. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/backend/src/apiserver/visualization/third_party_licenses/tensorflow.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/tensorflow.LICENSE new file mode 100644 index 00000000000..40f8c347693 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/tensorflow.LICENSE @@ -0,0 +1,203 @@ +Copyright 2019 The TensorFlow Authors. All rights reserved. + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/backend/src/apiserver/visualization/third_party_licenses/termcolor.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/termcolor.LICENSE new file mode 100644 index 00000000000..3105888ec14 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/termcolor.LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2010 Jonathan Hartley +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the name of the copyright holders, nor those of its contributors + may be used to endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/backend/src/apiserver/visualization/third_party_licenses/terminado.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/terminado.LICENSE new file mode 100644 index 00000000000..c1903148149 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/terminado.LICENSE @@ -0,0 +1,28 @@ +# terminado: A python websocket server backend for xterm.js +# +# BSD License +# +# Copyright (c) 2014-, Jupyter development team +# Copyright (c) 2014, Ramalingam Saravanan +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# 1. Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + diff --git a/backend/src/apiserver/visualization/third_party_licenses/testpath.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/testpath.LICENSE new file mode 100644 index 00000000000..e95629f1073 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/testpath.LICENSE @@ -0,0 +1,29 @@ +BSD 3-Clause License + +Copyright (c) 2015, The Jupyter Development Team +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/backend/src/apiserver/visualization/third_party_licenses/tfx-bsl.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/tfx-bsl.LICENSE new file mode 100644 index 00000000000..a196a6a7772 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/tfx-bsl.LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [2018] The Tensorflow Data Validation Authors + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/backend/src/apiserver/visualization/third_party_licenses/tornado.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/tornado.LICENSE new file mode 100644 index 00000000000..d6456956733 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/tornado.LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/backend/src/apiserver/visualization/third_party_licenses/traitlets.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/traitlets.LICENSE new file mode 100644 index 00000000000..39ca730a630 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/traitlets.LICENSE @@ -0,0 +1,62 @@ +# Licensing terms + +Traitlets is adapted from enthought.traits, Copyright (c) Enthought, Inc., +under the terms of the Modified BSD License. + +This project is licensed under the terms of the Modified BSD License +(also known as New or Revised or 3-Clause BSD), as follows: + +- Copyright (c) 2001-, IPython Development Team + +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +Redistributions of source code must retain the above copyright notice, this +list of conditions and the following disclaimer. + +Redistributions in binary form must reproduce the above copyright notice, this +list of conditions and the following disclaimer in the documentation and/or +other materials provided with the distribution. + +Neither the name of the IPython Development Team nor the names of its +contributors may be used to endorse or promote products derived from this +software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +## About the IPython Development Team + +The IPython Development Team is the set of all contributors to the IPython project. +This includes all of the IPython subprojects. + +The core team that coordinates development on GitHub can be found here: +https://github.com/jupyter/. + +## Our Copyright Policy + +IPython uses a shared copyright model. Each contributor maintains copyright +over their contributions to IPython. But, it is important to note that these +contributions are typically only changes to the repositories. Thus, the IPython +source code, in its entirety is not the copyright of any single person or +institution. 
Instead, it is the collective copyright of the entire IPython +Development Team. If individual contributors want to maintain a record of what +changes/contributions they have specific copyright on, they should indicate +their copyright in the commit message of the change, when they commit the +change to one of the IPython repositories. + +With this in mind, the following banner should be used in any source code file +to indicate the copyright and license terms: + + # Copyright (c) IPython Development Team. + # Distributed under the terms of the Modified BSD License. diff --git a/backend/src/apiserver/visualization/third_party_licenses/typing-extensions.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/typing-extensions.LICENSE new file mode 100644 index 00000000000..583f9f6e617 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/typing-extensions.LICENSE @@ -0,0 +1,254 @@ +A. HISTORY OF THE SOFTWARE +========================== + +Python was created in the early 1990s by Guido van Rossum at Stichting +Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands +as a successor of a language called ABC. Guido remains Python's +principal author, although it includes many contributions from others. + +In 1995, Guido continued his work on Python at the Corporation for +National Research Initiatives (CNRI, see http://www.cnri.reston.va.us) +in Reston, Virginia where he released several versions of the +software. + +In May 2000, Guido and the Python core development team moved to +BeOpen.com to form the BeOpen PythonLabs team. In October of the same +year, the PythonLabs team moved to Digital Creations (now Zope +Corporation, see http://www.zope.com). In 2001, the Python Software +Foundation (PSF, see http://www.python.org/psf/) was formed, a +non-profit organization created specifically to own Python-related +Intellectual Property. Zope Corporation is a sponsoring member of +the PSF. + +All Python releases are Open Source (see http://www.opensource.org for +the Open Source Definition). Historically, most, but not all, Python +releases have also been GPL-compatible; the table below summarizes +the various releases. + + Release Derived Year Owner GPL- + from compatible? (1) + + 0.9.0 thru 1.2 1991-1995 CWI yes + 1.3 thru 1.5.2 1.2 1995-1999 CNRI yes + 1.6 1.5.2 2000 CNRI no + 2.0 1.6 2000 BeOpen.com no + 1.6.1 1.6 2001 CNRI yes (2) + 2.1 2.0+1.6.1 2001 PSF no + 2.0.1 2.0+1.6.1 2001 PSF yes + 2.1.1 2.1+2.0.1 2001 PSF yes + 2.1.2 2.1.1 2002 PSF yes + 2.1.3 2.1.2 2002 PSF yes + 2.2 and above 2.1.1 2001-now PSF yes + +Footnotes: + +(1) GPL-compatible doesn't mean that we're distributing Python under + the GPL. All Python licenses, unlike the GPL, let you distribute + a modified version without making your changes open source. The + GPL-compatible licenses make it possible to combine Python with + other software that is released under the GPL; the others don't. + +(2) According to Richard Stallman, 1.6.1 is not GPL-compatible, + because its license has a choice of law clause. According to + CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1 + is "not incompatible" with the GPL. + +Thanks to the many outside volunteers who have worked under Guido's +direction to make these releases possible. + + +B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON +=============================================================== + +PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 +-------------------------------------------- + +1. 
This LICENSE AGREEMENT is between the Python Software Foundation +("PSF"), and the Individual or Organization ("Licensee") accessing and +otherwise using this software ("Python") in source or binary form and +its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, PSF hereby +grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, +analyze, test, perform and/or display publicly, prepare derivative works, +distribute, and otherwise use Python alone or in any derivative version, +provided, however, that PSF's License Agreement and PSF's notice of copyright, +i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, +2011, 2012, 2013, 2014 Python Software Foundation; All Rights Reserved" are +retained in Python alone or in any derivative version prepared by Licensee. + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python. + +4. PSF is making Python available to Licensee on an "AS IS" +basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. Nothing in this License Agreement shall be deemed to create any +relationship of agency, partnership, or joint venture between PSF and +Licensee. This License Agreement does not grant permission to use PSF +trademarks or trade name in a trademark sense to endorse or promote +products or services of Licensee, or any third party. + +8. By copying, installing or otherwise using Python, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + + +BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0 +------------------------------------------- + +BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1 + +1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an +office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the +Individual or Organization ("Licensee") accessing and otherwise using +this software in source or binary form and its associated +documentation ("the Software"). + +2. Subject to the terms and conditions of this BeOpen Python License +Agreement, BeOpen hereby grants Licensee a non-exclusive, +royalty-free, world-wide license to reproduce, analyze, test, perform +and/or display publicly, prepare derivative works, distribute, and +otherwise use the Software alone or in any derivative version, +provided, however, that the BeOpen Python License is retained in the +Software, alone or in any derivative version prepared by Licensee. + +3. BeOpen is making the Software available to Licensee on an "AS IS" +basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. 
BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE +SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS +AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY +DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +5. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +6. This License Agreement shall be governed by and interpreted in all +respects by the law of the State of California, excluding conflict of +law provisions. Nothing in this License Agreement shall be deemed to +create any relationship of agency, partnership, or joint venture +between BeOpen and Licensee. This License Agreement does not grant +permission to use BeOpen trademarks or trade names in a trademark +sense to endorse or promote products or services of Licensee, or any +third party. As an exception, the "BeOpen Python" logos available at +http://www.pythonlabs.com/logos.html may be used according to the +permissions granted on that web page. + +7. By copying, installing or otherwise using the software, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + + +CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1 +--------------------------------------- + +1. This LICENSE AGREEMENT is between the Corporation for National +Research Initiatives, having an office at 1895 Preston White Drive, +Reston, VA 20191 ("CNRI"), and the Individual or Organization +("Licensee") accessing and otherwise using Python 1.6.1 software in +source or binary form and its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, CNRI +hereby grants Licensee a nonexclusive, royalty-free, world-wide +license to reproduce, analyze, test, perform and/or display publicly, +prepare derivative works, distribute, and otherwise use Python 1.6.1 +alone or in any derivative version, provided, however, that CNRI's +License Agreement and CNRI's notice of copyright, i.e., "Copyright (c) +1995-2001 Corporation for National Research Initiatives; All Rights +Reserved" are retained in Python 1.6.1 alone or in any derivative +version prepared by Licensee. Alternately, in lieu of CNRI's License +Agreement, Licensee may substitute the following text (omitting the +quotes): "Python 1.6.1 is made available subject to the terms and +conditions in CNRI's License Agreement. This Agreement together with +Python 1.6.1 may be located on the Internet using the following +unique, persistent identifier (known as a handle): 1895.22/1013. This +Agreement may also be obtained from a proxy server on the Internet +using the following URL: http://hdl.handle.net/1895.22/1013". + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python 1.6.1 or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python 1.6.1. + +4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS" +basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. 
BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. This License Agreement shall be governed by the federal +intellectual property law of the United States, including without +limitation the federal copyright law, and, to the extent such +U.S. federal law does not apply, by the law of the Commonwealth of +Virginia, excluding Virginia's conflict of law provisions. +Notwithstanding the foregoing, with regard to derivative works based +on Python 1.6.1 that incorporate non-separable material that was +previously distributed under the GNU General Public License (GPL), the +law of the Commonwealth of Virginia shall govern this License +Agreement only as to issues arising under or with respect to +Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this +License Agreement shall be deemed to create any relationship of +agency, partnership, or joint venture between CNRI and Licensee. This +License Agreement does not grant permission to use CNRI trademarks or +trade name in a trademark sense to endorse or promote products or +services of Licensee, or any third party. + +8. By clicking on the "ACCEPT" button where indicated, or by copying, +installing or otherwise using Python 1.6.1, Licensee agrees to be +bound by the terms and conditions of this License Agreement. + + ACCEPT + + +CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2 +-------------------------------------------------- + +Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam, +The Netherlands. All rights reserved. + +Permission to use, copy, modify, and distribute this software and its +documentation for any purpose and without fee is hereby granted, +provided that the above copyright notice appear in all copies and that +both that copyright notice and this permission notice appear in +supporting documentation, and that the name of Stichting Mathematisch +Centrum or CWI not be used in advertising or publicity pertaining to +distribution of the software without specific, written prior +permission. + +STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO +THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE +FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/backend/src/apiserver/visualization/third_party_licenses/uritemplate.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/uritemplate.LICENSE new file mode 100644 index 00000000000..41c87e7cf26 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/uritemplate.LICENSE @@ -0,0 +1,3 @@ +This software is made available under the terms of *either* of the licenses +found in LICENSE.APACHE or LICENSE.BSD. 
Contributions to uritemplate are +made under the terms of *both* these licenses. diff --git a/backend/src/apiserver/visualization/third_party_licenses/urllib3.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/urllib3.LICENSE new file mode 100644 index 00000000000..429a1767e44 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/urllib3.LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2008-2020 Andrey Petrov and contributors (see CONTRIBUTORS.txt) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/backend/src/apiserver/visualization/third_party_licenses/wcwidth.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/wcwidth.LICENSE new file mode 100644 index 00000000000..f0d3471a270 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/wcwidth.LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014 Jeff Quast + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/backend/src/apiserver/visualization/third_party_licenses/webencodings.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/webencodings.LICENSE new file mode 100644 index 00000000000..3d0d3e70595 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/webencodings.LICENSE @@ -0,0 +1,31 @@ +Copyright (c) 2012 by Simon Sapin. + +Some rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + + * The names of the contributors may not be used to endorse or + promote products derived from this software without specific + prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/backend/src/apiserver/visualization/third_party_licenses/widgetsnbextension.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/widgetsnbextension.LICENSE new file mode 100644 index 00000000000..deb2c38c8ec --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/widgetsnbextension.LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2015 Project Jupyter Contributors +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/backend/src/apiserver/visualization/third_party_licenses/wrapt.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/wrapt.LICENSE new file mode 100644 index 00000000000..d49cae8439d --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/wrapt.LICENSE @@ -0,0 +1,24 @@ +Copyright (c) 2013-2019, Graham Dumpleton +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. diff --git a/backend/src/apiserver/visualization/third_party_licenses/zipp.LICENSE b/backend/src/apiserver/visualization/third_party_licenses/zipp.LICENSE new file mode 100644 index 00000000000..353924be0e5 --- /dev/null +++ b/backend/src/apiserver/visualization/third_party_licenses/zipp.LICENSE @@ -0,0 +1,19 @@ +Copyright Jason R. Coombs + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. diff --git a/backend/src/apiserver/visualization/types/tfdv.py b/backend/src/apiserver/visualization/types/tfdv.py index 0190f434e6d..ac3e303b60d 100644 --- a/backend/src/apiserver/visualization/types/tfdv.py +++ b/backend/src/apiserver/visualization/types/tfdv.py @@ -12,7 +12,12 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import base64
 import tensorflow_data_validation as tfdv
+from IPython.display import display
+from IPython.display import HTML
+from tensorflow_metadata.proto.v0 import statistics_pb2
+from typing import Text

 # The following variables are provided through dependency injection. These
 # variables come from the specified input path and arguments provided by the
@@ -20,6 +25,68 @@
 #
 # source

-train_stats = tfdv.generate_statistics_from_csv(data_location=source)
+# train_stats = tfdv.generate_statistics_from_csv(data_location=source)
+# tfdv.visualize_statistics(train_stats)
-tfdv.visualize_statistics(train_stats)
+def get_statistics_html(
+    lhs_statistics: statistics_pb2.DatasetFeatureStatisticsList
+) -> Text:
+  """Build the HTML for visualizing the input statistics using Facets.
+  Args:
+    lhs_statistics: A DatasetFeatureStatisticsList protocol buffer.
+  Returns:
+    HTML to be embedded for visualization.
+  Raises:
+    TypeError: If the input argument is not of the expected type.
+    ValueError: If the input statistics proto does not contain exactly one
+      dataset.
+  """
+
+  rhs_statistics = None
+  lhs_name = 'lhs_statistics'
+  rhs_name = 'rhs_statistics'
+
+  if not isinstance(lhs_statistics,
+                    statistics_pb2.DatasetFeatureStatisticsList):
+    raise TypeError(
+        'lhs_statistics is of type %s, should be '
+        'a DatasetFeatureStatisticsList proto.' % type(lhs_statistics).__name__)
+
+  if len(lhs_statistics.datasets) != 1:
+    raise ValueError('lhs_statistics proto contains multiple datasets. Only '
+                     'one dataset is currently supported.')
+
+  if lhs_statistics.datasets[0].name:
+    lhs_name = lhs_statistics.datasets[0].name
+
+  # Add lhs stats.
+  combined_statistics = statistics_pb2.DatasetFeatureStatisticsList()
+  lhs_stats_copy = combined_statistics.datasets.add()
+  lhs_stats_copy.MergeFrom(lhs_statistics.datasets[0])
+  lhs_stats_copy.name = lhs_name
+
+  protostr = base64.b64encode(
+      combined_statistics.SerializeToString()).decode('utf-8')
+
+  # pylint: disable=line-too-long
+  # Note that in the html template we currently assign a temporary id to the
+  # facets element and then remove it once we have appended the serialized proto
+  # string to the element. We do this to avoid any collision of ids when
+  # displaying multiple facets output in the notebook.
+ html_template = """ + """ + # pylint: enable=line-too-long + html = html_template.replace('protostr', protostr) + + return html + +stats = tfdv.load_statistics(source) +html = get_statistics_html(stats) +display(HTML(html)) diff --git a/backend/src/cache/BUILD.bazel b/backend/src/cache/BUILD.bazel new file mode 100644 index 00000000000..709071958b5 --- /dev/null +++ b/backend/src/cache/BUILD.bazel @@ -0,0 +1,27 @@ +load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library") + +go_library( + name = "go_default_library", + srcs = [ + "client_manager.go", + "main.go", + ], + importpath = "github.com/kubeflow/pipelines/backend/src/cache", + visibility = ["//visibility:private"], + deps = [ + "//backend/src/cache/client:go_default_library", + "//backend/src/cache/model:go_default_library", + "//backend/src/cache/server:go_default_library", + "//backend/src/cache/storage:go_default_library", + "//backend/src/common/util:go_default_library", + "@com_github_cenkalti_backoff//:go_default_library", + "@com_github_golang_glog//:go_default_library", + "@com_github_jinzhu_gorm//:go_default_library", + ], +) + +go_binary( + name = "cache", + embed = [":go_default_library"], + visibility = ["//visibility:public"], +) diff --git a/backend/src/cache/OWNERS b/backend/src/cache/OWNERS new file mode 100644 index 00000000000..6a3d99036c4 --- /dev/null +++ b/backend/src/cache/OWNERS @@ -0,0 +1,6 @@ +approvers: + - Ark-kun + - rui5i +reviewers: + - Ark-kun + - rui5i \ No newline at end of file diff --git a/backend/src/cache/README.md b/backend/src/cache/README.md new file mode 100644 index 00000000000..4a8e0b9512b --- /dev/null +++ b/backend/src/cache/README.md @@ -0,0 +1,23 @@ +## Build src image +To build the Docker image of cache server, run the following Docker command from the pipelines directory: + +``` +docker build -t gcr.io/ml-pipeline/cache-server:latest -f backend/Dockerfile.cacheserver . +``` + +## Deploy cache service to an existing KFP deployment +1. Configure kubectl to talk to your newly created cluster. Refer to [Configuring cluster access for kubectl](https://cloud.google.com/kubernetes-engine/docs/how-to/cluster-access-for-kubectl). +2. Run deploy shell script to generate certificates and create MutatingWebhookConfiguration: + +``` +# Assume KFP is deployed in the namespace kubeflow +export NAMESPACE=kubeflow +./deployer/deploy-cache-service.sh +``` + +3. 
Go to pipelines/manifests/kustomize/base/cache folder and run following scripts: + +``` +kubectl apply -f cache-deployment.yaml --namespace $NAMESPACE +kubectl apply -f cache-service.yaml --namespace $NAMESPACE +``` diff --git a/backend/src/cache/client/BUILD.bazel b/backend/src/cache/client/BUILD.bazel new file mode 100644 index 00000000000..b04c858c529 --- /dev/null +++ b/backend/src/cache/client/BUILD.bazel @@ -0,0 +1,35 @@ +load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") + +go_library( + name = "go_default_library", + srcs = [ + "kubernetes_core.go", + "kubernetes_core_fake.go", + "pod_fake.go", + "sql.go", + ], + importpath = "github.com/kubeflow/pipelines/backend/src/cache/client", + visibility = ["//visibility:public"], + deps = [ + "//backend/src/common/util:go_default_library", + "@com_github_cenkalti_backoff//:go_default_library", + "@com_github_go_sql_driver_mysql//:go_default_library", + "@com_github_golang_glog//:go_default_library", + "@com_github_pkg_errors//:go_default_library", + "@io_k8s_api//core/v1:go_default_library", + "@io_k8s_api//policy/v1beta1:go_default_library", + "@io_k8s_apimachinery//pkg/apis/meta/v1:go_default_library", + "@io_k8s_apimachinery//pkg/types:go_default_library", + "@io_k8s_apimachinery//pkg/watch:go_default_library", + "@io_k8s_client_go//kubernetes:go_default_library", + "@io_k8s_client_go//kubernetes/typed/core/v1:go_default_library", + "@io_k8s_client_go//rest:go_default_library", + ], +) + +go_test( + name = "go_default_test", + srcs = ["sql_test.go"], + embed = [":go_default_library"], + deps = ["@com_github_go_sql_driver_mysql//:go_default_library"], +) diff --git a/backend/src/cache/client/kubernetes_core.go b/backend/src/cache/client/kubernetes_core.go new file mode 100644 index 00000000000..dcafc9b6b00 --- /dev/null +++ b/backend/src/cache/client/kubernetes_core.go @@ -0,0 +1,58 @@ +package client + +import ( + "time" + + "github.com/cenkalti/backoff" + "github.com/golang/glog" + "github.com/pkg/errors" + "k8s.io/client-go/kubernetes" + v1 "k8s.io/client-go/kubernetes/typed/core/v1" + "k8s.io/client-go/rest" +) + +type KubernetesCoreInterface interface { + PodClient(namespace string) v1.PodInterface +} + +type KubernetesCore struct { + coreV1Client v1.CoreV1Interface +} + +func (c *KubernetesCore) PodClient(namespace string) v1.PodInterface { + return c.coreV1Client.Pods(namespace) +} + +func createKubernetesCore() (KubernetesCoreInterface, error) { + restConfig, err := rest.InClusterConfig() + if err != nil { + return nil, errors.Wrap(err, "Failed to initialize kubernetes client.") + } + + clientSet, err := kubernetes.NewForConfig(restConfig) + if err != nil { + return nil, errors.Wrap(err, "Failed to initialize kubernetes client set.") + } + return &KubernetesCore{clientSet.CoreV1()}, nil +} + +// CreateKubernetesCoreOrFatal creates a new client for the Kubernetes pod. +func CreateKubernetesCoreOrFatal(initConnectionTimeout time.Duration) KubernetesCoreInterface { + var client KubernetesCoreInterface + var err error + var operation = func() error { + client, err = createKubernetesCore() + if err != nil { + return err + } + return nil + } + b := backoff.NewExponentialBackOff() + b.MaxElapsedTime = initConnectionTimeout + err = backoff.Retry(operation, b) + + if err != nil { + glog.Fatalf("Failed to create pod client. 
Error: %v", err) + } + return client +} diff --git a/backend/src/cache/client/kubernetes_core_fake.go b/backend/src/cache/client/kubernetes_core_fake.go new file mode 100644 index 00000000000..221f769b273 --- /dev/null +++ b/backend/src/cache/client/kubernetes_core_fake.go @@ -0,0 +1,47 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package client + +import ( + "github.com/kubeflow/pipelines/backend/src/common/util" + v1 "k8s.io/client-go/kubernetes/typed/core/v1" +) + +type FakeKuberneteCoreClient struct { + podClientFake *FakePodClient +} + +func (c *FakeKuberneteCoreClient) PodClient(namespace string) v1.PodInterface { + if len(namespace) == 0 { + panic(util.NewResourceNotFoundError("Namespace", namespace)) + } + return c.podClientFake +} + +func NewFakeKuberneteCoresClient() *FakeKuberneteCoreClient { + return &FakeKuberneteCoreClient{&FakePodClient{}} +} + +type FakeKubernetesCoreClientWithBadPodClient struct { + podClientFake *FakeBadPodClient +} + +func NewFakeKubernetesCoreClientWithBadPodClient() *FakeKubernetesCoreClientWithBadPodClient { + return &FakeKubernetesCoreClientWithBadPodClient{&FakeBadPodClient{}} +} + +func (c *FakeKubernetesCoreClientWithBadPodClient) PodClient(namespace string) v1.PodInterface { + return c.podClientFake +} diff --git a/backend/src/cache/client/pod_fake.go b/backend/src/cache/client/pod_fake.go new file mode 100644 index 00000000000..856dcba2730 --- /dev/null +++ b/backend/src/cache/client/pod_fake.go @@ -0,0 +1,105 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package client + +import ( + "errors" + + "github.com/golang/glog" + corev1 "k8s.io/api/core/v1" + "k8s.io/api/policy/v1beta1" + v1 "k8s.io/apimachinery/pkg/apis/meta/v1" + "k8s.io/apimachinery/pkg/types" + "k8s.io/apimachinery/pkg/watch" + "k8s.io/client-go/rest" +) + +type FakePodClient struct { + watchIsCalled bool + patchIsCalled bool +} + +func (FakePodClient) Create(*corev1.Pod) (*corev1.Pod, error) { + glog.Error("This fake method is not yet implemented.") + return nil, nil +} + +func (FakePodClient) Update(*corev1.Pod) (*corev1.Pod, error) { + glog.Error("This fake method is not yet implemented.") + return nil, nil +} + +func (FakePodClient) UpdateStatus(*corev1.Pod) (*corev1.Pod, error) { + glog.Error("This fake method is not yet implemented.") + return nil, nil +} + +func (FakePodClient) Delete(name string, options *v1.DeleteOptions) error { + return nil +} + +func (FakePodClient) DeleteCollection(options *v1.DeleteOptions, listOptions v1.ListOptions) error { + glog.Error("This fake method is not yet implemented.") + return nil +} + +func (FakePodClient) Get(name string, options v1.GetOptions) (*corev1.Pod, error) { + glog.Error("This fake method is not yet implemented.") + return nil, nil +} + +func (FakePodClient) List(opts v1.ListOptions) (*corev1.PodList, error) { + glog.Error("This fake method is not yet implemented.") + return nil, nil +} + +func (f FakePodClient) Watch(opts v1.ListOptions) (watch.Interface, error) { + f.watchIsCalled = true + event := watch.Event{ + Type: watch.Added, + Object: &corev1.Pod{}, + } + ch := make(chan watch.Event, 1) + ch <- event + return nil, nil +} + +func (f FakePodClient) Patch(name string, pt types.PatchType, data []byte, subresources ...string) (result *corev1.Pod, err error) { + f.patchIsCalled = true + return nil, nil +} + +func (FakePodClient) Bind(binding *corev1.Binding) error { + glog.Error("This fake method is not yet implemented.") + return nil +} + +func (FakePodClient) Evict(eviction *v1beta1.Eviction) error { + glog.Error("This fake method is not yet implemented.") + return nil +} + +func (FakePodClient) GetLogs(name string, opts *corev1.PodLogOptions) *rest.Request { + glog.Error("This fake method is not yet implemented.") + return nil +} + +type FakeBadPodClient struct { + FakePodClient +} + +func (FakeBadPodClient) Delete(name string, options *v1.DeleteOptions) error { + return errors.New("failed to delete pod") +} diff --git a/backend/src/cache/client/sql.go b/backend/src/cache/client/sql.go new file mode 100644 index 00000000000..8eaa39f257c --- /dev/null +++ b/backend/src/cache/client/sql.go @@ -0,0 +1,46 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
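
The fakes above satisfy the same `KubernetesCoreInterface` as the real client, so code under test cannot tell the substitution apart, and the "bad" variant simulates API failures for error paths. A hedged sketch of how a test might exercise them (this test is illustrative only, not part of the PR):

```go
package client

import "testing"

// TestFakePodDelete is an illustrative test, not part of this PR.
func TestFakePodDelete(t *testing.T) {
	// The fake satisfies KubernetesCoreInterface, so production code that
	// depends on the interface sees it as a real client.
	var c KubernetesCoreInterface = NewFakeKuberneteCoresClient()
	if err := c.PodClient("default").Delete("some-pod", nil); err != nil {
		t.Fatalf("fake delete should succeed, got: %v", err)
	}

	// The bad variant always fails deletes, for exercising error handling.
	bad := NewFakeKubernetesCoreClientWithBadPodClient()
	if err := bad.PodClient("default").Delete("some-pod", nil); err == nil {
		t.Fatal("bad fake should report a delete failure")
	}
}
```
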
+ +package client + +import ( + "fmt" + + "github.com/go-sql-driver/mysql" +) + +func CreateMySQLConfig(user, password string, mysqlServiceHost string, + mysqlServicePort string, dbName string, mysqlGroupConcatMaxLen string, mysqlExtraParams map[string]string) *mysql.Config { + + params := map[string]string{ + "charset": "utf8", + "parseTime": "True", + "loc": "Local", + "group_concat_max_len": mysqlGroupConcatMaxLen, + } + + for k, v := range mysqlExtraParams { + params[k] = v + } + + return &mysql.Config{ + User: user, + Passwd: password, + Net: "tcp", + Addr: fmt.Sprintf("%s:%s", mysqlServiceHost, mysqlServicePort), + Params: params, + DBName: dbName, + AllowNativePasswords: true, + } +} diff --git a/backend/src/cache/client/sql_test.go b/backend/src/cache/client/sql_test.go new file mode 100644 index 00000000000..a9d27820848 --- /dev/null +++ b/backend/src/cache/client/sql_test.go @@ -0,0 +1,81 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package client + +import ( + "reflect" + "testing" + + "github.com/go-sql-driver/mysql" +) + +func TestCreateMySQLConfig(t *testing.T) { + type args struct { + user string + password string + host string + port string + dbName string + mysqlGroupConcatMaxLen string + mysqlExtraParams map[string]string + } + tests := []struct { + name string + args args + want *mysql.Config + }{ + { + name: "default config", + args: args{ + user: "root", + host: "mysql", + port: "3306", + mysqlGroupConcatMaxLen: "1024", + mysqlExtraParams: nil, + }, + want: &mysql.Config{ + User: "root", + Net: "tcp", + Addr: "mysql:3306", + Params: map[string]string{"charset": "utf8", "parseTime": "True", "loc": "Local", "group_concat_max_len": "1024"}, + AllowNativePasswords: true, + }, + }, + { + name: "extra parameters", + args: args{ + user: "root", + host: "mysql", + port: "3306", + mysqlGroupConcatMaxLen: "1024", + mysqlExtraParams: map[string]string{"tls": "true"}, + }, + want: &mysql.Config{ + User: "root", + Net: "tcp", + Addr: "mysql:3306", + Params: map[string]string{"charset": "utf8", "parseTime": "True", "loc": "Local", "group_concat_max_len": "1024", "tls": "true"}, + AllowNativePasswords: true, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := CreateMySQLConfig(tt.args.user, tt.args.password, tt.args.host, tt.args.port, tt.args.dbName, tt.args.mysqlGroupConcatMaxLen, tt.args.mysqlExtraParams); !reflect.DeepEqual(got, tt.want) { + t.Errorf("CreateMySQLConfig() = %#v, want %v", got, tt.want) + } + }) + } +} diff --git a/backend/src/cache/client_manager.go b/backend/src/cache/client_manager.go new file mode 100644 index 00000000000..05d09d6f83f --- /dev/null +++ b/backend/src/cache/client_manager.go @@ -0,0 +1,172 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package main + +import ( + "database/sql" + "fmt" + "log" + "time" + + "github.com/cenkalti/backoff" + "github.com/golang/glog" + "github.com/jinzhu/gorm" + "github.com/kubeflow/pipelines/backend/src/cache/client" + "github.com/kubeflow/pipelines/backend/src/cache/model" + "github.com/kubeflow/pipelines/backend/src/cache/storage" + "github.com/kubeflow/pipelines/backend/src/common/util" +) + +const ( + DefaultConnectionTimeout = "6m" +) + +type ClientManager struct { + db *storage.DB + cacheStore storage.ExecutionCacheStoreInterface + k8sCoreClient client.KubernetesCoreInterface + time util.TimeInterface +} + +func (c *ClientManager) CacheStore() storage.ExecutionCacheStoreInterface { + return c.cacheStore +} + +func (c *ClientManager) KubernetesCoreClient() client.KubernetesCoreInterface { + return c.k8sCoreClient +} + +func (c *ClientManager) Close() { + c.db.Close() +} + +func (c *ClientManager) init(params WhSvrDBParameters) { + timeoutDuration, _ := time.ParseDuration(DefaultConnectionTimeout) + db := initDBClient(params, timeoutDuration) + + c.time = util.NewRealTime() + c.db = db + c.cacheStore = storage.NewExecutionCacheStore(db, c.time) + c.k8sCoreClient = client.CreateKubernetesCoreOrFatal(timeoutDuration) +} + +func initDBClient(params WhSvrDBParameters, initConnectionTimeout time.Duration) *storage.DB { + driverName := params.dbDriver + var arg string + + switch driverName { + case mysqlDBDriverDefault: + arg = initMysql(params, initConnectionTimeout) + default: + glog.Fatalf("Driver %v is not supported", driverName) + } + + // db is safe for concurrent use by multiple goroutines + // and maintains its own pool of idle connections. + db, err := gorm.Open(driverName, arg) + util.TerminateIfError(err) + + // Create table + response := db.AutoMigrate(&model.ExecutionCache{}) + if response.Error != nil { + glog.Fatalf("Failed to initialize the databases.") + } + + response = db.Model(&model.ExecutionCache{}).ModifyColumn("ExecutionOutput", "longtext") + if response.Error != nil { + glog.Fatalf("Failed to update the execution output type. Error: %s", response.Error) + } + response = db.Model(&model.ExecutionCache{}).ModifyColumn("ExecutionTemplate", "longtext not null") + if response.Error != nil { + glog.Fatalf("Failed to update the execution template type. 
Error: %s", response.Error) + } + + var tableNames []string + db.Raw(`show tables`).Pluck("Tables_in_caches", &tableNames) + for _, tableName := range tableNames { + log.Printf(tableName) + } + + return storage.NewDB(db) +} + +func initMysql(params WhSvrDBParameters, initConnectionTimeout time.Duration) string { + mysqlConfig := client.CreateMySQLConfig( + params.dbUser, + params.dbPwd, + params.dbHost, + params.dbPort, + "", + params.dbGroupConcatMaxLen, + map[string]string{}, + ) + + var db *sql.DB + var err error + var operation = func() error { + db, err = sql.Open(params.dbDriver, mysqlConfig.FormatDSN()) + if err != nil { + return err + } + return nil + } + b := backoff.NewExponentialBackOff() + b.MaxElapsedTime = initConnectionTimeout + err = backoff.Retry(operation, b) + + defer db.Close() + util.TerminateIfError(err) + + // Create database if not exist + dbName := params.dbName + operation = func() error { + _, err = db.Exec(fmt.Sprintf("CREATE DATABASE IF NOT EXISTS %s", dbName)) + if err != nil { + return err + } + log.Printf("Database created") + return nil + } + b = backoff.NewExponentialBackOff() + b.MaxElapsedTime = initConnectionTimeout + err = backoff.Retry(operation, b) + + operation = func() error { + _, err = db.Exec(fmt.Sprintf("USE %s", dbName)) + if err != nil { + return err + } + return nil + } + b = backoff.NewExponentialBackOff() + b.MaxElapsedTime = initConnectionTimeout + err = backoff.Retry(operation, b) + + util.TerminateIfError(err) + mysqlConfig.DBName = dbName + // When updating, return rows matched instead of rows affected. This counts rows that are being + // set as the same values as before. If updating using a primary key and rows matched is 0, then + // it means this row is not found. + // Config reference: https://github.com/go-sql-driver/mysql#clientfoundrows + mysqlConfig.ClientFoundRows = true + return mysqlConfig.FormatDSN() +} + +func NewClientManager(params WhSvrDBParameters) ClientManager { + clientManager := ClientManager{} + clientManager.init(params) + + return clientManager +} diff --git a/backend/src/cache/deployer/Dockerfile b/backend/src/cache/deployer/Dockerfile new file mode 100644 index 00000000000..f1e24b1a2f1 --- /dev/null +++ b/backend/src/cache/deployer/Dockerfile @@ -0,0 +1,23 @@ +FROM google/cloud-sdk:alpine + +RUN apk add --update \ + python \ + curl \ + which \ + jq \ + bash \ + openssl + +RUN gcloud components install kubectl + +ADD backend/src/cache/deployer/* /kfp/cache/deployer/ + +WORKDIR /kfp/cache/deployer + +RUN chmod +x deploy-cache-service.sh +RUN chmod +x webhook-create-signed-cert.sh +RUN chmod +x webhook-patch-ca-bundle.sh + +COPY third_party/license.txt /bin/license.txt + +ENTRYPOINT ["/bin/sh", "/kfp/cache/deployer/deploy-cache-service.sh"] \ No newline at end of file diff --git a/backend/src/cache/deployer/cache-configmap.yaml.template b/backend/src/cache/deployer/cache-configmap.yaml.template new file mode 100644 index 00000000000..8f4d217ca5a --- /dev/null +++ b/backend/src/cache/deployer/cache-configmap.yaml.template @@ -0,0 +1,17 @@ +apiVersion: admissionregistration.k8s.io/v1beta1 +kind: MutatingWebhookConfiguration +metadata: + name: cache-webhook-${NAMESPACE} +webhooks: + - name: cache-server.${NAMESPACE}.svc + clientConfig: + service: + name: cache-server + namespace: ${NAMESPACE} + path: "/mutate" + caBundle: ${CA_BUNDLE} + rules: + - operations: [ "CREATE" ] + apiGroups: [""] + apiVersions: ["v1"] + resources: ["pods"] diff --git a/backend/src/cache/deployer/deploy-cache-service.sh 
b/backend/src/cache/deployer/deploy-cache-service.sh
new file mode 100755
index 00000000000..268006201dd
--- /dev/null
+++ b/backend/src/cache/deployer/deploy-cache-service.sh
@@ -0,0 +1,69 @@
+#!/bin/bash
+#
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This script is for deploying the cache service to an existing cluster.
+# Prerequisite: configure kubectl to talk to your cluster. See ref below:
+# https://cloud.google.com/kubernetes-engine/docs/how-to/cluster-access-for-kubectl
+
+set -ex
+
+echo "Start deploying cache service to existing cluster:"
+
+NAMESPACE=${NAMESPACE_TO_WATCH:-kubeflow}
+MUTATING_WEBHOOK_CONFIGURATION_NAME="cache-webhook-${NAMESPACE}"
+
+# This should fail if there are connectivity problems
+# Gotcha: Listing all objects requires list permission,
+# but when listing a single object kubectl will fail if it's not found
+# unless --ignore-not-found is specified.
+kubectl get mutatingwebhookconfigurations "${MUTATING_WEBHOOK_CONFIGURATION_NAME}" --namespace "${NAMESPACE}" --ignore-not-found >webhooks.txt
+
+if grep "${MUTATING_WEBHOOK_CONFIGURATION_NAME}" -w ./cache-configmap-ca-bundle.yaml
+echo "CA_BUNDLE patched successfully"
+
+# Create MutatingWebhookConfiguration
+cat ./cache-configmap-ca-bundle.yaml
+kubectl apply -f ./cache-configmap-ca-bundle.yaml --namespace "${NAMESPACE}"
+
+# TODO: Check whether we really need to check for the existence of the webhook
+# Usually the Kubernetes objects appear immediately.
+while true; do
+  # Should fail if there are connectivity problems
+  kubectl get mutatingwebhookconfigurations "${MUTATING_WEBHOOK_CONFIGURATION_NAME}" --namespace "${NAMESPACE}" --ignore-not-found >webhooks.txt
+
+  if grep "${MUTATING_WEBHOOK_CONFIGURATION_NAME}" -w > ${tmpdir}/csr.conf
+[req]
+req_extensions = v3_req
+distinguished_name = req_distinguished_name
+[req_distinguished_name]
+[ v3_req ]
+basicConstraints = CA:FALSE
+keyUsage = nonRepudiation, digitalSignature, keyEncipherment
+extendedKeyUsage = serverAuth
+subjectAltName = @alt_names
+[alt_names]
+DNS.1 = ${service}
+DNS.2 = ${service}.${namespace}
+DNS.3 = ${service}.${namespace}.svc
+EOF
+
+openssl genrsa -out ${tmpdir}/server-key.pem 2048
+openssl req -new -key ${tmpdir}/server-key.pem -subj "/CN=${service}.${namespace}.svc" -out ${tmpdir}/server.csr -config ${tmpdir}/csr.conf
+
+echo "start running kubectl..."
+
+# Clean up any previously created CSR for our service. Ignore errors if not present.
+kubectl delete csr ${csrName} 2>/dev/null || true + +# create server cert/key CSR and send to k8s API +cat <&2 + exit 1 +fi +echo ${serverCert} | openssl base64 -d -A -out ${tmpdir}/server-cert.pem + +echo ${serverCert} > ${cert_output_path} + +# create the secret with CA cert and server cert/key +kubectl create secret generic ${secret} \ + --from-file=key.pem=${tmpdir}/server-key.pem \ + --from-file=cert.pem=${tmpdir}/server-cert.pem \ + --dry-run -o yaml | + kubectl -n ${namespace} apply -f - \ No newline at end of file diff --git a/backend/src/cache/deployer/webhook-patch-ca-bundle.sh b/backend/src/cache/deployer/webhook-patch-ca-bundle.sh new file mode 100755 index 00000000000..3515549f9cf --- /dev/null +++ b/backend/src/cache/deployer/webhook-patch-ca-bundle.sh @@ -0,0 +1,44 @@ +#!/bin/bash +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This file will patch CA bundle and namespace to MutatingWebhookConfiguration + +ROOT=$(cd $(dirname $0)/../../; pwd) + +set -o errexit +set -o nounset +set -o pipefail +set -ex + +while [[ $# -gt 0 ]]; do + case ${1} in + --cert_input_path) + cert_input_path="$2" + shift + ;; + esac + shift +done + +[ -z ${cert_input_path} ] && cert_input_path=${CA_FILE} + +export CA_BUNDLE=$(cat ${cert_input_path}) + +if command -v envsubst >/dev/null 2>&1; then + envsubst +else + sed -e "s|\${CA_BUNDLE}|${CA_BUNDLE}|g" -e "s|\${NAMESPACE}|${NAMESPACE}|g" +fi diff --git a/backend/src/cache/main.go b/backend/src/cache/main.go new file mode 100644 index 00000000000..f5e8f8138d5 --- /dev/null +++ b/backend/src/cache/main.go @@ -0,0 +1,87 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
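
For reference, `webhook-patch-ca-bundle.sh` above renders `cache-configmap.yaml.template` by substituting `${CA_BUNDLE}` and `${NAMESPACE}`, via `envsubst` when available and `sed` otherwise. The same rendering can be pictured as a Go sketch using `os.Expand`; the values below are placeholders, not real certificate data:

```go
package main

import (
	"fmt"
	"os"
)

func main() {
	// Placeholder values; the real deployer takes the namespace from the
	// environment and the CA bundle from the generated certificate file.
	vars := map[string]string{
		"NAMESPACE": "kubeflow",
		"CA_BUNDLE": "PLACEHOLDER_BASE64_CA_CERT",
	}

	template := "name: cache-webhook-${NAMESPACE}\ncaBundle: ${CA_BUNDLE}\n"

	// os.Expand replaces each ${VAR} using the supplied mapping function,
	// mirroring what envsubst does for the template file.
	rendered := os.Expand(template, func(key string) string { return vars[key] })
	fmt.Print(rendered)
}
```
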
+
+package main
+
+import (
+	"flag"
+	"log"
+	"net/http"
+	"path/filepath"
+
+	"github.com/kubeflow/pipelines/backend/src/cache/server"
+)
+
+const (
+	TLSDir      string = "/etc/webhook/certs"
+	TLSCertFile string = "cert.pem"
+	TLSKeyFile  string = "key.pem"
+)
+
+const (
+	MutateAPI   string = "/mutate"
+	WebhookPort string = ":8443"
+)
+
+const (
+	initConnectionTimeout = "6m"
+
+	mysqlDBDriverDefault            = "mysql"
+	mysqlDBHostDefault              = "mysql"
+	mysqlDBPortDefault              = "3306"
+	mysqlDBGroupConcatMaxLenDefault = "4194304"
+)
+
+type WhSvrDBParameters struct {
+	dbDriver            string
+	dbHost              string
+	dbPort              string
+	dbName              string
+	dbUser              string
+	dbPwd               string
+	dbGroupConcatMaxLen string
+	namespaceToWatch    string
+}
+
+func main() {
+	var params WhSvrDBParameters
+	flag.StringVar(&params.dbDriver, "db_driver", mysqlDBDriverDefault, "Database driver name, mysql is the default value")
+	flag.StringVar(&params.dbHost, "db_host", mysqlDBHostDefault, "Database host name.")
+	flag.StringVar(&params.dbPort, "db_port", mysqlDBPortDefault, "Database port number.")
+	flag.StringVar(&params.dbName, "db_name", "cachedb", "Database name.")
+	flag.StringVar(&params.dbUser, "db_user", "root", "Database user name.")
+	flag.StringVar(&params.dbPwd, "db_password", "", "Database password.")
+	flag.StringVar(&params.dbGroupConcatMaxLen, "db_group_concat_max_len", mysqlDBGroupConcatMaxLenDefault, "Database group concat max length.")
+	flag.StringVar(&params.namespaceToWatch, "namespace_to_watch", "kubeflow", "Namespace to watch.")
+
+	flag.Parse()
+
+	log.Println("Initializing client manager...")
+	clientManager := NewClientManager(params)
+
+	go server.WatchPods(params.namespaceToWatch, &clientManager)
+
+	certPath := filepath.Join(TLSDir, TLSCertFile)
+	keyPath := filepath.Join(TLSDir, TLSKeyFile)
+
+	mux := http.NewServeMux()
+	mux.Handle(MutateAPI, server.AdmitFuncHandler(server.MutatePodIfCached, &clientManager))
+	server := &http.Server{
+		// We listen on port 8443 such that we do not need root privileges or extra capabilities for this server.
+		// The Service object will take care of mapping this port to the HTTPS port 443.
+		Addr:    WebhookPort,
+		Handler: mux,
+	}
+	log.Fatal(server.ListenAndServeTLS(certPath, keyPath))
+}
diff --git a/backend/src/cache/model/BUILD.bazel b/backend/src/cache/model/BUILD.bazel
new file mode 100644
index 00000000000..c69c82c1449
--- /dev/null
+++ b/backend/src/cache/model/BUILD.bazel
@@ -0,0 +1,8 @@
+load("@io_bazel_rules_go//go:def.bzl", "go_library")
+
+go_library(
+    name = "go_default_library",
+    srcs = ["execution_cache.go"],
+    importpath = "github.com/kubeflow/pipelines/backend/src/cache/model",
+    visibility = ["//visibility:public"],
+)
diff --git a/backend/src/cache/model/execution_cache.go b/backend/src/cache/model/execution_cache.go
new file mode 100644
index 00000000000..e60f82701f1
--- /dev/null
+++ b/backend/src/cache/model/execution_cache.go
@@ -0,0 +1,45 @@
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
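
The `main.go` above serves the `/mutate` endpoint over TLS on `:8443` using the AdmissionReview protocol. A hedged smoke-test sketch for exercising it by hand, assuming the server is running locally with its certificates in place (the payload and address are illustrative; in a real cluster the Kubernetes API server is the caller):

```go
package main

import (
	"crypto/tls"
	"fmt"
	"io/ioutil"
	"net/http"
	"strings"
)

func main() {
	// Minimal hand-written AdmissionReview; real requests carry a full pod
	// object and are constructed by the Kubernetes API server.
	payload := `{"apiVersion":"admission.k8s.io/v1beta1","kind":"AdmissionReview",` +
		`"request":{"uid":"123","namespace":"kubeflow"}}`

	// The webhook uses a cluster-internal certificate, so a local smoke test
	// has to skip verification; never do this against a production endpoint.
	httpClient := &http.Client{Transport: &http.Transport{
		TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
	}}

	resp, err := httpClient.Post("https://localhost:8443/mutate",
		"application/json", strings.NewReader(payload))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	body, _ := ioutil.ReadAll(resp.Body)
	// Expect an AdmissionReview response carrying either a JSON patch or an
	// error status in its Result message.
	fmt.Println(string(body))
}
```
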
+
+package model
+
+type ExecutionCache struct {
+	ID                int64  `gorm:"column:ID; not null; primary_key; AUTO_INCREMENT"`
+	ExecutionCacheKey string `gorm:"column:ExecutionCacheKey; not null; index:idx_cache_key"`
+	ExecutionTemplate string `gorm:"column:ExecutionTemplate; not null"`
+	ExecutionOutput   string `gorm:"column:ExecutionOutput; not null"`
+	MaxCacheStaleness int64  `gorm:"column:MaxCacheStaleness; not null"`
+	StartedAtInSec    int64  `gorm:"column:StartedAtInSec; not null"`
+	EndedAtInSec      int64  `gorm:"column:EndedAtInSec; not null"`
+}
+
+// GetValueOfPrimaryKey returns the value of the primary key, ID.
+func (e ExecutionCache) GetValueOfPrimaryKey() int64 {
+	return e.ID
+}
+
+// GetExecutionCacheTablePrimaryKeyColumn returns the primary key column of ExecutionCache.
+func GetExecutionCacheTablePrimaryKeyColumn() string {
+	return "ID"
+}
+
+// PrimaryKeyColumnName returns the name of the primary key column for ExecutionCache.
+func (e *ExecutionCache) PrimaryKeyColumnName() string {
+	return "ID"
+}
+
+// GetModelName returns the name of ExecutionCache.
+func (e *ExecutionCache) GetModelName() string {
+	return "executionCaches"
+}
diff --git a/backend/src/cache/server/BUILD.bazel b/backend/src/cache/server/BUILD.bazel
new file mode 100644
index 00000000000..992721f2825
--- /dev/null
+++ b/backend/src/cache/server/BUILD.bazel
@@ -0,0 +1,47 @@
+load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test")
+
+go_library(
+    name = "go_default_library",
+    srcs = [
+        "admission.go",
+        "client_manager_fake.go",
+        "mutation.go",
+        "watcher.go",
+    ],
+    importpath = "github.com/kubeflow/pipelines/backend/src/cache/server",
+    visibility = ["//visibility:public"],
+    deps = [
+        "//backend/src/cache/client:go_default_library",
+        "//backend/src/cache/model:go_default_library",
+        "//backend/src/cache/storage:go_default_library",
+        "//backend/src/common/util:go_default_library",
+        "@com_github_golang_glog//:go_default_library",
+        "@com_github_peterhellberg_duration//:go_default_library",
+        "@io_k8s_api//admission/v1beta1:go_default_library",
+        "@io_k8s_api//core/v1:go_default_library",
+        "@io_k8s_apimachinery//pkg/apis/meta/v1:go_default_library",
+        "@io_k8s_apimachinery//pkg/runtime:go_default_library",
+        "@io_k8s_apimachinery//pkg/runtime/serializer:go_default_library",
+        "@io_k8s_apimachinery//pkg/types:go_default_library",
+        "@io_k8s_apimachinery//pkg/watch:go_default_library",
+    ],
+)
+
+go_test(
+    name = "go_default_test",
+    srcs = [
+        "admission_test.go",
+        "mutation_test.go",
+    ],
+    embed = [":go_default_library"],
+    deps = [
+        "//backend/src/cache/model:go_default_library",
+        "//backend/src/common/util:go_default_library",
+        "@com_github_stretchr_testify//assert:go_default_library",
+        "@com_github_stretchr_testify//require:go_default_library",
+        "@io_k8s_api//admission/v1beta1:go_default_library",
+        "@io_k8s_api//core/v1:go_default_library",
+        "@io_k8s_apimachinery//pkg/apis/meta/v1:go_default_library",
+        "@io_k8s_apimachinery//pkg/runtime:go_default_library",
+    ],
+)
diff --git a/backend/src/cache/server/admission.go b/backend/src/cache/server/admission.go
new file mode 100644
index 00000000000..1c0ddc188da
--- /dev/null
+++ b/backend/src/cache/server/admission.go
@@ -0,0 +1,180 @@
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package server + +import ( + "encoding/json" + "errors" + "fmt" + "io/ioutil" + "log" + "net/http" + + "k8s.io/api/admission/v1beta1" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + "k8s.io/apimachinery/pkg/runtime" + "k8s.io/apimachinery/pkg/runtime/serializer" + "k8s.io/apimachinery/pkg/types" +) + +type OperationType string + +const ( + OperationTypeAdd OperationType = "add" + OperationTypeReplace OperationType = "replace" + OperationTypeRemove OperationType = "remove" +) + +// patchOperation is an operation of a JSON patch, see https://tools.ietf.org/html/rfc6902 . +type patchOperation struct { + Op OperationType `json:"op"` + Path string `json:"path"` + Value interface{} `json:"value,omitempty"` +} + +// admitFunc is a callback for admission controller logic. Given an AdmissionRequest, it returns the sequence of patch +// operations to be applied in case of success, or the error that will be shown when the operation is rejected. +type admitFunc func(_ *v1beta1.AdmissionRequest, clientMgr ClientManagerInterface) ([]patchOperation, error) + +const ( + ContentType string = "Content-Type" + JsonContentType string = "application/json" +) + +var ( + universalDeserializer = serializer.NewCodecFactory(runtime.NewScheme()).UniversalDeserializer() +) + +// isKubeNamespace checks if the given namespace is a Kubernetes-owned namespace. +func isKubeNamespace(ns string) bool { + return ns == metav1.NamespacePublic || ns == metav1.NamespaceSystem +} + +// doServeAdmitFunc parses the HTTP request for an admission controller webhook, and -- in case of a well-formed +// request -- delegates the admission control logic to the given admitFunc. The response body is then returned as raw +// bytes. +func doServeAdmitFunc(w http.ResponseWriter, r *http.Request, admit admitFunc, clientMgr ClientManagerInterface) ([]byte, error) { + // Step 1: Request validation. Only handle POST requests with a body and json content type. + + if r.Method != http.MethodPost { + w.WriteHeader(http.StatusMethodNotAllowed) + return nil, fmt.Errorf("Invalid method %q, only POST requests are allowed", r.Method) + } + + body, err := ioutil.ReadAll(r.Body) + if err != nil { + w.WriteHeader(http.StatusBadRequest) + return nil, fmt.Errorf("Could not read request body: %v", err) + } + + if contentType := r.Header.Get(ContentType); contentType != JsonContentType { + w.WriteHeader(http.StatusBadRequest) + return nil, fmt.Errorf("Unsupported content type %q, only %q is supported", contentType, JsonContentType) + } + + // Step 2: Parse the AdmissionReview request. + + var admissionReviewReq v1beta1.AdmissionReview + + _, _, err = universalDeserializer.Decode(body, nil, &admissionReviewReq) + + if err != nil { + w.WriteHeader(http.StatusBadRequest) + return nil, fmt.Errorf("Could not deserialize request: %v", err) + } + if admissionReviewReq.Request == nil { + w.WriteHeader(http.StatusBadRequest) + return nil, errors.New("Malformed admission review request: request body is nil") + } + + // Step 3: Construct the AdmissionReview response. + + // Apply the admit() function only for non-Kubernetes namespaces. 
For objects in Kubernetes namespaces, return + // an empty set of patch operations. + if isKubeNamespace(admissionReviewReq.Request.Namespace) { + return allowedResponse(admissionReviewReq.Request.UID, nil), nil + } + + var patchOps []patchOperation + + patchOps, err = admit(admissionReviewReq.Request, clientMgr) + if err != nil { + return errorResponse(admissionReviewReq.Request.UID, err), nil + } + + patchBytes, err := json.Marshal(patchOps) + if err != nil { + w.WriteHeader(http.StatusInternalServerError) + return nil, fmt.Errorf("Could not marshal JSON patch: %v", err) + } + + return allowedResponse(admissionReviewReq.Request.UID, patchBytes), nil +} + +// serveAdmitFunc is a wrapper around doServeAdmitFunc that adds error handling and logging. +func serveAdmitFunc(w http.ResponseWriter, r *http.Request, admit admitFunc, clientMgr ClientManagerInterface) { + log.Print("Handling webhook request ...") + + var writeErr error + if bytes, err := doServeAdmitFunc(w, r, admit, clientMgr); err != nil { + log.Printf("Error handling webhook request: %v", err) + w.WriteHeader(http.StatusInternalServerError) + _, writeErr = w.Write([]byte(err.Error())) + } else { + log.Print("Webhook request handled successfully") + _, writeErr = w.Write(bytes) + } + + if writeErr != nil { + log.Printf("Could not write response: %v", writeErr) + } +} + +// AdmitFuncHandler takes an admitFunc and wraps it into a http.Handler by means of calling serveAdmitFunc. +func AdmitFuncHandler(admit admitFunc, clientMgr ClientManagerInterface) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + serveAdmitFunc(w, r, admit, clientMgr) + }) +} + +func allowedResponse(uid types.UID, patchBytes []byte) []byte { + admissionReviewResponse := v1beta1.AdmissionReview{ + Response: &v1beta1.AdmissionResponse{ + UID: uid, + }, + } + admissionReviewResponse.Response.Allowed = true + admissionReviewResponse.Response.Patch = patchBytes + + bytes, err := json.Marshal(&admissionReviewResponse) + if err != nil { + return errorResponse(uid, err) + } + return bytes +} + +func errorResponse(uid types.UID, err error) []byte { + admissionReviewResponse := v1beta1.AdmissionReview{ + Response: &v1beta1.AdmissionResponse{ + UID: uid, + }, + } + admissionReviewResponse.Response.Allowed = false + admissionReviewResponse.Response.Result = &metav1.Status{ + Message: err.Error(), + } + bytes, _ := json.Marshal(&admissionReviewResponse) + return bytes +} diff --git a/backend/src/cache/server/admission_test.go b/backend/src/cache/server/admission_test.go new file mode 100644 index 00000000000..54752891936 --- /dev/null +++ b/backend/src/cache/server/admission_test.go @@ -0,0 +1,114 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
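
Each `patchOperation` returned by an `admitFunc` is one entry of an RFC 6902 JSON patch, which `doServeAdmitFunc` marshals into the AdmissionReview response. A standalone sketch of how such a patch serializes (the annotation key and image value are made up for illustration; the real mutations are defined by `MutatePodIfCached` elsewhere in this PR):

```go
package main

import (
	"encoding/json"
	"fmt"
)

// patchOperation mirrors the type defined in admission.go.
type patchOperation struct {
	Op    string      `json:"op"`
	Path  string      `json:"path"`
	Value interface{} `json:"value,omitempty"`
}

func main() {
	// Hypothetical mutations in the shape an admitFunc would return. Note the
	// RFC 6901 "~1" escape, which encodes a "/" inside the annotation key.
	patch := []patchOperation{
		{Op: "add", Path: "/metadata/annotations/example.org~1cached", Value: "true"},
		{Op: "replace", Path: "/spec/containers/0/image", Value: "registry.example/cached:latest"},
	}

	out, err := json.Marshal(patch)
	if err != nil {
		panic(err)
	}
	// Prints the JSON patch array exactly as it would appear in the
	// AdmissionResponse's Patch field (before base64 encoding on the wire).
	fmt.Println(string(out))
}
```
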
+ +package server + +import ( + "encoding/json" + "net/http" + "net/http/httptest" + "strings" + "testing" + + "github.com/kubeflow/pipelines/backend/src/common/util" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "k8s.io/api/admission/v1beta1" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" +) + +var ( + fakeAdmissionReview = v1beta1.AdmissionReview{ + TypeMeta: metav1.TypeMeta{ + Kind: "pods", + APIVersion: "v1", + }, + Request: &v1beta1.AdmissionRequest{ + UID: "123", + }, + Response: &v1beta1.AdmissionResponse{ + UID: "123", + }, + } +) + +var fakeClientManager = NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) + +func fakeAdmitFunc(req *v1beta1.AdmissionRequest, clientMgr ClientManagerInterface) ([]patchOperation, error) { + operation := patchOperation{ + Op: OperationTypeAdd, + Path: "test", + Value: "test", + } + return []patchOperation{operation}, nil +} + +func TestIsKubeNamespace(t *testing.T) { + assert.True(t, isKubeNamespace("kube-public")) + assert.True(t, isKubeNamespace("kube-system")) + assert.False(t, isKubeNamespace("kube")) +} + +func TestDoServeAdmitFunc(t *testing.T) { + body, err := json.Marshal(fakeAdmissionReview) + req, err := http.NewRequest("POST", "/url", strings.NewReader(string(body))) + req.Header.Set("Content-Type", "application/json") + + if err != nil { + t.Fatalf("failed to send fake request, err: %v", err) + } + + rr := httptest.NewRecorder() + patchOperations, err := doServeAdmitFunc(rr, req, fakeAdmitFunc, fakeClientManager) + require.NotNil(t, patchOperations) + assert.Nil(t, err) +} + +func TestDoServeAdmitFuncWithInvalidHttpMethod(t *testing.T) { + req, _ := http.NewRequest("Get", "", nil) + rr := httptest.NewRecorder() + patchOperations, err := doServeAdmitFunc(rr, req, fakeAdmitFunc, fakeClientManager) + assert.Nil(t, patchOperations) + assert.Contains(t, err.Error(), "Invalid method") +} + +func TestDoServeAdmitFuncWithInvalidContentType(t *testing.T) { + req, err := http.NewRequest("POST", "/url", strings.NewReader("")) + rr := httptest.NewRecorder() + patchOperations, err := doServeAdmitFunc(rr, req, fakeAdmitFunc, fakeClientManager) + assert.Nil(t, patchOperations) + assert.Contains(t, err.Error(), "Unsupported content type") +} + +func TestDoServeAdmitFuncWithInvalidRequestBody(t *testing.T) { + req, err := http.NewRequest("POST", "/url", strings.NewReader("invalid")) + req.Header.Set("Content-Type", "application/json") + rr := httptest.NewRecorder() + patchOperations, err := doServeAdmitFunc(rr, req, fakeAdmitFunc, fakeClientManager) + assert.Nil(t, patchOperations) + assert.Contains(t, err.Error(), "Could not deserialize request") +} + +func TestDoServeAdmitFuncWithEmptyAdmissionRequest(t *testing.T) { + invalidRequest := fakeAdmissionReview + invalidRequest.Request = nil + body, _ := json.Marshal(invalidRequest) + req, _ := http.NewRequest("POST", "/url", strings.NewReader(string(body))) + req.Header.Set("Content-Type", "application/json") + + rr := httptest.NewRecorder() + patchOperations, err := doServeAdmitFunc(rr, req, fakeAdmitFunc, fakeClientManager) + assert.Nil(t, patchOperations) + assert.Contains(t, err.Error(), "Malformed admission review request: request body is nil") +} diff --git a/backend/src/cache/server/client_manager_fake.go b/backend/src/cache/server/client_manager_fake.go new file mode 100644 index 00000000000..d26e1271d08 --- /dev/null +++ b/backend/src/cache/server/client_manager_fake.go @@ -0,0 +1,75 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, 
Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package server
+
+import (
+    "github.com/golang/glog"
+    "github.com/kubeflow/pipelines/backend/src/cache/client"
+    "github.com/kubeflow/pipelines/backend/src/cache/storage"
+    "github.com/kubeflow/pipelines/backend/src/common/util"
+)
+
+type FakeClientManager struct {
+    db                *storage.DB
+    cacheStore        storage.ExecutionCacheStoreInterface
+    k8sCoreClientFake *client.FakeKuberneteCoreClient
+    time              util.TimeInterface
+}
+
+func NewFakeClientManager(time util.TimeInterface) (*FakeClientManager, error) {
+    if time == nil {
+        glog.Fatal("The time parameter must not be nil.") // Must never happen
+    }
+    // Initialize GORM
+    db, err := storage.NewFakeDb()
+    if err != nil {
+        return nil, err
+    }
+
+    return &FakeClientManager{
+        db:                db,
+        cacheStore:        storage.NewExecutionCacheStore(db, time),
+        k8sCoreClientFake: client.NewFakeKuberneteCoresClient(),
+        time:              time,
+    }, nil
+}
+
+func NewFakeClientManagerOrFatal(time util.TimeInterface) *FakeClientManager {
+    fakeStore, err := NewFakeClientManager(time)
+    if err != nil {
+        glog.Fatal("Failed to create the fake client manager. Failing fast.")
+    }
+    return fakeStore
+}
+
+func (f *FakeClientManager) CacheStore() storage.ExecutionCacheStoreInterface {
+    return f.cacheStore
+}
+
+func (f *FakeClientManager) Time() util.TimeInterface {
+    return f.time
+}
+
+func (f *FakeClientManager) DB() *storage.DB {
+    return f.db
+}
+
+func (f *FakeClientManager) Close() error {
+    return f.db.Close()
+}
+
+func (f *FakeClientManager) KubernetesCoreClient() client.KubernetesCoreInterface {
+    return f.k8sCoreClientFake
+}
diff --git a/backend/src/cache/server/mutation.go b/backend/src/cache/server/mutation.go
new file mode 100644
index 00000000000..201c4a23434
--- /dev/null
+++ b/backend/src/cache/server/mutation.go
@@ -0,0 +1,241 @@
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
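A minimal sketch of how an admitFunc plugs into AdmitFuncHandler, written as if it lived in this same server package. The handler path, port, and certificate file names are illustrative assumptions; the real server wiring lives in the cache server's main package, which is not part of this section:

package server

import (
    "log"
    "net/http"

    "k8s.io/api/admission/v1beta1"
)

// noopAdmit admits every request without emitting any patch operations.
func noopAdmit(_ *v1beta1.AdmissionRequest, _ ClientManagerInterface) ([]patchOperation, error) {
    return nil, nil
}

// serveWebhookExample registers the handler; admission webhooks must be served over TLS.
func serveWebhookExample(clientMgr ClientManagerInterface) {
    mux := http.NewServeMux()
    mux.Handle("/mutate", AdmitFuncHandler(noopAdmit, clientMgr))
    log.Fatal(http.ListenAndServeTLS(":8443", "cert.pem", "key.pem", mux))
}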
+
+package server
+
+import (
+    "crypto/sha256"
+    "encoding/hex"
+    "encoding/json"
+    "fmt"
+    "log"
+    "strconv"
+    "strings"
+
+    "github.com/kubeflow/pipelines/backend/src/cache/client"
+    "github.com/kubeflow/pipelines/backend/src/cache/model"
+    "github.com/kubeflow/pipelines/backend/src/cache/storage"
+    "k8s.io/api/admission/v1beta1"
+    corev1 "k8s.io/api/core/v1"
+    metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
+)
+
+const (
+    KFPCacheEnabledLabelKey   string = "pipelines.kubeflow.org/cache_enabled"
+    KFPCacheEnabledLabelValue string = "true"
+    KFPCachedLabelKey         string = "pipelines.kubeflow.org/reused_from_cache"
+    KFPCachedLabelValue       string = "true"
+    ArgoWorkflowNodeName      string = "workflows.argoproj.io/node-name"
+    ArgoWorkflowTemplate      string = "workflows.argoproj.io/template"
+    ExecutionKey              string = "pipelines.kubeflow.org/execution_cache_key"
+    CacheIDLabelKey           string = "pipelines.kubeflow.org/cache_id"
+    ArgoWorkflowOutputs       string = "workflows.argoproj.io/outputs"
+    MetadataWrittenKey        string = "pipelines.kubeflow.org/metadata_written"
+    AnnotationPath            string = "/metadata/annotations"
+    LabelPath                 string = "/metadata/labels"
+    SpecContainersPath        string = "/spec/containers"
+    SpecInitContainersPath    string = "/spec/initContainers"
+    TFXPodSuffix              string = "tfx/orchestration/kubeflow/container_entrypoint.py"
+    ArchiveLocationKey        string = "archiveLocation"
+)
+
+var (
+    podResource = metav1.GroupVersionResource{Version: "v1", Resource: "pods"}
+)
+
+type ClientManagerInterface interface {
+    CacheStore() storage.ExecutionCacheStoreInterface
+    KubernetesCoreClient() client.KubernetesCoreInterface
+}
+
+// MutatePodIfCached checks whether this execution has already been run and, if a cached
+// result exists, patches the pod so that it reuses the cached output instead of re-running.
+func MutatePodIfCached(req *v1beta1.AdmissionRequest, clientMgr ClientManagerInterface) ([]patchOperation, error) {
+    // This handler should only get called on Pod objects as per the MutatingWebhookConfiguration in the YAML file.
+    // However, if (for whatever reason) this gets invoked on an object of a different kind, issue a log message but
+    // let the object request pass through otherwise.
+    if req.Resource != podResource {
+        log.Printf("Expect resource to be %q, but found %q", podResource, req.Resource)
+        return nil, nil
+    }
+
+    // Parse the Pod object.
+    raw := req.Object.Raw
+    pod := corev1.Pod{}
+    if _, _, err := universalDeserializer.Decode(raw, nil, &pod); err != nil {
+        return nil, fmt.Errorf("could not deserialize pod object: %v", err)
+    }
+
+    // Pod filtering: only cache KFP Argo pods, and skip TFX pods.
+    // TODO: Switch to objectSelector once Kubernetes 1.15 hits the GKE stable channel. See
+    //       https://github.com/kubernetes/kubernetes/pull/78505
+    //       https://cloud.google.com/kubernetes-engine/docs/release-notes-stable
+    if !isKFPCacheEnabled(&pod) {
+        log.Printf("Cache is not enabled for pod %s.", pod.ObjectMeta.Name)
+        return nil, nil
+    }
+
+    if isTFXPod(&pod) {
+        log.Printf("Pod %s was created by a TFX pipeline; skipping cache.", pod.ObjectMeta.Name)
+        return nil, nil
+    }
+
+    var patches []patchOperation
+    annotations := pod.ObjectMeta.Annotations
+    labels := pod.ObjectMeta.Labels
+    template, exists := annotations[ArgoWorkflowTemplate]
+    var executionHashKey string
+    if !exists {
+        return patches, nil
+    }
+
+    // Generate the executionHashKey based on pod.metadata.annotations.workflows.argoproj.io/template
+    executionHashKey, err := generateCacheKeyFromTemplate(template)
+    if err != nil {
+        log.Printf("Unable to generate cache key for pod %s: %s", pod.ObjectMeta.Name, err.Error())
+        return patches, nil
+    }
+    log.Println(executionHashKey)
+
+    annotations[ExecutionKey] = executionHashKey
+    labels[CacheIDLabelKey] = ""
+    var maxCacheStalenessInSeconds int64 = -1
+    maxCacheStaleness, exists := annotations[MaxCacheStalenessKey]
+    if exists {
+        maxCacheStalenessInSeconds = getMaxCacheStaleness(maxCacheStaleness)
+    }
+
+    var cachedExecution *model.ExecutionCache
+    cachedExecution, err = clientMgr.CacheStore().GetExecutionCache(executionHashKey, maxCacheStalenessInSeconds)
+    if err != nil {
+        log.Println(err.Error())
+    }
+    // Found a cached execution: add the cached output and cache_id, and replace the container images.
+    if cachedExecution != nil {
+        log.Println("Cached output: " + cachedExecution.ExecutionOutput)
+
+        annotations[ArgoWorkflowOutputs] = getValueFromSerializedMap(cachedExecution.ExecutionOutput, ArgoWorkflowOutputs)
+        labels[CacheIDLabelKey] = strconv.FormatInt(cachedExecution.ID, 10)
+        labels[KFPCachedLabelKey] = KFPCachedLabelValue // This label indicates the pod is taken from cache.
+
+        // These labels cache results for metadata-writer.
+        labels[MetadataExecutionIDKey] = getValueFromSerializedMap(cachedExecution.ExecutionOutput, MetadataExecutionIDKey)
+        labels[MetadataWrittenKey] = "true"
+
+        dummyContainer := corev1.Container{
+            Name:    "main",
+            Image:   "alpine",
+            Command: []string{`echo`, `"This step output is taken from cache."`},
+        }
+        dummyContainers := []corev1.Container{
+            dummyContainer,
+        }
+        patches = append(patches, patchOperation{
+            Op:    OperationTypeReplace,
+            Path:  SpecContainersPath,
+            Value: dummyContainers,
+        })
+        if len(pod.Spec.InitContainers) != 0 {
+            patches = append(patches, patchOperation{
+                Op:   OperationTypeRemove,
+                Path: SpecInitContainersPath,
+            })
+        }
+    }
+
+    // Add executionKey to pod.metadata.annotations
+    patches = append(patches, patchOperation{
+        Op:    OperationTypeAdd,
+        Path:  AnnotationPath,
+        Value: annotations,
+    })
+
+    // Add the cache_id label key
+    patches = append(patches, patchOperation{
+        Op:    OperationTypeAdd,
+        Path:  LabelPath,
+        Value: labels,
+    })
+
+    return patches, nil
+}
+
+func generateCacheKeyFromTemplate(template string) (string, error) {
+    var templateMap map[string]interface{}
+    b := []byte(template)
+    err := json.Unmarshal(b, &templateMap)
+    if err != nil {
+        return "", err
+    }
+
+    // template[archiveLocation] needs to be removed when calculating the cache key,
+    // because archiveLocation.key is different in every single run.
+    _, exists := templateMap[ArchiveLocationKey]
+    if exists {
+        log.Println("ArchiveLocation exists in template.")
+        delete(templateMap, ArchiveLocationKey)
+    }
+    b, err = json.Marshal(templateMap)
+    if err != nil {
+        return "", err
+    }
+    hash := sha256.New()
+    hash.Write(b)
+    md := hash.Sum(nil)
+    executionHashKey := hex.EncodeToString(md)
+
+    return executionHashKey, nil
+}
+
+func getValueFromSerializedMap(serializedMap string, key string) string {
+    var outputMap map[string]interface{}
+    b := []byte(serializedMap)
+    err := json.Unmarshal(b, &outputMap)
+    if err != nil {
+        return ""
+    }
+
+    value, exist := outputMap[key].(string)
+    if !exist || value == "" {
+        return ""
+    }
+    return value
+}
+
+func isKFPCacheEnabled(pod *corev1.Pod) bool {
+    cacheEnabled, exists := pod.ObjectMeta.Labels[KFPCacheEnabledLabelKey]
+    if !exists {
+        log.Printf("Pod %s was not created by KFP.", pod.ObjectMeta.Name)
+        return false
+    }
+    return cacheEnabled == KFPCacheEnabledLabelValue
+}
+
+func isTFXPod(pod *corev1.Pod) bool {
+    containers := pod.Spec.Containers
+    if len(containers) == 0 {
+        log.Printf("Pod %s has no containers.", pod.ObjectMeta.Name)
+        return true
+    }
+    var mainContainers []corev1.Container
+    for _, c := range containers {
+        if c.Name == "main" {
+            mainContainers = append(mainContainers, c)
+        }
+    }
+    if len(mainContainers) != 1 {
+        return false
+    }
+    mainContainer := mainContainers[0]
+    return len(mainContainer.Command) != 0 && strings.HasSuffix(mainContainer.Command[len(mainContainer.Command)-1], TFXPodSuffix)
+}
diff --git a/backend/src/cache/server/mutation_test.go b/backend/src/cache/server/mutation_test.go
new file mode 100644
index 00000000000..03b0f03bfd6
--- /dev/null
+++ b/backend/src/cache/server/mutation_test.go
@@ -0,0 +1,152 @@
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
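A standalone sketch of the cache-key derivation implemented by generateCacheKeyFromTemplate above: drop archiveLocation, re-marshal, and hex-encode the SHA-256 digest. The helper name cacheKey is illustrative:

package main

import (
    "crypto/sha256"
    "encoding/hex"
    "encoding/json"
    "fmt"
)

// cacheKey mirrors generateCacheKeyFromTemplate: unmarshal the template JSON,
// drop archiveLocation (its key differs on every run), re-marshal, and hash.
func cacheKey(template string) (string, error) {
    var m map[string]interface{}
    if err := json.Unmarshal([]byte(template), &m); err != nil {
        return "", err
    }
    delete(m, "archiveLocation")
    // json.Marshal writes map keys in sorted order, so the bytes are deterministic.
    b, err := json.Marshal(m)
    if err != nil {
        return "", err
    }
    sum := sha256.Sum256(b)
    return hex.EncodeToString(sum[:]), nil
}

func main() {
    key, err := cacheKey(`{"name": "test_template"}`)
    if err != nil {
        panic(err)
    }
    // Per the tests later in this diff, this prints
    // f98b62e4625b9f96bac478ac72d88181a37e4f1d6bfd3bd5f53e29286b2ca034.
    fmt.Println(key)
}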
+ +package server + +import ( + "bytes" + "encoding/json" + "testing" + + "github.com/kubeflow/pipelines/backend/src/cache/model" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "k8s.io/api/admission/v1beta1" + corev1 "k8s.io/api/core/v1" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + "k8s.io/apimachinery/pkg/runtime" +) + +var ( + fakePod = &corev1.Pod{ + TypeMeta: metav1.TypeMeta{ + Kind: "Pod", + APIVersion: "v1", + }, + ObjectMeta: metav1.ObjectMeta{ + Annotations: map[string]string{ + ArgoWorkflowNodeName: "test_node", + ArgoWorkflowTemplate: `{"name": "test_template"}`, + }, + Labels: map[string]string{ + ArgoCompleteLabelKey: "true", + KFPCacheEnabledLabelKey: KFPCacheEnabledLabelValue, + }, + }, + Spec: corev1.PodSpec{ + Containers: []corev1.Container{ + corev1.Container{ + Name: "main", + Image: "test_image", + Command: []string{"python"}, + }, + }, + }, + } + fakeAdmissionRequest = v1beta1.AdmissionRequest{ + UID: "test-12345", + Kind: metav1.GroupVersionKind{ + Group: "group", + Version: "v1", + Kind: "k8s", + }, + Resource: metav1.GroupVersionResource{ + Version: "v1", + Resource: "pods", + }, + SubResource: "subresource", + Name: "test", + Namespace: "default", + Operation: "test", + Object: runtime.RawExtension{ + Raw: EncodePod(fakePod), + }, + } +) + +func EncodePod(pod *corev1.Pod) []byte { + reqBodyBytes := new(bytes.Buffer) + json.NewEncoder(reqBodyBytes).Encode(*pod) + + return reqBodyBytes.Bytes() +} + +func GetFakeRequestFromPod(pod *corev1.Pod) *v1beta1.AdmissionRequest { + fakeRequest := fakeAdmissionRequest + fakeRequest.Object.Raw = EncodePod(pod) + return &fakeRequest +} + +func TestMutatePodIfCachedWithErrorPodResource(t *testing.T) { + mockAdmissionRequest := &v1beta1.AdmissionRequest{ + Resource: metav1.GroupVersionResource{ + Version: "wrong", Resource: "wrong", + }, + } + patchOperations, err := MutatePodIfCached(mockAdmissionRequest, fakeClientManager) + assert.Nil(t, patchOperations) + assert.Nil(t, err) +} + +func TestMutatePodIfCachedWithDecodeError(t *testing.T) { + invalidAdmissionRequest := fakeAdmissionRequest + invalidAdmissionRequest.Object.Raw = []byte{5, 5} + patchOperation, err := MutatePodIfCached(&invalidAdmissionRequest, fakeClientManager) + assert.Nil(t, patchOperation) + assert.Contains(t, err.Error(), "could not deserialize pod object") +} + +func TestMutatePodIfCachedWithCacheDisabledPod(t *testing.T) { + cacheDisabledPod := *fakePod + cacheDisabledPod.ObjectMeta.Labels[KFPCacheEnabledLabelKey] = "false" + patchOperation, err := MutatePodIfCached(GetFakeRequestFromPod(&cacheDisabledPod), fakeClientManager) + assert.Nil(t, patchOperation) + assert.Nil(t, err) +} + +func TestMutatePodIfCachedWithTFXPod(t *testing.T) { + tfxPod := *fakePod + mainContainerCommand := append(tfxPod.Spec.Containers[0].Command, "/tfx-src/"+TFXPodSuffix) + tfxPod.Spec.Containers[0].Command = mainContainerCommand + patchOperation, err := MutatePodIfCached(GetFakeRequestFromPod(&tfxPod), fakeClientManager) + assert.Nil(t, patchOperation) + assert.Nil(t, err) +} + +func TestMutatePodIfCached(t *testing.T) { + patchOperation, err := MutatePodIfCached(&fakeAdmissionRequest, fakeClientManager) + assert.Nil(t, err) + require.NotNil(t, patchOperation) + require.Equal(t, 2, len(patchOperation)) + require.Equal(t, patchOperation[0].Op, OperationTypeAdd) + require.Equal(t, patchOperation[1].Op, OperationTypeAdd) +} + +func TestMutatePodIfCachedWithCacheEntryExist(t *testing.T) { + executionCache := &model.ExecutionCache{ + 
ExecutionCacheKey: "f98b62e4625b9f96bac478ac72d88181a37e4f1d6bfd3bd5f53e29286b2ca034",
+        ExecutionOutput:   "testOutput",
+        ExecutionTemplate: `{"name": "test_template"}`,
+        MaxCacheStaleness: -1,
+    }
+    fakeClientManager.CacheStore().CreateExecutionCache(executionCache)
+
+    patchOperation, err := MutatePodIfCached(&fakeAdmissionRequest, fakeClientManager)
+    assert.Nil(t, err)
+    require.NotNil(t, patchOperation)
+    require.Equal(t, 3, len(patchOperation))
+    require.Equal(t, patchOperation[0].Op, OperationTypeReplace)
+    require.Equal(t, patchOperation[1].Op, OperationTypeAdd)
+    require.Equal(t, patchOperation[2].Op, OperationTypeAdd)
+}
diff --git a/backend/src/cache/server/watcher.go b/backend/src/cache/server/watcher.go
new file mode 100644
index 00000000000..b53c0f4fc7b
--- /dev/null
+++ b/backend/src/cache/server/watcher.go
@@ -0,0 +1,132 @@
+package server
+
+import (
+    "encoding/json"
+    "fmt"
+    "log"
+    "strconv"
+    "time"
+
+    "github.com/kubeflow/pipelines/backend/src/cache/client"
+    "github.com/kubeflow/pipelines/backend/src/cache/model"
+    "github.com/peterhellberg/duration"
+    corev1 "k8s.io/api/core/v1"
+    metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
+    "k8s.io/apimachinery/pkg/types"
+    "k8s.io/apimachinery/pkg/watch"
+)
+
+const (
+    ArgoCompleteLabelKey   string = "workflows.argoproj.io/completed"
+    MetadataExecutionIDKey string = "pipelines.kubeflow.org/metadata_execution_id"
+    MaxCacheStalenessKey   string = "pipelines.kubeflow.org/max_cache_staleness"
+)
+
+func WatchPods(namespaceToWatch string, clientManager ClientManagerInterface) {
+    k8sCore := clientManager.KubernetesCoreClient()
+
+    for {
+        listOptions := metav1.ListOptions{
+            Watch:         true,
+            LabelSelector: CacheIDLabelKey,
+        }
+        watcher, err := k8sCore.PodClient(namespaceToWatch).Watch(listOptions)
+        if err != nil {
+            // Retry instead of reading from a nil watcher.
+            log.Printf("Watcher error: %v", err)
+            continue
+        }
+
+        for event := range watcher.ResultChan() {
+            // Skip error events before asserting the object type, since an error
+            // event's object may not be a Pod.
+            if event.Type == watch.Error {
+                continue
+            }
+            pod, ok := event.Object.(*corev1.Pod)
+            if !ok {
+                continue
+            }
+            log.Println(pod.GetName())
+
+            if !isPodCompletedAndSucceeded(pod) {
+                log.Printf("Pod %s is not completed or not in successful status.", pod.ObjectMeta.Name)
+                continue
+            }
+
+            if isCacheWriten(pod.ObjectMeta.Labels) {
+                continue
+            }
+
+            executionOutput, exists := pod.ObjectMeta.Annotations[ArgoWorkflowOutputs]
+            executionKey := pod.ObjectMeta.Annotations[ExecutionKey]
+            if !exists {
+                continue
+            }
+
+            executionOutputMap := make(map[string]interface{})
+            executionOutputMap[ArgoWorkflowOutputs] = executionOutput
+            executionOutputMap[MetadataExecutionIDKey] = pod.ObjectMeta.Labels[MetadataExecutionIDKey]
+            executionOutputJSON, _ := json.Marshal(executionOutputMap)
+
+            executionMaxCacheStaleness, exists := pod.ObjectMeta.Annotations[MaxCacheStalenessKey]
+            var maxCacheStalenessInSeconds int64 = -1
+            if exists {
+                maxCacheStalenessInSeconds = getMaxCacheStaleness(executionMaxCacheStaleness)
+            }
+
+            executionTemplate := pod.ObjectMeta.Annotations[ArgoWorkflowTemplate]
+            executionToPersist := model.ExecutionCache{
+                ExecutionCacheKey: executionKey,
+                ExecutionTemplate: executionTemplate,
+                ExecutionOutput:   string(executionOutputJSON),
+                MaxCacheStaleness: maxCacheStalenessInSeconds,
+            }
+
+            cacheEntryCreated, err := clientManager.CacheStore().CreateExecutionCache(&executionToPersist)
+            if err != nil {
+                log.Println("Unable to create cache entry.")
+                continue
+            }
+            err = patchCacheID(k8sCore, pod, namespaceToWatch, cacheEntryCreated.ID)
+            if err != nil {
+                log.Print(err.Error())
+            }
+        }
+    }
+}
+
+func
isPodCompletedAndSucceeded(pod *corev1.Pod) bool { + return pod.ObjectMeta.Labels[ArgoCompleteLabelKey] == "true" && pod.Status.Phase == corev1.PodSucceeded +} + +func isCacheWriten(labels map[string]string) bool { + cacheID := labels[CacheIDLabelKey] + return cacheID != "" +} + +func patchCacheID(k8sCore client.KubernetesCoreInterface, podToPatch *corev1.Pod, namespaceToWatch string, id int64) error { + labels := podToPatch.ObjectMeta.Labels + labels[CacheIDLabelKey] = strconv.FormatInt(id, 10) + log.Println(id) + var patchOps []patchOperation + patchOps = append(patchOps, patchOperation{ + Op: OperationTypeAdd, + Path: LabelPath, + Value: labels, + }) + patchBytes, err := json.Marshal(patchOps) + if err != nil { + return fmt.Errorf("Unable to patch cache_id to pod: %s", podToPatch.ObjectMeta.Name) + } + _, err = k8sCore.PodClient(namespaceToWatch).Patch(podToPatch.ObjectMeta.Name, types.JSONPatchType, patchBytes) + if err != nil { + return err + } + log.Printf("Cache id patched.") + return nil +} + +// Convert RFC3339 Duration(Eg. "P1DT30H4S") to int64 seconds. +func getMaxCacheStaleness(maxCacheStaleness string) int64 { + var seconds int64 = -1 + if d, err := duration.Parse(maxCacheStaleness); err == nil { + seconds = int64(d / time.Second) + } + return seconds +} diff --git a/backend/src/cache/storage/BUILD.bazel b/backend/src/cache/storage/BUILD.bazel new file mode 100644 index 00000000000..72c5088a0ae --- /dev/null +++ b/backend/src/cache/storage/BUILD.bazel @@ -0,0 +1,31 @@ +load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") + +go_library( + name = "go_default_library", + srcs = [ + "db.go", + "db_fake.go", + "execution_cache_store.go", + ], + importpath = "github.com/kubeflow/pipelines/backend/src/cache/storage", + visibility = ["//visibility:public"], + deps = [ + "//backend/src/cache/model:go_default_library", + "//backend/src/common/util:go_default_library", + "@com_github_golang_glog//:go_default_library", + "@com_github_jinzhu_gorm//:go_default_library", + "@com_github_mattn_go_sqlite3//:go_default_library", + ], +) + +go_test( + name = "go_default_test", + srcs = ["execution_cache_store_test.go"], + embed = [":go_default_library"], + deps = [ + "//backend/src/cache/model:go_default_library", + "//backend/src/common/util:go_default_library", + "@com_github_stretchr_testify//assert:go_default_library", + "@com_github_stretchr_testify//require:go_default_library", + ], +) diff --git a/backend/src/apiserver/storage/object_store_util.go b/backend/src/cache/storage/db.go similarity index 62% rename from backend/src/apiserver/storage/object_store_util.go rename to backend/src/cache/storage/db.go index e3c94d877fd..4b8102832d3 100644 --- a/backend/src/apiserver/storage/object_store_util.go +++ b/backend/src/cache/storage/db.go @@ -1,4 +1,4 @@ -// Copyright 2018 Google LLC +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -14,13 +14,18 @@ package storage -import "path" - -const ( - pipelineFolder = "pipelines" +import ( + "github.com/jinzhu/gorm" ) -// CreatePipelinePath creates object store path to a pipeline spec. -func CreatePipelinePath(pipelineID string) string { - return path.Join(pipelineFolder, pipelineID) +// DB a struct wrapping plain sql library with SQL dialect, to solve any feature +// difference between MySQL, which is used in production, and Sqlite, which is used +// for unit testing. 
+type DB struct { + *gorm.DB +} + +// NewDB creates a DB +func NewDB(db *gorm.DB) *DB { + return &DB{db} } diff --git a/backend/src/cache/storage/db_fake.go b/backend/src/cache/storage/db_fake.go new file mode 100644 index 00000000000..6733181e3da --- /dev/null +++ b/backend/src/cache/storage/db_fake.go @@ -0,0 +1,44 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package storage + +import ( + "fmt" + + "github.com/golang/glog" + "github.com/jinzhu/gorm" + "github.com/kubeflow/pipelines/backend/src/cache/model" + _ "github.com/mattn/go-sqlite3" +) + +func NewFakeDb() (*DB, error) { + // Initialize GORM + db, err := gorm.Open("sqlite3", ":memory:") + if err != nil { + return nil, fmt.Errorf("Could not create the GORM database: %v", err) + } + // Create tables + db.AutoMigrate(&model.ExecutionCache{}) + + return NewDB(db), nil +} + +func NewFakeDbOrFatal() *DB { + db, err := NewFakeDb() + if err != nil { + glog.Fatalf("The fake DB doesn't create successfully. Fail fast.") + } + return db +} diff --git a/backend/src/cache/storage/execution_cache_store.go b/backend/src/cache/storage/execution_cache_store.go new file mode 100644 index 00000000000..301d9c1a65e --- /dev/null +++ b/backend/src/cache/storage/execution_cache_store.go @@ -0,0 +1,151 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
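A standalone sketch of the staleness parsing used by getMaxCacheStaleness in watcher.go above, with the same github.com/peterhellberg/duration library: ISO 8601 duration strings become seconds, and anything unparseable falls back to -1, meaning no staleness limit. The wrapper name is illustrative:

package main

import (
    "fmt"
    "time"

    "github.com/peterhellberg/duration"
)

// maxCacheStalenessSeconds converts a duration string such as "P1D" or "PT30M"
// to seconds, returning -1 (no limit) when the string cannot be parsed.
func maxCacheStalenessSeconds(s string) int64 {
    var seconds int64 = -1
    if d, err := duration.Parse(s); err == nil {
        seconds = int64(d / time.Second)
    }
    return seconds
}

func main() {
    fmt.Println(maxCacheStalenessSeconds("P1DT30H4S")) // 1 day + 30 hours + 4 seconds = 194404
    fmt.Println(maxCacheStalenessSeconds("bogus"))     // -1
}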
+
+package storage
+
+import (
+    "database/sql"
+    "fmt"
+    "log"
+    "strconv"
+
+    model "github.com/kubeflow/pipelines/backend/src/cache/model"
+    "github.com/kubeflow/pipelines/backend/src/common/util"
+)
+
+type ExecutionCacheStoreInterface interface {
+    GetExecutionCache(executionCacheKey string, maxCacheStaleness int64) (*model.ExecutionCache, error)
+    CreateExecutionCache(*model.ExecutionCache) (*model.ExecutionCache, error)
+    DeleteExecutionCache(executionCacheKey string) error
+}
+
+type ExecutionCacheStore struct {
+    db   *DB
+    time util.TimeInterface
+}
+
+func (s *ExecutionCacheStore) GetExecutionCache(executionCacheKey string, maxCacheStaleness int64) (*model.ExecutionCache, error) {
+    if maxCacheStaleness == 0 {
+        return nil, fmt.Errorf("MaxCacheStaleness=0, cache is disabled")
+    }
+    r, err := s.db.Table("execution_caches").Where("ExecutionCacheKey = ?", executionCacheKey).Rows()
+    if err != nil {
+        return nil, fmt.Errorf("Failed to get execution cache: %q", executionCacheKey)
+    }
+    defer r.Close()
+    executionCaches, err := s.scanRows(r, maxCacheStaleness)
+    if err != nil {
+        return nil, fmt.Errorf("Failed to get execution cache: %q", executionCacheKey)
+    }
+    if len(executionCaches) == 0 {
+        return nil, fmt.Errorf("Execution cache not found with cache key: %q", executionCacheKey)
+    }
+    latestCache, err := getLatestCacheEntry(executionCaches)
+    if err != nil {
+        return nil, err
+    }
+    return latestCache, nil
+}
+
+func (s *ExecutionCacheStore) scanRows(rows *sql.Rows, podMaxCacheStaleness int64) ([]*model.ExecutionCache, error) {
+    var executionCaches []*model.ExecutionCache
+    for rows.Next() {
+        var executionCacheKey, executionTemplate, executionOutput string
+        var id, maxCacheStaleness, startedAtInSec, endedAtInSec int64
+        err := rows.Scan(
+            &id,
+            &executionCacheKey,
+            &executionTemplate,
+            &executionOutput,
+            &maxCacheStaleness,
+            &startedAtInSec,
+            &endedAtInSec)
+        if err != nil {
+            // Propagate scan failures instead of silently returning partial results.
+            return nil, err
+        }
+        log.Println("Get id: " + strconv.FormatInt(id, 10))
+        log.Println("Get template: " + executionTemplate)
+        if maxCacheStaleness == -1 || s.time.Now().UTC().Unix()-startedAtInSec <= podMaxCacheStaleness {
+            executionCaches = append(executionCaches, &model.ExecutionCache{
+                ID:                id,
+                ExecutionCacheKey: executionCacheKey,
+                ExecutionTemplate: executionTemplate,
+                ExecutionOutput:   executionOutput,
+                MaxCacheStaleness: maxCacheStaleness,
+                StartedAtInSec:    startedAtInSec,
+                EndedAtInSec:      endedAtInSec,
+            })
+        }
+    }
+    return executionCaches, nil
+}
+
+// The demo version returns the latest cache entry for a given cache key. MaxCacheStaleness will
+// be taken into consideration in the future.
+func getLatestCacheEntry(executionCaches []*model.ExecutionCache) (*model.ExecutionCache, error) {
+    var latestCacheEntry *model.ExecutionCache
+    var maxStartedAtInSec int64
+    for _, cache := range executionCaches {
+        if cache.StartedAtInSec >= maxStartedAtInSec {
+            latestCacheEntry = cache
+            maxStartedAtInSec = cache.StartedAtInSec
+        }
+    }
+    if latestCacheEntry == nil {
+        return nil, fmt.Errorf("No cache entry found")
+    }
+    return latestCacheEntry, nil
+}
+
+func (s *ExecutionCacheStore) CreateExecutionCache(executionCache *model.ExecutionCache) (*model.ExecutionCache, error) {
+    log.Println("Input cache: " + executionCache.ExecutionCacheKey)
+    newExecutionCache := *executionCache
+    log.Println("New cache key: " + newExecutionCache.ExecutionCacheKey)
+    now := s.time.Now().UTC().Unix()
+
+    newExecutionCache.StartedAtInSec = now
+    // TODO: the ended time needs to be revisited after the demo version.
+    newExecutionCache.EndedAtInSec = now
+
+    ok := s.db.NewRecord(newExecutionCache)
+    if !ok {
+        return nil, fmt.Errorf("Failed to create a new execution cache")
+    }
+    var rowInsert model.ExecutionCache
+    d := s.db.Create(&newExecutionCache).Scan(&rowInsert)
+    if d.Error != nil {
+        return nil, d.Error
+    }
+    log.Println("Cache entry created with cache key: " + newExecutionCache.ExecutionCacheKey)
+    log.Println(newExecutionCache.ExecutionTemplate)
+    log.Println(rowInsert.ID)
+    return &rowInsert, nil
+}
+
+func (s *ExecutionCacheStore) DeleteExecutionCache(executionCacheID string) error {
+    db := s.db.Delete(&model.ExecutionCache{}, "ID = ?", executionCacheID)
+    if db.Error != nil {
+        return db.Error
+    }
+    return nil
+}
+
+// NewExecutionCacheStore is the factory function for ExecutionCacheStore.
+func NewExecutionCacheStore(db *DB, time util.TimeInterface) *ExecutionCacheStore {
+    return &ExecutionCacheStore{
+        db:   db,
+        time: time,
+    }
+}
diff --git a/backend/src/cache/storage/execution_cache_store_test.go b/backend/src/cache/storage/execution_cache_store_test.go
new file mode 100644
index 00000000000..c10babea63c
--- /dev/null
+++ b/backend/src/cache/storage/execution_cache_store_test.go
@@ -0,0 +1,169 @@
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
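A minimal usage sketch of ExecutionCacheStore against the in-memory fake DB, mirroring the tests that follow; the key and output strings are illustrative:

package main

import (
    "fmt"

    "github.com/kubeflow/pipelines/backend/src/cache/model"
    "github.com/kubeflow/pipelines/backend/src/cache/storage"
    "github.com/kubeflow/pipelines/backend/src/common/util"
)

func main() {
    db := storage.NewFakeDbOrFatal()
    defer db.Close()
    store := storage.NewExecutionCacheStore(db, util.NewFakeTimeForEpoch())

    created, err := store.CreateExecutionCache(&model.ExecutionCache{
        ExecutionCacheKey: "demoKey",
        ExecutionTemplate: "demoTemplate",
        ExecutionOutput:   "demoOutput",
        MaxCacheStaleness: -1, // never expires
    })
    if err != nil {
        panic(err)
    }

    // A pod-level staleness of -1 disables the staleness check, so the entry is always eligible.
    hit, err := store.GetExecutionCache("demoKey", -1)
    if err != nil {
        panic(err)
    }
    fmt.Println(created.ID, hit.ExecutionOutput) // 1 demoOutput
}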
+ +package storage + +import ( + "testing" + + "github.com/kubeflow/pipelines/backend/src/cache/model" + "github.com/kubeflow/pipelines/backend/src/common/util" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func createExecutionCache(cacheKey string, cacheOutput string) *model.ExecutionCache { + return &model.ExecutionCache{ + ExecutionCacheKey: cacheKey, + ExecutionTemplate: "testTemplate", + ExecutionOutput: cacheOutput, + MaxCacheStaleness: -1, + StartedAtInSec: 1, + EndedAtInSec: 1, + } +} + +func TestCreateExecutionCache(t *testing.T) { + db := NewFakeDbOrFatal() + defer db.Close() + executionCacheStore := NewExecutionCacheStore(db, util.NewFakeTimeForEpoch()) + executionCacheExpected := model.ExecutionCache{ + ID: 1, + ExecutionCacheKey: "test", + ExecutionTemplate: "testTemplate", + ExecutionOutput: "testOutput", + MaxCacheStaleness: -1, + StartedAtInSec: 1, + EndedAtInSec: 1, + } + executionCache := &model.ExecutionCache{ + ExecutionCacheKey: "test", + ExecutionTemplate: "testTemplate", + ExecutionOutput: "testOutput", + MaxCacheStaleness: -1, + } + executionCache, err := executionCacheStore.CreateExecutionCache(executionCache) + assert.Nil(t, err) + require.Equal(t, executionCacheExpected, *executionCache) +} + +func TestCreateExecutionCacheWithDuplicateRecord(t *testing.T) { + executionCache := &model.ExecutionCache{ + ID: 1, + ExecutionCacheKey: "test", + ExecutionTemplate: "testTemplate", + ExecutionOutput: "testOutput", + MaxCacheStaleness: -1, + StartedAtInSec: 1, + EndedAtInSec: 1, + } + db := NewFakeDbOrFatal() + defer db.Close() + executionCacheStore := NewExecutionCacheStore(db, util.NewFakeTimeForEpoch()) + executionCacheStore.CreateExecutionCache(executionCache) + cache, err := executionCacheStore.CreateExecutionCache(executionCache) + assert.Nil(t, cache) + assert.Contains(t, err.Error(), "Failed to create a new execution cache") +} + +func TestGetExecutionCache(t *testing.T) { + db := NewFakeDbOrFatal() + defer db.Close() + executionCacheStore := NewExecutionCacheStore(db, util.NewFakeTimeForEpoch()) + + executionCacheStore.CreateExecutionCache(createExecutionCache("testKey", "testOutput")) + executionCacheExpected := model.ExecutionCache{ + ID: 1, + ExecutionCacheKey: "testKey", + ExecutionTemplate: "testTemplate", + ExecutionOutput: "testOutput", + MaxCacheStaleness: -1, + StartedAtInSec: 1, + EndedAtInSec: 1, + } + + var executionCache *model.ExecutionCache + executionCache, err := executionCacheStore.GetExecutionCache("testKey", -1) + require.Nil(t, err) + require.Equal(t, &executionCacheExpected, executionCache) +} + +func TestGetExecutionCacheWithEmptyCacheEntry(t *testing.T) { + db := NewFakeDbOrFatal() + defer db.Close() + executionCacheStore := NewExecutionCacheStore(db, util.NewFakeTimeForEpoch()) + + executionCacheStore.CreateExecutionCache(createExecutionCache("testKey", "testOutput")) + var executionCache *model.ExecutionCache + executionCache, err := executionCacheStore.GetExecutionCache("wrongKey", -1) + require.Nil(t, executionCache) + require.Contains(t, err.Error(), `Execution cache not found with cache key: "wrongKey"`) +} + +func TestGetExecutionCacheWithLatestCacheEntry(t *testing.T) { + db := NewFakeDbOrFatal() + defer db.Close() + executionCacheStore := NewExecutionCacheStore(db, util.NewFakeTimeForEpoch()) + + executionCacheStore.CreateExecutionCache(createExecutionCache("testKey", "testOutput")) + executionCacheStore.CreateExecutionCache(createExecutionCache("testKey", "testOutput2")) + + 
executionCacheExpected := model.ExecutionCache{ + ID: 2, + ExecutionCacheKey: "testKey", + ExecutionTemplate: "testTemplate", + ExecutionOutput: "testOutput2", + MaxCacheStaleness: -1, + StartedAtInSec: 2, + EndedAtInSec: 2, + } + var executionCache *model.ExecutionCache + executionCache, err := executionCacheStore.GetExecutionCache("testKey", -1) + require.Nil(t, err) + require.Equal(t, &executionCacheExpected, executionCache) +} + +func TestGetExecutionCacheWithExpiredMaxCacheStaleness(t *testing.T) { + db := NewFakeDbOrFatal() + defer db.Close() + executionCacheStore := NewExecutionCacheStore(db, util.NewFakeTimeForEpoch()) + executionCacheToPersist := &model.ExecutionCache{ + ExecutionCacheKey: "testKey", + ExecutionTemplate: "testTemplate", + ExecutionOutput: "testOutput", + MaxCacheStaleness: 0, + } + executionCacheStore.CreateExecutionCache(executionCacheToPersist) + + var executionCache *model.ExecutionCache + executionCache, err := executionCacheStore.GetExecutionCache("testKey", -1) + require.Contains(t, err.Error(), "Execution cache not found") + require.Nil(t, executionCache) +} + +func TestDeleteExecutionCache(t *testing.T) { + db := NewFakeDbOrFatal() + defer db.Close() + executionCacheStore := NewExecutionCacheStore(db, util.NewFakeTimeForEpoch()) + executionCacheStore.CreateExecutionCache(createExecutionCache("testKey", "testOutput")) + executionCache, err := executionCacheStore.GetExecutionCache("testKey", -1) + assert.Nil(t, err) + assert.NotNil(t, executionCache) + + err = executionCacheStore.DeleteExecutionCache("1") + assert.Nil(t, err) + _, err = executionCacheStore.GetExecutionCache("testKey", -1) + assert.NotNil(t, err) + assert.Contains(t, err.Error(), "not found") +} diff --git a/backend/src/common/client/api_server/experiment_client.go b/backend/src/common/client/api_server/experiment_client.go index f246335797e..7080922b0b9 100644 --- a/backend/src/common/client/api_server/experiment_client.go +++ b/backend/src/common/client/api_server/experiment_client.go @@ -18,6 +18,8 @@ type ExperimentInterface interface { Get(params *params.GetExperimentParams) (*model.APIExperiment, error) List(params *params.ListExperimentParams) ([]*model.APIExperiment, int, string, error) ListAll(params *params.ListExperimentParams, maxResultSize int) ([]*model.APIExperiment, error) + Archive(params *params.ArchiveExperimentParams) error + Unarchive(params *params.UnarchiveExperimentParams) error } type ExperimentClient struct { @@ -164,3 +166,51 @@ func listAllForExperiment(client ExperimentInterface, parameters *params.ListExp return allResults, nil } + +func (c *ExperimentClient) Archive(parameters *params.ArchiveExperimentParams) error { + // Create context with timeout + ctx, cancel := context.WithTimeout(context.Background(), apiServerDefaultTimeout) + defer cancel() + + // Make service call + parameters.Context = ctx + _, err := c.apiClient.ExperimentService.ArchiveExperiment(parameters, PassThroughAuth) + + if err != nil { + if defaultError, ok := err.(*params.ArchiveExperimentDefault); ok { + err = CreateErrorFromAPIStatus(defaultError.Payload.Error, defaultError.Payload.Code) + } else { + err = CreateErrorCouldNotRecoverAPIStatus(err) + } + + return util.NewUserError(err, + fmt.Sprintf("Failed to archive experiments. 
Params: '%+v'", parameters), + fmt.Sprintf("Failed to archive experiments")) + } + + return nil +} + +func (c *ExperimentClient) Unarchive(parameters *params.UnarchiveExperimentParams) error { + // Create context with timeout + ctx, cancel := context.WithTimeout(context.Background(), apiServerDefaultTimeout) + defer cancel() + + // Make service call + parameters.Context = ctx + _, err := c.apiClient.ExperimentService.UnarchiveExperiment(parameters, PassThroughAuth) + + if err != nil { + if defaultError, ok := err.(*params.UnarchiveExperimentDefault); ok { + err = CreateErrorFromAPIStatus(defaultError.Payload.Error, defaultError.Payload.Code) + } else { + err = CreateErrorCouldNotRecoverAPIStatus(err) + } + + return util.NewUserError(err, + fmt.Sprintf("Failed to unarchive experiments. Params: '%+v'", parameters), + fmt.Sprintf("Failed to unarchive experiments")) + } + + return nil +} diff --git a/backend/src/common/client/api_server/experiment_client_fake.go b/backend/src/common/client/api_server/experiment_client_fake.go index 43f3779e8d8..d29eceef928 100644 --- a/backend/src/common/client/api_server/experiment_client_fake.go +++ b/backend/src/common/client/api_server/experiment_client_fake.go @@ -80,3 +80,11 @@ func (c *ExperimentClientFake) ListAll(params *experimentparams.ListExperimentPa maxResultSize int) ([]*experimentmodel.APIExperiment, error) { return listAllForExperiment(c, params, maxResultSize) } + +func (c *ExperimentClientFake) Archive(params *experimentparams.ArchiveExperimentParams) error { + return nil +} + +func (c *ExperimentClientFake) Unarchive(params *experimentparams.UnarchiveExperimentParams) error { + return nil +} diff --git a/backend/src/common/client/api_server/pipeline_client.go b/backend/src/common/client/api_server/pipeline_client.go index e2d3bd5b4bf..90d2d4a4d7d 100644 --- a/backend/src/common/client/api_server/pipeline_client.go +++ b/backend/src/common/client/api_server/pipeline_client.go @@ -202,3 +202,108 @@ func listAllForPipeline(client PipelineInterface, parameters *params.ListPipelin return allResults, nil } + +func (c *PipelineClient) CreatePipelineVersion(parameters *params.CreatePipelineVersionParams) (*model.APIPipelineVersion, + error) { + // Create context with timeout + ctx, cancel := context.WithTimeout(context.Background(), apiServerDefaultTimeout) + defer cancel() + + parameters.Context = ctx + response, err := c.apiClient.PipelineService.CreatePipelineVersion(parameters, PassThroughAuth) + if err != nil { + if defaultError, ok := err.(*params.CreatePipelineVersionDefault); ok { + err = CreateErrorFromAPIStatus(defaultError.Payload.Error, defaultError.Payload.Code) + } else { + err = CreateErrorCouldNotRecoverAPIStatus(err) + } + + return nil, util.NewUserError(err, + fmt.Sprintf("Failed to create pipeline version. 
Params: '%v'", parameters), + fmt.Sprintf("Failed to create pipeline version from URL '%v'", parameters.Body.PackageURL.PipelineURL)) + } + + return response.Payload, nil +} + +func (c *PipelineClient) ListPipelineVersions(parameters *params.ListPipelineVersionsParams) ( + []*model.APIPipelineVersion, int, string, error) { + // Create context with timeout + ctx, cancel := context.WithTimeout(context.Background(), apiServerDefaultTimeout) + defer cancel() + + // Make service call + parameters.Context = ctx + response, err := c.apiClient.PipelineService.ListPipelineVersions(parameters, PassThroughAuth) + if err != nil { + if defaultError, ok := err.(*params.ListPipelineVersionsDefault); ok { + err = CreateErrorFromAPIStatus(defaultError.Payload.Error, defaultError.Payload.Code) + } else { + err = CreateErrorCouldNotRecoverAPIStatus(err) + } + + return nil, 0, "", util.NewUserError(err, + fmt.Sprintf("Failed to list pipeline versions. Params: '%+v'", parameters), + fmt.Sprintf("Failed to list pipeline versions")) + } + + return response.Payload.Versions, int(response.Payload.TotalSize), response.Payload.NextPageToken, nil +} + +func (c *PipelineClient) GetPipelineVersion(parameters *params.GetPipelineVersionParams) (*model.APIPipelineVersion, + error) { + // Create context with timeout + ctx, cancel := context.WithTimeout(context.Background(), apiServerDefaultTimeout) + defer cancel() + + // Make service call + parameters.Context = ctx + response, err := c.apiClient.PipelineService.GetPipelineVersion(parameters, PassThroughAuth) + if err != nil { + if defaultError, ok := err.(*params.GetPipelineVersionDefault); ok { + err = CreateErrorFromAPIStatus(defaultError.Payload.Error, defaultError.Payload.Code) + } else { + err = CreateErrorCouldNotRecoverAPIStatus(err) + } + + return nil, util.NewUserError(err, + fmt.Sprintf("Failed to get pipeline version. Params: '%v'", parameters), + fmt.Sprintf("Failed to get pipeline version '%v'", parameters.VersionID)) + } + + return response.Payload, nil +} + +func (c *PipelineClient) GetPipelineVersionTemplate(parameters *params.GetPipelineVersionTemplateParams) ( + *workflowapi.Workflow, error) { + // Create context with timeout + ctx, cancel := context.WithTimeout(context.Background(), apiServerDefaultTimeout) + defer cancel() + + // Make service call + parameters.Context = ctx + response, err := c.apiClient.PipelineService.GetPipelineVersionTemplate(parameters, PassThroughAuth) + if err != nil { + if defaultError, ok := err.(*params.GetPipelineVersionTemplateDefault); ok { + err = CreateErrorFromAPIStatus(defaultError.Payload.Error, defaultError.Payload.Code) + } else { + err = CreateErrorCouldNotRecoverAPIStatus(err) + } + + return nil, util.NewUserError(err, + fmt.Sprintf("Failed to get template. Params: '%+v'", parameters), + fmt.Sprintf("Failed to get template for pipeline version '%v'", parameters.VersionID)) + } + + // Unmarshal response + var workflow workflowapi.Workflow + err = yaml.Unmarshal([]byte(response.Payload.Template), &workflow) + if err != nil { + return nil, util.NewUserError(err, + fmt.Sprintf("Failed to unmarshal reponse. Params: '%+v'. 
Response: '%s'", parameters, + response.Payload.Template), + fmt.Sprintf("Failed to unmarshal reponse")) + } + + return &workflow, nil +} diff --git a/backend/src/common/client/api_server/pipeline_upload_client.go b/backend/src/common/client/api_server/pipeline_upload_client.go index 0277bd56a28..476745b8577 100644 --- a/backend/src/common/client/api_server/pipeline_upload_client.go +++ b/backend/src/common/client/api_server/pipeline_upload_client.go @@ -20,6 +20,7 @@ const ( pipelineUploadPath = "pipelines/upload" pipelineUploadServerBasePath = "/api/v1/namespaces/%s/services/ml-pipeline:8888/proxy/apis/v1beta1/%s" pipelineUploadContentTypeKey = "Content-Type" + pipelineVersionUploadPath = "pipelines/upload_version" ) type PipelineUploadInterface interface { @@ -83,3 +84,38 @@ func (c *PipelineUploadClient) Upload(parameters *params.UploadPipelineParams) ( return response.Payload, nil } + +// UploadPipelineVersion uploads pipeline version from local file. +func (c *PipelineUploadClient) UploadPipelineVersion(filePath string, parameters *params.UploadPipelineVersionParams) (*model.APIPipelineVersion, + error) { + // Get file + file, err := os.Open(filePath) + if err != nil { + return nil, util.NewUserErrorWithSingleMessage(err, + fmt.Sprintf("Failed to open file '%s'", filePath)) + } + defer file.Close() + parameters.Uploadfile = runtime.NamedReader(filePath, file) + + // Create context with timeout + ctx, cancel := context.WithTimeout(context.Background(), apiServerDefaultTimeout) + defer cancel() + + // Make service call + parameters.Context = ctx + response, err := c.apiClient.PipelineUploadService.UploadPipelineVersion(parameters, PassThroughAuth) + + if err != nil { + if defaultError, ok := err.(*params.UploadPipelineVersionDefault); ok { + err = CreateErrorFromAPIStatus(defaultError.Payload.Error, defaultError.Payload.Code) + } else { + err = CreateErrorCouldNotRecoverAPIStatus(err) + } + + return nil, util.NewUserError(err, + fmt.Sprintf("Failed to upload pipeline version. 
Params: '%v'", parameters), + fmt.Sprintf("Failed to upload pipeline version")) + } + + return response.Payload, nil +} diff --git a/backend/src/common/client/api_server/pipeline_upload_client_fake.go b/backend/src/common/client/api_server/pipeline_upload_client_fake.go index fd9e0230a26..1a274b62ae8 100644 --- a/backend/src/common/client/api_server/pipeline_upload_client_fake.go +++ b/backend/src/common/client/api_server/pipeline_upload_client_fake.go @@ -44,3 +44,6 @@ func (c *PipelineUploadClientFake) UploadFile(filePath string, return getDefaultUploadedPipeline(), nil } } + +// TODO(jingzhang36): add UploadPipelineVersion fake to be used in integration test +// after go_http_client and go_client are auto-generated from UploadPipelineVersion in PipelineUploadServer diff --git a/backend/src/common/util/BUILD.bazel b/backend/src/common/util/BUILD.bazel index b1925073e31..0c667a360f2 100644 --- a/backend/src/common/util/BUILD.bazel +++ b/backend/src/common/util/BUILD.bazel @@ -63,7 +63,6 @@ go_test( "//backend/src/crd/pkg/apis/scheduledworkflow/v1beta1:go_default_library", "@com_github_argoproj_argo//pkg/apis/workflow/v1alpha1:go_default_library", "@com_github_ghodss_yaml//:go_default_library", - "@com_github_kataras_iris//core/errors:go_default_library", "@com_github_stretchr_testify//assert:go_default_library", "@io_k8s_apimachinery//pkg/api/errors:go_default_library", "@io_k8s_apimachinery//pkg/apis/meta/v1:go_default_library", diff --git a/backend/src/common/util/consts.go b/backend/src/common/util/consts.go index e51dc4fa94b..398ba7e117c 100644 --- a/backend/src/common/util/consts.go +++ b/backend/src/common/util/consts.go @@ -40,7 +40,19 @@ const ( // It captures whether the name of the owning ScheduledWorkflow. LabelKeyWorkflowScheduledWorkflowName = constants.FullName + "/scheduledWorkflowName" - - LabelKeyWorkflowRunId = "pipeline/runid" + LabelKeyWorkflowRunId = "pipeline/runid" LabelKeyWorkflowPersistedFinalState = "pipeline/persistedFinalState" + + // LabelKeyWorkflowEpoch is a Workflow annotation key. + // It captures the the name of the Run. + AnnotationKeyRunName = "pipelines.kubeflow.org/run_name" + + AnnotationKeyIstioSidecarInject = "sidecar.istio.io/inject" + AnnotationValueIstioSidecarInjectEnabled = "true" + AnnotationValueIstioSidecarInjectDisabled = "false" + + // LabelKeyCacheEnabled is a workflow label key. + // It captures whether this step will be selected by cache service. + // To disable/enable cache for a single run, this label needs to be added in every step under a run. 
+    LabelKeyCacheEnabled = "pipelines.kubeflow.org/cache_enabled"
 )
diff --git a/backend/src/common/util/formatter_test.go b/backend/src/common/util/formatter_test.go
index 61376b5e8d7..28db64bb2e3 100644
--- a/backend/src/common/util/formatter_test.go
+++ b/backend/src/common/util/formatter_test.go
@@ -15,11 +15,11 @@ package util
 
 import (
+    "errors"
 "testing"
 "time"
 
 "github.com/argoproj/argo/pkg/apis/workflow/v1alpha1"
-    "github.com/kataras/iris/core/errors"
 "github.com/stretchr/testify/assert"
 v1 "k8s.io/apimachinery/pkg/apis/meta/v1"
 )
diff --git a/backend/src/common/util/scheduled_workflow.go b/backend/src/common/util/scheduled_workflow.go
index d3fc9743ac1..41d971dfc89 100644
--- a/backend/src/common/util/scheduled_workflow.go
+++ b/backend/src/common/util/scheduled_workflow.go
@@ -80,6 +80,13 @@ func (s *ScheduledWorkflow) MaxConcurrencyOr0() int64 {
 return 0
 }
 
+func (s *ScheduledWorkflow) NoCatchupOrFalse() bool {
+    if s.Spec.NoCatchup != nil {
+        return *s.Spec.NoCatchup
+    }
+    return false
+}
+
 func (s *ScheduledWorkflow) IntervalSecondOr0() int64 {
 if s.Spec.PeriodicSchedule != nil {
 return s.Spec.PeriodicSchedule.IntervalSecond
diff --git a/backend/src/common/util/workflow.go b/backend/src/common/util/workflow.go
index dc5acb967d6..2db66fedee3 100644
--- a/backend/src/common/util/workflow.go
+++ b/backend/src/common/util/workflow.go
@@ -15,6 +15,8 @@
 package util
 
 import (
+    "strings"
+
 workflowapi "github.com/argoproj/argo/pkg/apis/workflow/v1alpha1"
 "github.com/golang/glog"
 swfregister "github.com/kubeflow/pipelines/backend/src/crd/pkg/apis/scheduledworkflow"
@@ -22,7 +24,6 @@ import (
 metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
 "k8s.io/apimachinery/pkg/runtime/schema"
 "k8s.io/apimachinery/pkg/util/json"
-    "strings"
 )
 
 // Workflow is a type to help manipulate Workflow objects.
@@ -115,8 +116,8 @@ func isScheduledWorkflow(reference metav1.OwnerReference) bool {
 }
 
 if reference.APIVersion == gvk.GroupVersion().String() &&
-        reference.Kind == gvk.Kind &&
-        reference.UID != "" {
+        reference.Kind == gvk.Kind &&
+        reference.UID != "" {
 return true
 }
 return false
@@ -186,6 +187,34 @@ func (w *Workflow) OverrideName(name string) {
 w.Name = name
 }
 
+// SetAnnotationsToAllTemplates sets an annotation on every template in a Workflow.
+func (w *Workflow) SetAnnotationsToAllTemplates(key string, value string) {
+    if len(w.Spec.Templates) == 0 {
+        return
+    }
+    for index := range w.Spec.Templates {
+        if w.Spec.Templates[index].Metadata.Annotations == nil {
+            w.Spec.Templates[index].Metadata.Annotations = make(map[string]string)
+        }
+        w.Spec.Templates[index].Metadata.Annotations[key] = value
+    }
+}
+
+// SetLabelsToAllTemplates sets a label on every template in a Workflow.
+func (w *Workflow) SetLabelsToAllTemplates(key string, value string) {
+    if len(w.Spec.Templates) == 0 {
+        return
+    }
+    for index := range w.Spec.Templates {
+        if w.Spec.Templates[index].Metadata.Labels == nil {
+            w.Spec.Templates[index].Metadata.Labels = make(map[string]string)
+        }
+        if w.Spec.Templates[index].Metadata.Labels[key] != value {
+            w.Spec.Templates[index].Metadata.Labels[key] = value
+        }
+    }
+}
+
 // SetOwnerReferences sets owner references on a Workflow.
func (w *Workflow) SetOwnerReferences(schedule *swfapi.ScheduledWorkflow) { w.OwnerReferences = []metav1.OwnerReference{ @@ -204,6 +233,13 @@ func (w *Workflow) SetLabels(key string, value string) { w.Labels[key] = value } +func (w *Workflow) SetAnnotations(key string, value string) { + if w.Annotations == nil { + w.Annotations = make(map[string]string) + } + w.Annotations[key] = value +} + func (w *Workflow) ReplaceUID(id string) error { newWorkflowString := strings.Replace(w.ToStringForStore(), "{{workflow.uid}}", id, -1) var workflow *workflowapi.Workflow diff --git a/backend/src/common/util/workflow_test.go b/backend/src/common/util/workflow_test.go index 26bfbcacd48..30280e9e295 100644 --- a/backend/src/common/util/workflow_test.go +++ b/backend/src/common/util/workflow_test.go @@ -15,9 +15,10 @@ package util import ( - "github.com/ghodss/yaml" "testing" + workflowapi "github.com/argoproj/argo/pkg/apis/workflow/v1alpha1" + "github.com/ghodss/yaml" swfapi "github.com/kubeflow/pipelines/backend/src/crd/pkg/apis/scheduledworkflow/v1beta1" "github.com/stretchr/testify/assert" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" @@ -252,6 +253,40 @@ func TestWorkflow_SetOwnerReferences(t *testing.T) { assert.Equal(t, expected, workflow.Get()) } +func TestWorkflow_SetLabelsToAllTemplates(t *testing.T) { + workflow := NewWorkflow(&workflowapi.Workflow{ + ObjectMeta: metav1.ObjectMeta{ + Name: "WORKFLOW_NAME", + }, + Spec: workflowapi.WorkflowSpec{ + Templates: []workflowapi.Template{ + workflowapi.Template{ + Metadata: workflowapi.Metadata{}, + }, + }, + }, + }) + workflow.SetLabelsToAllTemplates("key", "value") + expected := &workflowapi.Workflow{ + ObjectMeta: metav1.ObjectMeta{ + Name: "WORKFLOW_NAME", + }, + Spec: workflowapi.WorkflowSpec{ + Templates: []workflowapi.Template{ + workflowapi.Template{ + Metadata: workflowapi.Metadata{ + Labels: map[string]string{ + "key": "value", + }, + }, + }, + }, + }, + } + + assert.Equal(t, expected, workflow.Get()) +} + func TestSetLabels(t *testing.T) { workflow := NewWorkflow(&workflowapi.Workflow{ ObjectMeta: metav1.ObjectMeta{ diff --git a/backend/src/crd/controller/scheduledworkflow/util/cron_schedule.go b/backend/src/crd/controller/scheduledworkflow/util/cron_schedule.go index 52b74b29101..d6e9d2f2949 100644 --- a/backend/src/crd/controller/scheduledworkflow/util/cron_schedule.go +++ b/backend/src/crd/controller/scheduledworkflow/util/cron_schedule.go @@ -44,16 +44,39 @@ func NewCronSchedule(cronSchedule *swfapi.CronSchedule) *CronSchedule { // scheduled. func (s *CronSchedule) GetNextScheduledEpoch(lastJobEpoch *int64, defaultStartEpoch int64) int64 { + effectiveLastJobEpoch := s.getEffectiveLastJobEpoch(lastJobEpoch, defaultStartEpoch) + return s.getNextScheduledEpoch(effectiveLastJobEpoch) +} + +func (s *CronSchedule) GetNextScheduledEpochNoCatchup(lastJobEpoch *int64, + defaultStartEpoch int64, nowEpoch int64) int64 { + + effectiveLastJobEpoch := s.getEffectiveLastJobEpoch(lastJobEpoch, defaultStartEpoch) + return s.getNextScheduledEpochImp(effectiveLastJobEpoch, false, nowEpoch) +} + +func (s *CronSchedule) getEffectiveLastJobEpoch(lastJobEpoch *int64, + defaultStartEpoch int64) int64 { + + // Fallback to default start epoch, which will be passed the Job creation + // time. effectiveLastJobEpoch := defaultStartEpoch if lastJobEpoch != nil { + // Last job epoch takes first precedence. effectiveLastJobEpoch = *lastJobEpoch } else if s.StartTime != nil { + // Start time takes second precedence. 
 		effectiveLastJobEpoch = s.StartTime.Unix()
 	}
-	return s.getNextScheduledEpoch(effectiveLastJobEpoch)
+	return effectiveLastJobEpoch
 }
 
 func (s *CronSchedule) getNextScheduledEpoch(lastJobEpoch int64) int64 {
+	return s.getNextScheduledEpochImp(lastJobEpoch,
+		true, 0 /* nowEpoch doesn't matter when catchup=true */)
+}
+
+func (s *CronSchedule) getNextScheduledEpochImp(lastJobEpoch int64, catchup bool, nowEpoch int64) int64 {
 	schedule, err := cron.Parse(s.Cron)
 	if err != nil {
 		// This should never happen, validation should have caught this at resource creation.
@@ -68,10 +91,30 @@ func (s *CronSchedule) getNextScheduledEpoch(lastJobEpoch int64) int64 {
 	}
 
 	result := schedule.Next(time.Unix(startEpoch, 0).UTC()).Unix()
-	if s.EndTime != nil &&
-		s.EndTime.Unix() < result {
+	var endTime int64 = math.MaxInt64
+	if s.EndTime != nil {
+		endTime = s.EndTime.Unix()
+	}
+	if endTime < result {
 		return math.MaxInt64
 	}
-	return result
+	// When we need to catch up with the schedule, just run the missed
+	// schedules one by one.
+	if catchup {
+		return result
+	}
+
+	// When we don't need to catch up, find the last schedule we would have
+	// run by now and skip the ones in between.
+	next := result
+	var nextNext int64
+	for {
+		nextNext = schedule.Next(time.Unix(next, 0).UTC()).Unix()
+		if nextNext <= nowEpoch && nextNext <= endTime {
+			next = nextNext
+		} else {
+			break
+		}
+	}
+	return next
 }
diff --git a/backend/src/crd/controller/scheduledworkflow/util/cron_schedule_test.go b/backend/src/crd/controller/scheduledworkflow/util/cron_schedule_test.go
index dac87a00865..c3c04e1479f 100644
--- a/backend/src/crd/controller/scheduledworkflow/util/cron_schedule_test.go
+++ b/backend/src/crd/controller/scheduledworkflow/util/cron_schedule_test.go
@@ -110,3 +110,54 @@ func TestCronSchedule_GetNextScheduledEpoch(t *testing.T) {
 	assert.Equal(t, int64(10*hour+15*minute+minute),
 		schedule.GetNextScheduledEpoch(nil, defaultStartEpoch))
 }
+
+func TestCronSchedule_GetNextScheduledEpochNoCatchup(t *testing.T) {
+	// There was a previous job, hasn't been time for next job
+	schedule := NewCronSchedule(&swfapi.CronSchedule{
+		StartTime: commonutil.Metav1TimePointer(v1.NewTime(time.Unix(10*hour+10*minute, 0).UTC())),
+		EndTime:   commonutil.Metav1TimePointer(v1.NewTime(time.Unix(11*hour, 0).UTC())),
+		Cron:      "0 * * * * * ",
+	})
+	lastJobEpoch := int64(10*hour + 20*minute)
+	defaultStartEpoch := int64(10*hour + 15*minute)
+	nowEpoch := int64(10*hour + 20*minute + 30*second)
+	assert.Equal(t, int64(10*hour+20*minute+minute),
+		schedule.GetNextScheduledEpochNoCatchup(&lastJobEpoch, defaultStartEpoch, nowEpoch))
+
+	// Exactly now for next job
+	lastJobEpoch = int64(10*hour + 20*minute)
+	nowEpoch = int64(10*hour + 20*minute + minute)
+	assert.Equal(t, int64(10*hour+20*minute+minute),
+		schedule.GetNextScheduledEpochNoCatchup(&lastJobEpoch, defaultStartEpoch, nowEpoch))
+
+	// Shortly after next job's original schedule
+	lastJobEpoch = int64(10*hour + 20*minute)
+	nowEpoch = int64(10*hour + 21*minute + 30*second)
+	assert.Equal(t, int64(10*hour+21*minute),
+		schedule.GetNextScheduledEpochNoCatchup(&lastJobEpoch, defaultStartEpoch, nowEpoch))
+
+	// We are behind schedule
+	lastJobEpoch = int64(10*hour + 20*minute)
+	nowEpoch = int64(10*hour + 30*minute)
+	assert.Equal(t, int64(10*hour+30*minute),
+		schedule.GetNextScheduledEpochNoCatchup(&lastJobEpoch, defaultStartEpoch, nowEpoch))
+
+	// We are way behind schedule (later than end time)
+	lastJobEpoch = int64(10*hour + 20*minute)
+	nowEpoch = int64(12 * hour)
+	assert.Equal(t, int64(11*hour),
schedule.GetNextScheduledEpochNoCatchup(&lastJobEpoch, defaultStartEpoch, nowEpoch)) + + // There is no previous job, falling back on the start date of the schedule + assert.Equal(t, int64(10*hour+10*minute+minute), + schedule.GetNextScheduledEpochNoCatchup(nil, defaultStartEpoch, 0)) + + // There is no previous job, no schedule start date, falling back on the + // creation date of the workflow. + schedule = NewCronSchedule(&swfapi.CronSchedule{ + EndTime: commonutil.Metav1TimePointer(v1.NewTime(time.Unix(11*hour, 0).UTC())), + Cron: "0 * * * * * ", + }) + assert.Equal(t, int64(10*hour+15*minute+minute), + schedule.GetNextScheduledEpochNoCatchup(nil, defaultStartEpoch, 0)) +} diff --git a/backend/src/crd/controller/scheduledworkflow/util/periodic_schedule.go b/backend/src/crd/controller/scheduledworkflow/util/periodic_schedule.go index 6d18abad1e9..df75f85eaa4 100644 --- a/backend/src/crd/controller/scheduledworkflow/util/periodic_schedule.go +++ b/backend/src/crd/controller/scheduledworkflow/util/periodic_schedule.go @@ -56,17 +56,41 @@ func (s *PeriodicSchedule) getNextScheduledEpoch(lastJobEpoch int64) int64 { startEpoch = s.StartTime.Unix() } + result := startEpoch + s.getInterval() + + if s.EndTime != nil && + s.EndTime.Unix() < result { + return math.MaxInt64 + } + + return result +} + +func (s *PeriodicSchedule) getInterval() int64 { interval := s.IntervalSecond if interval == 0 { interval = 1 } + return interval +} - result := startEpoch + interval +func (s *PeriodicSchedule) GetNextScheduledEpochNoCatchup( + lastJobEpoch *int64, defaultStartEpoch int64, nowEpoch int64) int64 { - if s.EndTime != nil && - s.EndTime.Unix() < result { + nextScheduledEpoch := s.GetNextScheduledEpoch(lastJobEpoch, defaultStartEpoch) + if nextScheduledEpoch == math.MaxInt64 { + // No next schedule. return math.MaxInt64 } - return result + nextNextScheduledEpoch := nextScheduledEpoch + s.getInterval() + + if nowEpoch >= nextNextScheduledEpoch { + // If we cannot catch up with schedule, just reschedule to min(now, endTime). 
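+		// Example: with IntervalSecond=60, a last job at 10:20 and now=10:30,
+		// the next scheduled epoch (10:21) is more than one interval old, so a
+		// single run is scheduled at now (or EndTime, whichever is earlier)
+		// instead of backfilling 10:21 through 10:29.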
+ if s.EndTime != nil && s.EndTime.Unix() < nowEpoch { + return s.EndTime.Unix() + } + return nowEpoch + } + return nextScheduledEpoch } diff --git a/backend/src/crd/controller/scheduledworkflow/util/periodic_schedule_test.go b/backend/src/crd/controller/scheduledworkflow/util/periodic_schedule_test.go index ce1c7905252..96d3fed6534 100644 --- a/backend/src/crd/controller/scheduledworkflow/util/periodic_schedule_test.go +++ b/backend/src/crd/controller/scheduledworkflow/util/periodic_schedule_test.go @@ -93,3 +93,54 @@ func TestPeriodicSchedule_GetNextScheduledEpoch(t *testing.T) { assert.Equal(t, int64(10*hour+15*minute+minute), schedule.GetNextScheduledEpoch(nil, defaultStartEpoch)) } + +func TestPeriodicSchedule_GetNextScheduledEpochNoCatchup(t *testing.T) { + // There was a previous job, hasn't been time for next job + schedule := NewPeriodicSchedule(&swfapi.PeriodicSchedule{ + StartTime: commonutil.Metav1TimePointer(v1.NewTime(time.Unix(10*hour+10*minute, 0).UTC())), + EndTime: commonutil.Metav1TimePointer(v1.NewTime(time.Unix(11*hour, 0).UTC())), + IntervalSecond: 60, + }) + lastJobEpoch := int64(10*hour + 20*minute) + defaultStartEpoch := int64(10*hour + 15*minute) + nowEpoch := int64(10*hour + 20*minute + 30*second) + assert.Equal(t, int64(10*hour+20*minute+minute), + schedule.GetNextScheduledEpochNoCatchup(&lastJobEpoch, defaultStartEpoch, nowEpoch)) + + // Exactly now for next job + lastJobEpoch = int64(10*hour + 20*minute) + nowEpoch = int64(10*hour + 20*minute + minute) + assert.Equal(t, int64(10*hour+20*minute+minute), + schedule.GetNextScheduledEpochNoCatchup(&lastJobEpoch, defaultStartEpoch, nowEpoch)) + + // Shortly after next job's original schedule + lastJobEpoch = int64(10*hour + 20*minute) + nowEpoch = int64(10*hour + 21*minute + 30*second) + assert.Equal(t, int64(10*hour+21*minute), + schedule.GetNextScheduledEpochNoCatchup(&lastJobEpoch, defaultStartEpoch, nowEpoch)) + + // We are behind schedule + lastJobEpoch = int64(10*hour + 20*minute) + nowEpoch = int64(10*hour + 30*minute) + assert.Equal(t, nowEpoch, + schedule.GetNextScheduledEpochNoCatchup(&lastJobEpoch, defaultStartEpoch, nowEpoch)) + + // We are way behind schedule (later than end time) + lastJobEpoch = int64(10*hour + 20*minute) + nowEpoch = int64(12 * hour) + assert.Equal(t, int64(11*hour), // TODO: should this be end time? + schedule.GetNextScheduledEpochNoCatchup(&lastJobEpoch, defaultStartEpoch, nowEpoch)) + + // There is no previous job, falling back on the start date of the schedule + assert.Equal(t, int64(10*hour+10*minute+minute), + schedule.GetNextScheduledEpochNoCatchup(nil, defaultStartEpoch, 0)) + + // There is no previous job, no schedule start date, falling back on the + // creation date of the workflow. + schedule = NewPeriodicSchedule(&swfapi.PeriodicSchedule{ + EndTime: commonutil.Metav1TimePointer(v1.NewTime(time.Unix(11*hour, 0).UTC())), + IntervalSecond: 60, + }) + assert.Equal(t, int64(10*hour+15*minute+minute), + schedule.GetNextScheduledEpochNoCatchup(nil, defaultStartEpoch, 0)) +} diff --git a/backend/src/crd/controller/scheduledworkflow/util/scheduled_workflow.go b/backend/src/crd/controller/scheduledworkflow/util/scheduled_workflow.go index f96dac21c10..c0e3123672f 100644 --- a/backend/src/crd/controller/scheduledworkflow/util/scheduled_workflow.go +++ b/backend/src/crd/controller/scheduledworkflow/util/scheduled_workflow.go @@ -45,7 +45,7 @@ type ScheduledWorkflow struct { } // NewScheduledWorkflow creates an instance of ScheduledWorkflow. 
-func NewScheduledWorkflow(swf *swfapi.ScheduledWorkflow, ) *ScheduledWorkflow { +func NewScheduledWorkflow(swf *swfapi.ScheduledWorkflow) *ScheduledWorkflow { return &ScheduledWorkflow{ swf, commonutil.NewUUIDGenerator(), } @@ -124,7 +124,7 @@ func (s *ScheduledWorkflow) MinIndex() int64 { func (s *ScheduledWorkflow) isOneOffRun() bool { return s.Spec.Trigger.CronSchedule == nil && - s.Spec.Trigger.PeriodicSchedule == nil + s.Spec.Trigger.PeriodicSchedule == nil } func (s *ScheduledWorkflow) nextResourceID() string { @@ -149,7 +149,7 @@ func (s *ScheduledWorkflow) getWorkflowParametersAsMap() map[string]string { } func (s *ScheduledWorkflow) getFormattedWorkflowParametersAsMap( - formatter *ParameterFormatter) map[string]string { + formatter *ParameterFormatter) map[string]string { result := make(map[string]string) for key, value := range s.getWorkflowParametersAsMap() { @@ -163,7 +163,7 @@ func (s *ScheduledWorkflow) getFormattedWorkflowParametersAsMap( // the appropriate OwnerReferences on the resource so handleObject can discover // the Schedule resource that 'owns' it. func (s *ScheduledWorkflow) NewWorkflow( - nextScheduledEpoch int64, nowEpoch int64) (*commonutil.Workflow, error) { + nextScheduledEpoch int64, nowEpoch int64) (*commonutil.Workflow, error) { const ( workflowKind = "Workflow" @@ -208,10 +208,10 @@ func (s *ScheduledWorkflow) NewWorkflow( // GetNextScheduledEpoch returns the next epoch at which a workflow should be scheduled, // and whether it should be run now. func (s *ScheduledWorkflow) GetNextScheduledEpoch(activeWorkflowCount int64, nowEpoch int64) ( - nextScheduleEpoch int64, shouldRunNow bool) { + nextScheduleEpoch int64, shouldRunNow bool) { // Get the next scheduled time. - nextScheduledEpoch := s.getNextScheduledEpoch() + nextScheduledEpoch := s.getNextScheduledEpoch(nowEpoch) // If the schedule is not enabled, we should not schedule the workflow now. if s.enabled() == false { @@ -231,21 +231,37 @@ func (s *ScheduledWorkflow) GetNextScheduledEpoch(activeWorkflowCount int64, now return nextScheduledEpoch, true } -func (s *ScheduledWorkflow) getNextScheduledEpoch() int64 { +func (s *ScheduledWorkflow) getNextScheduledEpoch(nowEpoch int64) int64 { + catchup := true + if s.Spec.NoCatchup != nil { + catchup = !*s.Spec.NoCatchup + } // Periodic schedule if s.Spec.Trigger.PeriodicSchedule != nil { - return NewPeriodicSchedule(s.Spec.Trigger.PeriodicSchedule). - GetNextScheduledEpoch( - commonutil.ToInt64Pointer(s.Status.Trigger.LastTriggeredTime), - s.creationEpoch()) + schedule := NewPeriodicSchedule(s.Spec.Trigger.PeriodicSchedule) + if catchup { + return schedule.GetNextScheduledEpoch( + commonutil.ToInt64Pointer(s.Status.Trigger.LastTriggeredTime), + s.creationEpoch()) + } else { + return schedule.GetNextScheduledEpochNoCatchup( + commonutil.ToInt64Pointer(s.Status.Trigger.LastTriggeredTime), + s.creationEpoch(), nowEpoch) + } } // Cron schedule if s.Spec.Trigger.CronSchedule != nil { - return NewCronSchedule(s.Spec.Trigger.CronSchedule). 
- GetNextScheduledEpoch( - commonutil.ToInt64Pointer(s.Status.Trigger.LastTriggeredTime), - s.creationEpoch()) + schedule := NewCronSchedule(s.Spec.Trigger.CronSchedule) + if catchup { + return schedule.GetNextScheduledEpoch( + commonutil.ToInt64Pointer(s.Status.Trigger.LastTriggeredTime), + s.creationEpoch()) + } else { + return schedule.GetNextScheduledEpochNoCatchup( + commonutil.ToInt64Pointer(s.Status.Trigger.LastTriggeredTime), + s.creationEpoch(), nowEpoch) + } } return s.getNextScheduledEpochForOneTimeRun() @@ -268,8 +284,8 @@ func (s *ScheduledWorkflow) setLabel(key string, value string) { // UpdateStatus updates the status of a workflow in the Kubernetes API server. func (s *ScheduledWorkflow) UpdateStatus(updatedEpoch int64, workflow *commonutil.Workflow, - scheduledEpoch int64, active []swfapi.WorkflowStatus, - completed []swfapi.WorkflowStatus) { + scheduledEpoch int64, active []swfapi.WorkflowStatus, + completed []swfapi.WorkflowStatus) { updatedTime := metav1.NewTime(time.Unix(updatedEpoch, 0).UTC()) @@ -310,7 +326,8 @@ func (s *ScheduledWorkflow) UpdateStatus(updatedEpoch int64, workflow *commonuti if workflow != nil { s.updateLastTriggeredTime(scheduledEpoch) s.Status.Trigger.LastIndex = commonutil.Int64Pointer(s.nextIndex()) - s.updateNextTriggeredTime(s.getNextScheduledEpoch()) + // Next triggered time in label should not be related to now epoch. + s.updateNextTriggeredTime(s.getNextScheduledEpoch(0)) } else { // LastTriggeredTime is unchanged. s.updateNextTriggeredTime(scheduledEpoch) @@ -333,8 +350,8 @@ func (s *ScheduledWorkflow) updateNextTriggeredTime(epoch int64) { } func (s *ScheduledWorkflow) getStatusAndMessage(activeCount int) ( - conditionType swfapi.ScheduledWorkflowConditionType, - status core.ConditionStatus, message string) { + conditionType swfapi.ScheduledWorkflowConditionType, + status core.ConditionStatus, message string) { // Schedule messages const ( ScheduleEnabledMessage = "The schedule is enabled." diff --git a/backend/src/crd/controller/viewer/reconciler/reconciler.go b/backend/src/crd/controller/viewer/reconciler/reconciler.go index a6287e651bc..63f6777b6b6 100644 --- a/backend/src/crd/controller/viewer/reconciler/reconciler.go +++ b/backend/src/crd/controller/viewer/reconciler/reconciler.go @@ -24,6 +24,7 @@ package reconciler import ( "context" "fmt" + "strings" "github.com/golang/glog" viewerV1beta1 "github.com/kubeflow/pipelines/backend/src/crd/pkg/apis/viewer/v1beta1" @@ -42,6 +43,8 @@ import ( const viewerTargetPort = 6006 +const defaultTensorflowImage = "tensorflow/tensorflow:1.13.2" + // Reconciler implements reconcile.Reconciler for the Viewer CRD. type Reconciler struct { client.Client @@ -90,6 +93,10 @@ func (r *Reconciler) Reconcile(req reconcile.Request) (reconcile.Result, error) return reconcile.Result{}, nil } + if len(view.Spec.TensorboardSpec.TensorflowImage) == 0 { + view.Spec.TensorboardSpec.TensorflowImage = defaultTensorflowImage + } + // Check and maybe delete the oldest viewer before creating the next one. if err := r.maybeDeleteOldestViewer(view.Spec.Type, view.Namespace); err != nil { // Couldn't delete. Requeue. 
@@ -165,12 +172,19 @@ func setPodSpecForTensorboard(view *viewerV1beta1.Viewer, s *corev1.PodSpec) {
 	c := &s.Containers[0]
 
 	c.Name = view.Name + "-pod"
-	c.Image = "tensorflow/tensorflow"
+	c.Image = view.Spec.TensorboardSpec.TensorflowImage
 	c.Args = []string{
 		"tensorboard",
 		fmt.Sprintf("--logdir=%s", view.Spec.TensorboardSpec.LogDir),
 		fmt.Sprintf("--path_prefix=/tensorboard/%s/", view.Name),
 	}
+
+	// Assumes the image reference includes a ':<tag>' suffix, as the default
+	// image above always does.
+	tfImageVersion := strings.Split(view.Spec.TensorboardSpec.TensorflowImage, ":")[1]
+	// TensorBoard 2.x binds to localhost by default, so --bind_all is needed
+	// for the viewer service to reach it; TF 1.x does not support the flag.
+	if !strings.HasPrefix(tfImageVersion, `1.`) {
+		c.Args = append(c.Args, "--bind_all")
+	}
+
 	c.Ports = []corev1.ContainerPort{
 		corev1.ContainerPort{ContainerPort: viewerTargetPort},
 	}
@@ -246,6 +260,7 @@ func serviceFrom(v *viewerV1beta1.Viewer, deploymentName string) *corev1.Service
 		},
 		Ports: []corev1.ServicePort{
 			corev1.ServicePort{
+				Name:       "http",
 				Protocol:   corev1.ProtocolTCP,
 				Port:       80,
 				TargetPort: intstr.IntOrString{IntVal: viewerTargetPort}},
@@ -273,5 +288,5 @@ func (r *Reconciler) maybeDeleteOldestViewer(t viewerV1beta1.ViewerType, namespa
 		}
 	}
 
-	return r.Client.Delete(context.Background(), oldest, nil)
+	return r.Client.Delete(context.Background(), oldest)
 }
diff --git a/backend/src/crd/controller/viewer/reconciler/reconciler_test.go b/backend/src/crd/controller/viewer/reconciler/reconciler_test.go
index 19df04a4254..b303ec59925 100644
--- a/backend/src/crd/controller/viewer/reconciler/reconciler_test.go
+++ b/backend/src/crd/controller/viewer/reconciler/reconciler_test.go
@@ -38,6 +38,8 @@ import (
 
 var viewer *Reconciler
 
+const tensorflowImage = "potentially_custom_tensorflow:dummy"
+
 func TestMain(m *testing.M) {
 	viewerV1beta1.AddToScheme(scheme.Scheme)
 	os.Exit(m.Run())
@@ -128,7 +130,8 @@ func TestReconcile_EachViewerCreatesADeployment(t *testing.T) {
 		Spec: viewerV1beta1.ViewerSpec{
 			Type: viewerV1beta1.ViewerTypeTensorboard,
 			TensorboardSpec: viewerV1beta1.TensorboardSpec{
-				LogDir: "gs://tensorboard/logdir",
+				LogDir:          "gs://tensorboard/logdir",
+				TensorflowImage: tensorflowImage,
 			},
 		},
 	}
@@ -174,11 +177,12 @@ func TestReconcile_EachViewerCreatesADeployment(t *testing.T) {
 						Spec: corev1.PodSpec{
 							Containers: []corev1.Container{{
 								Name:  "viewer-123-pod",
-								Image: "tensorflow/tensorflow",
+								Image: tensorflowImage,
 								Args: []string{
 									"tensorboard",
 									"--logdir=gs://tensorboard/logdir",
-									"--path_prefix=/tensorboard/viewer-123/"},
+									"--path_prefix=/tensorboard/viewer-123/",
+									"--bind_all"},
 								Ports: []corev1.ContainerPort{{ContainerPort: 6006}},
 							}}}}}}}
 
@@ -200,7 +204,8 @@ func TestReconcile_ViewerUsesSpecifiedVolumeMountsForDeployment(t *testing.T) {
 		Spec: viewerV1beta1.ViewerSpec{
 			Type: viewerV1beta1.ViewerTypeTensorboard,
 			TensorboardSpec: viewerV1beta1.TensorboardSpec{
-				LogDir: "gs://tensorboard/logdir",
+				LogDir:          "gs://tensorboard/logdir",
+				TensorflowImage: tensorflowImage,
 			},
 			PodTemplateSpec: corev1.PodTemplateSpec{
 				Spec: corev1.PodSpec{
@@ -271,11 +276,12 @@ func TestReconcile_ViewerUsesSpecifiedVolumeMountsForDeployment(t *testing.T) {
 						Spec: corev1.PodSpec{
 							Containers: []corev1.Container{{
 								Name:  "viewer-123-pod",
-								Image: "tensorflow/tensorflow",
+								Image: tensorflowImage,
 								Args: []string{
 									"tensorboard",
 									"--logdir=gs://tensorboard/logdir",
-									"--path_prefix=/tensorboard/viewer-123/"},
+									"--path_prefix=/tensorboard/viewer-123/",
+									"--bind_all"},
 								Ports: []corev1.ContainerPort{{ContainerPort: 6006}},
 								VolumeMounts: []v1.VolumeMount{
 									{Name: "/volume-mount-name", MountPath: "/mount/path"},
@@ -311,7 +317,8 @@ func TestReconcile_EachViewerCreatesAService(t *testing.T) {
 		Spec:
 viewerV1beta1.ViewerSpec{
 			Type: viewerV1beta1.ViewerTypeTensorboard,
 			TensorboardSpec: viewerV1beta1.TensorboardSpec{
-				LogDir: "gs://tensorboard/logdir",
+				LogDir:          "gs://tensorboard/logdir",
+				TensorflowImage: tensorflowImage,
 			},
 		},
 	}
@@ -352,6 +359,7 @@ func TestReconcile_EachViewerCreatesAService(t *testing.T) {
 		}},
 		Spec: corev1.ServiceSpec{
 			Ports: []corev1.ServicePort{corev1.ServicePort{
+				Name:       "http",
 				Protocol:   corev1.ProtocolTCP,
 				Port:       int32(80),
 				TargetPort: intstr.IntOrString{IntVal: viewerTargetPort},
@@ -381,7 +389,8 @@ func TestReconcile_UnknownViewerTypesAreIgnored(t *testing.T) {
 		Spec: viewerV1beta1.ViewerSpec{
 			Type: "unknownType",
 			TensorboardSpec: viewerV1beta1.TensorboardSpec{
-				LogDir: "gs://tensorboard/logdir",
+				LogDir:          "gs://tensorboard/logdir",
+				TensorflowImage: tensorflowImage,
 			},
 		},
 	}
@@ -451,7 +460,8 @@ func makeViewer(id int) (*types.NamespacedName, *viewerV1beta1.Viewer) {
 		Spec: viewerV1beta1.ViewerSpec{
 			Type: viewerV1beta1.ViewerTypeTensorboard,
 			TensorboardSpec: viewerV1beta1.TensorboardSpec{
-				LogDir: "gs://tensorboard/logdir",
+				LogDir:          "gs://tensorboard/logdir",
+				TensorflowImage: tensorflowImage,
 			},
 		},
 	}
diff --git a/backend/src/crd/pkg/apis/scheduledworkflow/v1beta1/types.go b/backend/src/crd/pkg/apis/scheduledworkflow/v1beta1/types.go
index e8963341d8d..bbff7818a85 100644
--- a/backend/src/crd/pkg/apis/scheduledworkflow/v1beta1/types.go
+++ b/backend/src/crd/pkg/apis/scheduledworkflow/v1beta1/types.go
@@ -56,6 +56,12 @@ type ScheduledWorkflowSpec struct {
 	// +optional
 	MaxConcurrency *int64 `json:"maxConcurrency,omitempty"`
 
+	// If NoCatchup is true, the controller only schedules the latest period
+	// when it cannot catch up.
+	// NoCatchup defaults to false if not specified.
+	// +optional
+	NoCatchup *bool `json:"noCatchup,omitempty"`
+
 	// Max number of completed workflows to keep track of.
 	// If MaxHistory is not specified, MaxHistory is 10.
 	// MaxHistory cannot be smaller than 0.
diff --git a/backend/src/crd/pkg/apis/viewer/v1beta1/types.go b/backend/src/crd/pkg/apis/viewer/v1beta1/types.go
index a9c7ae8b04f..378e5d63d81 100644
--- a/backend/src/crd/pkg/apis/viewer/v1beta1/types.go
+++ b/backend/src/crd/pkg/apis/viewer/v1beta1/types.go
@@ -59,7 +59,8 @@ const (
 type TensorboardSpec struct {
 	// LogDir is the location of the log directory to be read by tensorboard, i.e.,
 	// ---log_dir.
-	LogDir string `json:"logDir"`
+	LogDir          string `json:"logDir"`
+	TensorflowImage string `json:"tensorflowImage"`
 }
 
 // ViewerSpec is the spec for a Viewer resource.
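For illustration, here is a minimal sketch (not part of the patch) of how the new NoCatchup field flows through the NoCatchupOrFalse helper added above; the NewScheduledWorkflow wrapper constructor in common/util is assumed here, everything else follows the hunks in this diff:

	package main

	import (
		"fmt"

		commonutil "github.com/kubeflow/pipelines/backend/src/common/util"
		swfapi "github.com/kubeflow/pipelines/backend/src/crd/pkg/apis/scheduledworkflow/v1beta1"
	)

	func main() {
		noCatchup := true
		swf := commonutil.NewScheduledWorkflow(&swfapi.ScheduledWorkflow{
			Spec: swfapi.ScheduledWorkflowSpec{
				// Skip missed intervals: when the controller falls behind,
				// only the latest period is scheduled.
				NoCatchup: &noCatchup,
			},
		})
		fmt.Println(swf.NoCatchupOrFalse()) // prints true; a nil NoCatchup reads as false
	}

Because NoCatchup is an optional pointer with omitempty, ScheduledWorkflow objects created before this change leave it nil and keep the old catch-up behavior by default.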
diff --git a/backend/test/integration/BUILD.bazel b/backend/test/integration/BUILD.bazel
index fd6644e8f74..2c0c7f07215 100644
--- a/backend/test/integration/BUILD.bazel
+++ b/backend/test/integration/BUILD.bazel
@@ -6,7 +6,9 @@ go_test(
         "experiment_api_test.go",
         "job_api_test.go",
         "pipeline_api_test.go",
+        "pipeline_version_api_test.go",
         "run_api_test.go",
+        "upgrade_test.go",
         "visualization_api_test.go",
     ],
     embed = [":go_default_library"],
@@ -28,8 +30,10 @@ go_test(
         "//backend/test:go_default_library",
         "@com_github_argoproj_argo//pkg/apis/workflow/v1alpha1:go_default_library",
         "@com_github_ghodss_yaml//:go_default_library",
+        "@com_github_go_openapi_strfmt//:go_default_library",
         "@com_github_golang_glog//:go_default_library",
         "@com_github_stretchr_testify//assert:go_default_library",
+        "@com_github_stretchr_testify//require:go_default_library",
         "@com_github_stretchr_testify//suite:go_default_library",
         "@io_k8s_apimachinery//pkg/util/yaml:go_default_library",
         "@org_golang_google_grpc//:go_default_library",
diff --git a/backend/test/integration/README.md b/backend/test/integration/README.md
new file mode 100644
index 00000000000..ee6e5109717
--- /dev/null
+++ b/backend/test/integration/README.md
@@ -0,0 +1,11 @@
+## API Server Integration Tests
+
+### WARNING
+**These integration tests will delete all the data in your KFP instance; only run them against a test cluster.**
+
+### How to run
+
+1. Configure kubectl to connect to your KFP cluster.
+2. Run all integration tests: `NAMESPACE=<your-namespace> ./run_tests_locally.sh`.
+3. Or select certain tests: `NAMESPACE=<your-namespace> ./run_tests_locally.sh -testify.m Job`.
+   Reference: https://stackoverflow.com/a/43312451
diff --git a/backend/test/integration/experiment_api_test.go b/backend/test/integration/experiment_api_test.go
index 97b5e7fb753..f01dffeda2e 100644
--- a/backend/test/integration/experiment_api_test.go
+++ b/backend/test/integration/experiment_api_test.go
@@ -8,6 +8,11 @@ import (
 	"github.com/golang/glog"
 	params "github.com/kubeflow/pipelines/backend/api/go_http_client/experiment_client/experiment_service"
 	"github.com/kubeflow/pipelines/backend/api/go_http_client/experiment_model"
+	jobParams "github.com/kubeflow/pipelines/backend/api/go_http_client/job_client/job_service"
+	"github.com/kubeflow/pipelines/backend/api/go_http_client/job_model"
+	uploadParams "github.com/kubeflow/pipelines/backend/api/go_http_client/pipeline_upload_client/pipeline_upload_service"
+	runParams "github.com/kubeflow/pipelines/backend/api/go_http_client/run_client/run_service"
+	"github.com/kubeflow/pipelines/backend/api/go_http_client/run_model"
 	"github.com/kubeflow/pipelines/backend/src/common/client/api_server"
 	"github.com/kubeflow/pipelines/backend/src/common/util"
 	"github.com/kubeflow/pipelines/backend/test"
@@ -17,8 +22,12 @@
 
 type ExperimentApiTest struct {
 	suite.Suite
-	namespace        string
-	experimentClient *api_server.ExperimentClient
+	namespace            string
+	experimentClient     *api_server.ExperimentClient
+	pipelineClient       *api_server.PipelineClient
+	pipelineUploadClient *api_server.PipelineUploadClient
+	runClient            *api_server.RunClient
+	jobClient            *api_server.JobClient
 }
 
 // Check the namespace have ML job installed and ready
 func (s *ExperimentApiTest) SetupTest() {
 	if !*runIntegrationTests {
 		s.T().SkipNow()
 		return
 	}
 
-	err := test.WaitForReady(*namespace, *initializeTimeout)
-	if err != nil {
-		glog.Exitf("Failed to initialize test.
Error: %v", err) + if !*isDevMode { + err := test.WaitForReady(*namespace, *initializeTimeout) + if err != nil { + glog.Exitf("Failed to initialize test. Error: %v", err) + } } s.namespace = *namespace clientConfig := test.GetClientConfig(*namespace) + var err error s.experimentClient, err = api_server.NewExperimentClient(clientConfig, false) if err != nil { glog.Exitf("Failed to get experiment client. Error: %v", err) } + s.pipelineUploadClient, err = api_server.NewPipelineUploadClient(clientConfig, false) + if err != nil { + glog.Exitf("Failed to get pipeline upload client. Error: %s", err.Error()) + } + s.pipelineClient, err = api_server.NewPipelineClient(clientConfig, false) + if err != nil { + glog.Exitf("Failed to get pipeline client. Error: %s", err.Error()) + } + s.runClient, err = api_server.NewRunClient(clientConfig, false) + if err != nil { + glog.Exitf("Failed to get run client. Error: %s", err.Error()) + } + s.jobClient, err = api_server.NewJobClient(clientConfig, false) + if err != nil { + glog.Exitf("Failed to get job client. Error: %s", err.Error()) + } + + s.cleanUp() } func (s *ExperimentApiTest) TestExperimentAPI() { @@ -57,7 +87,7 @@ func (s *ExperimentApiTest) TestExperimentAPI() { assert.Nil(t, err) expectedTrainingExperiment := &experiment_model.APIExperiment{ ID: trainingExperiment.ID, Name: experiment.Name, - Description: experiment.Description, CreatedAt: trainingExperiment.CreatedAt} + Description: experiment.Description, CreatedAt: trainingExperiment.CreatedAt, StorageState: "STORAGESTATE_AVAILABLE"} assert.Equal(t, expectedTrainingExperiment, trainingExperiment) /* ---------- Create an experiment with same name. Should fail due to name uniqueness ---------- */ @@ -155,10 +185,104 @@ func (s *ExperimentApiTest) TestExperimentAPI() { assert.Nil(t, err) assert.Equal(t, expectedTrainingExperiment, experiment) - /* ---------- Clean up ---------- */ - test.DeleteAllExperiments(s.experimentClient, t) + /* ---------- Create a pipeline version and two runs and two jobs -------------- */ + pipeline, err := s.pipelineUploadClient.UploadFile("../resources/hello-world.yaml", uploadParams.NewUploadPipelineParams()) + assert.Nil(t, err) + time.Sleep(1 * time.Second) + pipelineVersion, err := s.pipelineUploadClient.UploadPipelineVersion( + "../resources/hello-world.yaml", &uploadParams.UploadPipelineVersionParams{ + Name: util.StringPointer("hello-world-version"), + Pipelineid: util.StringPointer(pipeline.ID), + }) + assert.Nil(t, err) + createRunRequest := &runParams.CreateRunParams{Body: &run_model.APIRun{ + Name: "hello world", + Description: "this is hello world", + ResourceReferences: []*run_model.APIResourceReference{ + {Key: &run_model.APIResourceKey{Type: run_model.APIResourceTypeEXPERIMENT, ID: experiment.ID}, + Name: experiment.Name, Relationship: run_model.APIRelationshipOWNER}, + {Key: &run_model.APIResourceKey{Type: run_model.APIResourceTypePIPELINEVERSION, ID: pipelineVersion.ID}, + Relationship: run_model.APIRelationshipCREATOR}, + }, + }} + run1, _, err := s.runClient.Create(createRunRequest) + assert.Nil(t, err) + run2, _, err := s.runClient.Create(createRunRequest) + assert.Nil(t, err) + /* ---------- Create a new hello world job by specifying pipeline ID ---------- */ + createJobRequest := &jobParams.CreateJobParams{Body: &job_model.APIJob{ + Name: "hello world", + Description: "this is hello world", + ResourceReferences: []*job_model.APIResourceReference{ + {Key: &job_model.APIResourceKey{Type: job_model.APIResourceTypeEXPERIMENT, ID: experiment.ID}, + 
Relationship: job_model.APIRelationshipOWNER}, + {Key: &job_model.APIResourceKey{Type: job_model.APIResourceTypePIPELINEVERSION, ID: pipelineVersion.ID}, + Relationship: job_model.APIRelationshipCREATOR}, + }, + MaxConcurrency: 10, + Enabled: true, + }} + job1, err := s.jobClient.Create(createJobRequest) + assert.Nil(t, err) + job2, err := s.jobClient.Create(createJobRequest) + assert.Nil(t, err) + + /* ---------- Archive an experiment -----------------*/ + err = s.experimentClient.Archive(¶ms.ArchiveExperimentParams{ID: trainingExperiment.ID}) + + /* ---------- Verify experiment and its runs ------- */ + experiment, err = s.experimentClient.Get(¶ms.GetExperimentParams{ID: trainingExperiment.ID}) + assert.Nil(t, err) + assert.Equal(t, experiment_model.ExperimentStorageState("STORAGESTATE_ARCHIVED"), experiment.StorageState) + retrievedRun1, _, err := s.runClient.Get(&runParams.GetRunParams{RunID: run1.Run.ID}) + assert.Nil(t, err) + assert.Equal(t, run_model.RunStorageState("STORAGESTATE_ARCHIVED"), retrievedRun1.Run.StorageState) + retrievedRun2, _, err := s.runClient.Get(&runParams.GetRunParams{RunID: run2.Run.ID}) + assert.Nil(t, err) + assert.Equal(t, run_model.RunStorageState("STORAGESTATE_ARCHIVED"), retrievedRun2.Run.StorageState) + retrievedJob1, err := s.jobClient.Get(&jobParams.GetJobParams{ID: job1.ID}) + assert.Nil(t, err) + assert.Equal(t, false, retrievedJob1.Enabled) + retrievedJob2, err := s.jobClient.Get(&jobParams.GetJobParams{ID: job2.ID}) + assert.Nil(t, err) + assert.Equal(t, false, retrievedJob2.Enabled) + + /* ---------- Unarchive an experiment -----------------*/ + err = s.experimentClient.Unarchive(¶ms.UnarchiveExperimentParams{ID: trainingExperiment.ID}) + + /* ---------- Verify experiment and its runs and jobs --------- */ + experiment, err = s.experimentClient.Get(¶ms.GetExperimentParams{ID: trainingExperiment.ID}) + assert.Nil(t, err) + assert.Equal(t, experiment_model.ExperimentStorageState("STORAGESTATE_AVAILABLE"), experiment.StorageState) + retrievedRun1, _, err = s.runClient.Get(&runParams.GetRunParams{RunID: run1.Run.ID}) + assert.Nil(t, err) + assert.Equal(t, run_model.RunStorageState("STORAGESTATE_ARCHIVED"), retrievedRun1.Run.StorageState) + retrievedRun2, _, err = s.runClient.Get(&runParams.GetRunParams{RunID: run2.Run.ID}) + assert.Nil(t, err) + assert.Equal(t, run_model.RunStorageState("STORAGESTATE_ARCHIVED"), retrievedRun2.Run.StorageState) + retrievedJob1, err = s.jobClient.Get(&jobParams.GetJobParams{ID: job1.ID}) + assert.Nil(t, err) + assert.Equal(t, false, retrievedJob1.Enabled) + retrievedJob2, err = s.jobClient.Get(&jobParams.GetJobParams{ID: job2.ID}) + assert.Nil(t, err) + assert.Equal(t, false, retrievedJob2.Enabled) } func TestExperimentAPI(t *testing.T) { suite.Run(t, new(ExperimentApiTest)) } + +func (s *ExperimentApiTest) TearDownSuite() { + if *runIntegrationTests { + if !*isDevMode { + s.cleanUp() + } + } +} + +func (s *ExperimentApiTest) cleanUp() { + test.DeleteAllExperiments(s.experimentClient, s.T()) + test.DeleteAllPipelines(s.pipelineClient, s.T()) + test.DeleteAllRuns(s.runClient, s.T()) + test.DeleteAllJobs(s.jobClient, s.T()) +} diff --git a/backend/test/integration/flags.go b/backend/test/integration/flags.go index 488813c9f99..6e688bf3ee7 100644 --- a/backend/test/integration/flags.go +++ b/backend/test/integration/flags.go @@ -8,3 +8,11 @@ import ( var namespace = flag.String("namespace", "kubeflow", "The namespace ml pipeline deployed to") var initializeTimeout = flag.Duration("initializeTimeout", 2*time.Minute, 
"Duration to wait for test initialization") var runIntegrationTests = flag.Bool("runIntegrationTests", false, "Whether to also run integration tests that call the service") +var runUpgradeTests = flag.Bool("runUpgradeTests", false, "Whether to run upgrade tests") + +/** + * Differences in dev mode: + * 1. Resources are not cleaned up when a test finishes, so that developer can debug manually. + * 2. One step that doesn't work locally is skipped. + */ +var isDevMode = flag.Bool("isDevMode", false, "Dev mode helps local development of integration tests") diff --git a/backend/test/integration/job_api_test.go b/backend/test/integration/job_api_test.go index 58ee4d40991..6238310491c 100644 --- a/backend/test/integration/job_api_test.go +++ b/backend/test/integration/job_api_test.go @@ -2,11 +2,13 @@ package integration import ( "io/ioutil" + "sort" "testing" "time" "github.com/kubeflow/pipelines/backend/test" + "github.com/go-openapi/strfmt" "github.com/golang/glog" experimentparams "github.com/kubeflow/pipelines/backend/api/go_http_client/experiment_client/experiment_service" "github.com/kubeflow/pipelines/backend/api/go_http_client/experiment_model" @@ -23,6 +25,12 @@ import ( "k8s.io/apimachinery/pkg/util/yaml" ) +const ( + second = 1 + minute = 60 * second + hour = 60 * minute +) + type JobApiTestSuite struct { suite.Suite namespace string @@ -34,6 +42,12 @@ type JobApiTestSuite struct { jobClient *api_server.JobClient } +type JobResourceReferenceSorter []*job_model.APIResourceReference + +func (r JobResourceReferenceSorter) Len() int { return len(r) } +func (r JobResourceReferenceSorter) Less(i, j int) bool { return r[i].Name < r[j].Name } +func (r JobResourceReferenceSorter) Swap(i, j int) { r[i], r[j] = r[j], r[i] } + // Check the namespace have ML pipeline installed and ready func (s *JobApiTestSuite) SetupTest() { if !*runIntegrationTests { @@ -41,12 +55,15 @@ func (s *JobApiTestSuite) SetupTest() { return } - err := test.WaitForReady(*namespace, *initializeTimeout) - if err != nil { - glog.Exitf("Failed to initialize test. Error: %s", err.Error()) + if !*isDevMode { + err := test.WaitForReady(*namespace, *initializeTimeout) + if err != nil { + glog.Exitf("Failed to initialize test. Error: %s", err.Error()) + } } s.namespace = *namespace clientConfig := test.GetClientConfig(*namespace) + var err error s.experimentClient, err = api_server.NewExperimentClient(clientConfig, false) if err != nil { glog.Exitf("Failed to get pipeline upload client. Error: %s", err.Error()) @@ -67,6 +84,8 @@ func (s *JobApiTestSuite) SetupTest() { if err != nil { glog.Exitf("Failed to get job client. 
Error: %s", err.Error()) } + + s.cleanUp() } func (s *JobApiTestSuite) TestJobApis() { @@ -76,6 +95,15 @@ func (s *JobApiTestSuite) TestJobApis() { helloWorldPipeline, err := s.pipelineUploadClient.UploadFile("../resources/hello-world.yaml", uploadParams.NewUploadPipelineParams()) assert.Nil(t, err) + /* ---------- Upload pipeline version YAML ---------- */ + time.Sleep(1 * time.Second) + helloWorldPipelineVersion, err := s.pipelineUploadClient.UploadPipelineVersion( + "../resources/hello-world.yaml", &uploadParams.UploadPipelineVersionParams{ + Name: util.StringPointer("hello-world-version"), + Pipelineid: util.StringPointer(helloWorldPipeline.ID), + }) + assert.Nil(t, err) + /* ---------- Create a new hello world experiment ---------- */ experiment := &experiment_model.APIExperiment{Name: "hello world experiment"} helloWorldExperiment, err := s.experimentClient.Create(&experimentparams.CreateExperimentParams{Body: experiment}) @@ -85,24 +113,23 @@ func (s *JobApiTestSuite) TestJobApis() { createJobRequest := &jobparams.CreateJobParams{Body: &job_model.APIJob{ Name: "hello world", Description: "this is hello world", - PipelineSpec: &job_model.APIPipelineSpec{ - PipelineID: helloWorldPipeline.ID, - }, ResourceReferences: []*job_model.APIResourceReference{ {Key: &job_model.APIResourceKey{Type: job_model.APIResourceTypeEXPERIMENT, ID: helloWorldExperiment.ID}, Relationship: job_model.APIRelationshipOWNER}, + {Key: &job_model.APIResourceKey{Type: job_model.APIResourceTypePIPELINEVERSION, ID: helloWorldPipelineVersion.ID}, + Relationship: job_model.APIRelationshipCREATOR}, }, MaxConcurrency: 10, Enabled: true, }} helloWorldJob, err := s.jobClient.Create(createJobRequest) assert.Nil(t, err) - s.checkHelloWorldJob(t, helloWorldJob, helloWorldExperiment.ID, helloWorldExperiment.Name, helloWorldPipeline.ID) + s.checkHelloWorldJob(t, helloWorldJob, helloWorldExperiment.ID, helloWorldExperiment.Name, helloWorldPipelineVersion.ID, helloWorldPipelineVersion.Name) /* ---------- Get hello world job ---------- */ helloWorldJob, err = s.jobClient.Get(&jobparams.GetJobParams{ID: helloWorldJob.ID}) assert.Nil(t, err) - s.checkHelloWorldJob(t, helloWorldJob, helloWorldExperiment.ID, helloWorldExperiment.Name, helloWorldPipeline.ID) + s.checkHelloWorldJob(t, helloWorldJob, helloWorldExperiment.ID, helloWorldExperiment.Name, helloWorldPipelineVersion.ID, helloWorldPipelineVersion.Name) /* ---------- Create a new argument parameter experiment ---------- */ experiment = &experiment_model.APIExperiment{Name: "argument parameter experiment"} @@ -207,30 +234,138 @@ func (s *JobApiTestSuite) TestJobApis() { assert.Equal(t, 1, totalSize) argParamsRun := runs[0] s.checkArgParamsRun(t, argParamsRun, argParamsExperiment.ID, argParamsExperiment.Name, argParamsJob.ID, argParamsJob.Name) +} + +func (s *JobApiTestSuite) TestJobApis_noCatchupOption() { + t := s.T() + + /* ---------- Upload pipelines YAML ---------- */ + pipeline, err := s.pipelineUploadClient.UploadFile("../resources/hello-world.yaml", uploadParams.NewUploadPipelineParams()) + assert.Nil(t, err) + + /* ---------- Upload pipeline version YAML ---------- */ + time.Sleep(1 * time.Second) + helloWorldPipelineVersion, err := s.pipelineUploadClient.UploadPipelineVersion( + "../resources/hello-world.yaml", &uploadParams.UploadPipelineVersionParams{ + Name: util.StringPointer("hello-world-version"), + Pipelineid: util.StringPointer(pipeline.ID), + }) + assert.Nil(t, err) + + /* ---------- Create a periodic job with start and end date in the past and catchup = true 
---------- */
+	experiment := &experiment_model.APIExperiment{Name: "periodic catchup true"}
+	periodicCatchupTrueExperiment, err := s.experimentClient.Create(&experimentparams.CreateExperimentParams{Body: experiment})
+	assert.Nil(t, err)
+
+	job := jobInThePastForTwoMinutes(jobOptions{
+		pipelineVersionId: helloWorldPipelineVersion.ID,
+		experimentId:      periodicCatchupTrueExperiment.ID,
+		periodic:          true,
+	})
+	job.Name = "periodic-catchup-true-"
+	job.Description = "A job with NoCatchup=false will backfill each past interval when behind schedule."
+	job.NoCatchup = false // This is the key difference.
+	createJobRequest := &jobparams.CreateJobParams{Body: job}
+	_, err = s.jobClient.Create(createJobRequest)
+	assert.Nil(t, err)
+
+	/* -------- Create another periodic job with start and end date in the past but catchup = false ------ */
+	experiment = &experiment_model.APIExperiment{Name: "periodic catchup false"}
+	periodicCatchupFalseExperiment, err := s.experimentClient.Create(&experimentparams.CreateExperimentParams{Body: experiment})
+	assert.Nil(t, err)
+
+	job = jobInThePastForTwoMinutes(jobOptions{
+		pipelineVersionId: helloWorldPipelineVersion.ID,
+		experimentId:      periodicCatchupFalseExperiment.ID,
+		periodic:          true,
+	})
+	job.Name = "periodic-catchup-false-"
+	job.Description = "A job with NoCatchup=true only schedules the last interval when behind schedule."
+	job.NoCatchup = true // This is the key difference.
+	createJobRequest = &jobparams.CreateJobParams{Body: job}
+	_, err = s.jobClient.Create(createJobRequest)
+	assert.Nil(t, err)
+
+	/* ---------- Create a cron job with start and end date in the past and catchup = true ---------- */
+	experiment = &experiment_model.APIExperiment{Name: "cron catchup true"}
+	cronCatchupTrueExperiment, err := s.experimentClient.Create(&experimentparams.CreateExperimentParams{Body: experiment})
+	assert.Nil(t, err)
+
+	job = jobInThePastForTwoMinutes(jobOptions{
+		pipelineVersionId: helloWorldPipelineVersion.ID,
+		experimentId:      cronCatchupTrueExperiment.ID,
+		periodic:          false,
+	})
+	job.Name = "cron-catchup-true-"
+	job.Description = "A job with NoCatchup=false will backfill each past interval when behind schedule."
+	job.NoCatchup = false // This is the key difference.
+	createJobRequest = &jobparams.CreateJobParams{Body: job}
+	_, err = s.jobClient.Create(createJobRequest)
+	assert.Nil(t, err)
+
+	/* -------- Create another cron job with start and end date in the past but catchup = false ------ */
+	experiment = &experiment_model.APIExperiment{Name: "cron catchup false"}
+	cronCatchupFalseExperiment, err := s.experimentClient.Create(&experimentparams.CreateExperimentParams{Body: experiment})
+	assert.Nil(t, err)
+
+	job = jobInThePastForTwoMinutes(jobOptions{
+		pipelineVersionId: helloWorldPipelineVersion.ID,
+		experimentId:      cronCatchupFalseExperiment.ID,
+		periodic:          false,
+	})
+	job.Name = "cron-catchup-false-"
+	job.Description = "A job with NoCatchup=true only schedules the last interval when behind schedule."
+	job.NoCatchup = true // This is the key difference.
+	createJobRequest = &jobparams.CreateJobParams{Body: job}
+	_, err = s.jobClient.Create(createJobRequest)
+	assert.Nil(t, err)
+
+	// The ScheduledWorkflow CRD creates the runs, which are then synced to the
+	// DB by the persistence agent. This could take a few seconds to finish.
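+	// Each job above covers a 2-minute window with a 1-minute interval, so the
+	// catchup=true jobs are expected to backfill 2 runs each, while the
+	// catchup=false jobs schedule only the single latest run.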
+	// TODO: Retry list run every 5 seconds instead of sleeping for 40 seconds.
+	time.Sleep(40 * time.Second)
+
+	/* ---------- Assert number of runs when catchup = true ---------- */
+	_, runsWhenCatchupTrue, _, err := s.runClient.List(&runParams.ListRunsParams{
+		ResourceReferenceKeyType: util.StringPointer(string(run_model.APIResourceTypeEXPERIMENT)),
+		ResourceReferenceKeyID:   util.StringPointer(periodicCatchupTrueExperiment.ID)})
+	assert.Nil(t, err)
+	assert.Equal(t, 2, runsWhenCatchupTrue)
+	_, runsWhenCatchupTrue, _, err = s.runClient.List(&runParams.ListRunsParams{
+		ResourceReferenceKeyType: util.StringPointer(string(run_model.APIResourceTypeEXPERIMENT)),
+		ResourceReferenceKeyID:   util.StringPointer(cronCatchupTrueExperiment.ID)})
+	assert.Nil(t, err)
+	assert.Equal(t, 2, runsWhenCatchupTrue)
+
+	/* ---------- Assert number of runs when catchup = false ---------- */
+	_, runsWhenCatchupFalse, _, err := s.runClient.List(&runParams.ListRunsParams{
+		ResourceReferenceKeyType: util.StringPointer(string(run_model.APIResourceTypeEXPERIMENT)),
+		ResourceReferenceKeyID:   util.StringPointer(periodicCatchupFalseExperiment.ID)})
+	assert.Nil(t, err)
+	assert.Equal(t, 1, runsWhenCatchupFalse)
+	_, runsWhenCatchupFalse, _, err = s.runClient.List(&runParams.ListRunsParams{
+		ResourceReferenceKeyType: util.StringPointer(string(run_model.APIResourceTypeEXPERIMENT)),
+		ResourceReferenceKeyID:   util.StringPointer(cronCatchupFalseExperiment.ID)})
+	assert.Nil(t, err)
+	assert.Equal(t, 1, runsWhenCatchupFalse)
+}
 
-func (s *JobApiTestSuite) checkHelloWorldJob(t *testing.T, job *job_model.APIJob, experimentID string, experimentName string, pipelineID string) {
+func (s *JobApiTestSuite) checkHelloWorldJob(t *testing.T, job *job_model.APIJob, experimentID string, experimentName string, pipelineVersionId string, pipelineVersionName string) {
 	// Check workflow manifest is not empty
 	assert.Contains(t, job.PipelineSpec.WorkflowManifest, "whalesay")
+
 	expectedJob := &job_model.APIJob{
-		ID:          job.ID,
-		Name:        "hello world",
-		Description: "this is hello world",
+		ID:             job.ID,
+		Name:           "hello world",
+		Description:    "this is hello world",
+		ServiceAccount: "pipeline-runner",
 		PipelineSpec: &job_model.APIPipelineSpec{
-			PipelineID:       pipelineID,
-			PipelineName:     "hello-world.yaml",
 			WorkflowManifest: job.PipelineSpec.WorkflowManifest,
 		},
 		ResourceReferences: []*job_model.APIResourceReference{
 			{Key: &job_model.APIResourceKey{Type: job_model.APIResourceTypeEXPERIMENT, ID: experimentID},
 				Name: experimentName, Relationship: job_model.APIRelationshipOWNER,
 			},
+			{Key: &job_model.APIResourceKey{Type: job_model.APIResourceTypePIPELINEVERSION, ID: pipelineVersionId},
+				Name: pipelineVersionName, Relationship: job_model.APIRelationshipCREATOR},
 		},
 		MaxConcurrency: 10,
 		Enabled:        true,
@@ -240,6 +375,9 @@ func (s *JobApiTestSuite) checkHelloWorldJob(t *testing.T, job *job_model.APIJob
 		Trigger: &job_model.APITrigger{},
 	}
 
+	// Need to sort resource references before equality check as the order is non-deterministic
+	sort.Sort(JobResourceReferenceSorter(job.ResourceReferences))
+	sort.Sort(JobResourceReferenceSorter(expectedJob.ResourceReferences))
 	assert.Equal(t, expectedJob, job)
 }
 
@@ -251,9 +389,10 @@ func (s *JobApiTestSuite) checkArgParamsJob(t *testing.T, job *job_model.APIJob,
 	// Check runtime workflow manifest is not empty
 	assert.Contains(t, job.PipelineSpec.WorkflowManifest, "arguments-parameters-")
 	expectedJob := &job_model.APIJob{
-		ID:          job.ID,
-		Name:        "argument parameter",
-		Description: "this is argument parameter",
+		ID:             job.ID,
+		Name:           "argument parameter",
+		Description:    "this is argument
parameter", + ServiceAccount: "pipeline-runner", PipelineSpec: &job_model.APIPipelineSpec{ WorkflowManifest: job.PipelineSpec.WorkflowManifest, Parameters: []*job_model.APIParameter{ @@ -310,3 +449,73 @@ func (s *JobApiTestSuite) checkArgParamsRun(t *testing.T, run *run_model.APIRun, func TestJobApi(t *testing.T) { suite.Run(t, new(JobApiTestSuite)) } + +func (s *JobApiTestSuite) TearDownSuite() { + if *runIntegrationTests { + if !*isDevMode { + s.cleanUp() + } + } +} + +/** ======== the following are util functions ========= **/ + +func (s *JobApiTestSuite) cleanUp() { + test.DeleteAllExperiments(s.experimentClient, s.T()) + test.DeleteAllPipelines(s.pipelineClient, s.T()) + test.DeleteAllJobs(s.jobClient, s.T()) + test.DeleteAllRuns(s.runClient, s.T()) +} + +func defaultApiJob(pipelineVersionId, experimentId string) *job_model.APIJob { + return &job_model.APIJob{ + Name: "default-pipeline-name", + Description: "This is a default pipeline", + ResourceReferences: []*job_model.APIResourceReference{ + {Key: &job_model.APIResourceKey{Type: job_model.APIResourceTypeEXPERIMENT, ID: experimentId}, + Relationship: job_model.APIRelationshipOWNER}, + {Key: &job_model.APIResourceKey{Type: job_model.APIResourceTypePIPELINEVERSION, ID: pipelineVersionId}, + Relationship: job_model.APIRelationshipCREATOR}, + }, + MaxConcurrency: 10, + NoCatchup: false, + Trigger: &job_model.APITrigger{ + PeriodicSchedule: &job_model.APIPeriodicSchedule{ + StartTime: strfmt.NewDateTime(), + EndTime: strfmt.NewDateTime(), + IntervalSecond: 60, + }, + }, + Enabled: true, + } +} + +type jobOptions struct { + pipelineVersionId, experimentId string + periodic bool +} + +func jobInThePastForTwoMinutes(options jobOptions) *job_model.APIJob { + startTime := strfmt.DateTime(time.Unix(10*hour, 0)) + endTime := strfmt.DateTime(time.Unix(10*hour+2*minute, 0)) + + job := defaultApiJob(options.pipelineVersionId, options.experimentId) + if options.periodic { + job.Trigger = &job_model.APITrigger{ + PeriodicSchedule: &job_model.APIPeriodicSchedule{ + StartTime: startTime, + EndTime: endTime, + IntervalSecond: 60, // Runs every 1 minute. + }, + } + } else { + job.Trigger = &job_model.APITrigger{ + CronSchedule: &job_model.APICronSchedule{ + StartTime: startTime, + EndTime: endTime, + Cron: "0 * * * * ?", // Runs every 1 minute. + }, + } + } + return job +} diff --git a/backend/test/integration/pipeline_api_test.go b/backend/test/integration/pipeline_api_test.go index 4ad99ca07d1..8e4e2a9d1d1 100644 --- a/backend/test/integration/pipeline_api_test.go +++ b/backend/test/integration/pipeline_api_test.go @@ -39,11 +39,14 @@ func (s *PipelineApiTest) SetupTest() { return } - err := test.WaitForReady(*namespace, *initializeTimeout) - if err != nil { - glog.Exitf("Failed to initialize test. Error: %s", err.Error()) + if !*isDevMode { + err := test.WaitForReady(*namespace, *initializeTimeout) + if err != nil { + glog.Exitf("Failed to initialize test. Error: %s", err.Error()) + } } clientConfig := test.GetClientConfig(*namespace) + var err error s.pipelineUploadClient, err = api_server.NewPipelineUploadClient(clientConfig, false) if err != nil { glog.Exitf("Failed to get pipeline upload client. Error: %s", err.Error()) @@ -52,6 +55,8 @@ func (s *PipelineApiTest) SetupTest() { if err != nil { glog.Exitf("Failed to get pipeline client. 
Error: %s", err.Error())
 	}
+
+	s.cleanUp()
 }
 
 func (s *PipelineApiTest) TestPipelineAPI() {
@@ -179,9 +184,6 @@ func (s *PipelineApiTest) TestPipelineAPI() {
 	var expectedWorkflow v1alpha1.Workflow
 	err = yaml.Unmarshal(expected, &expectedWorkflow)
 	assert.Equal(t, expectedWorkflow, *template)
-
-	/* ---------- Clean up ---------- */
-	test.DeleteAllPipelines(s.pipelineClient, t)
 }
 
 func verifyPipeline(t *testing.T, pipeline *model.APIPipeline) {
@@ -195,6 +197,16 @@ func verifyPipeline(t *testing.T, pipeline *model.APIPipeline) {
 			{Name: "param1", Value: "hello"}, // Default value in the pipeline template
 			{Name: "param2"},                 // No default value in the pipeline
 		},
+		DefaultVersion: &model.APIPipelineVersion{
+			CreatedAt: pipeline.CreatedAt,
+			ID:        pipeline.ID,
+			Name:      "arguments-parameters.yaml",
+			Parameters: []*model.APIParameter{
+				{Name: "param1", Value: "hello"},
+				{Name: "param2"}},
+			ResourceReferences: []*model.APIResourceReference{{
+				Key:          &model.APIResourceKey{ID: pipeline.ID, Type: model.APIResourceTypePIPELINE},
+				Relationship: model.APIRelationshipOWNER}}},
 	}
 	assert.Equal(t, expected, *pipeline)
 }
@@ -202,3 +214,15 @@ func TestPipelineAPI(t *testing.T) {
 	suite.Run(t, new(PipelineApiTest))
 }
+
+func (s *PipelineApiTest) TearDownSuite() {
+	if *runIntegrationTests {
+		if !*isDevMode {
+			s.cleanUp()
+		}
+	}
+}
+
+func (s *PipelineApiTest) cleanUp() {
+	test.DeleteAllPipelines(s.pipelineClient, s.T())
+}
diff --git a/backend/test/integration/pipeline_version_api_test.go b/backend/test/integration/pipeline_version_api_test.go
new file mode 100644
index 00000000000..c5eccef0536
--- /dev/null
+++ b/backend/test/integration/pipeline_version_api_test.go
@@ -0,0 +1,298 @@
+package integration
+
+import (
+	"io/ioutil"
+	"testing"
+	"time"
+
+	"github.com/argoproj/argo/pkg/apis/workflow/v1alpha1"
+	"github.com/ghodss/yaml"
+	"github.com/golang/glog"
+	params "github.com/kubeflow/pipelines/backend/api/go_http_client/pipeline_client/pipeline_service"
+	"github.com/kubeflow/pipelines/backend/api/go_http_client/pipeline_model"
+	uploadParams "github.com/kubeflow/pipelines/backend/api/go_http_client/pipeline_upload_client/pipeline_upload_service"
+	"github.com/kubeflow/pipelines/backend/src/common/client/api_server"
+	"github.com/kubeflow/pipelines/backend/src/common/util"
+	"github.com/kubeflow/pipelines/backend/test"
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/suite"
+)
+
+// This test suite tests the various ways of importing a pipeline version into the pipeline system, including:
+// - uploading a YAML file
+// - uploading a tarball file
+// - providing a YAML file URL
+// - providing a tarball file URL
type PipelineVersionApiTest struct {
+	suite.Suite
+	pipelineClient       *api_server.PipelineClient
+	pipelineUploadClient *api_server.PipelineUploadClient
+}
+
+// Check that the namespace has the ML pipeline installed and ready
+func (s *PipelineVersionApiTest) SetupTest() {
+	if !*runIntegrationTests {
+		s.T().SkipNow()
+		return
+	}
+
+	if !*isDevMode {
+		err := test.WaitForReady(*namespace, *initializeTimeout)
+		if err != nil {
+			glog.Exitf("Failed to initialize test. Error: %s", err.Error())
+		}
+	}
+	clientConfig := test.GetClientConfig(*namespace)
+	var err error
+	s.pipelineUploadClient, err = api_server.NewPipelineUploadClient(clientConfig, false)
+	if err != nil {
+		glog.Exitf("Failed to get pipeline upload client.
Error: %s", err.Error()) + } + s.pipelineClient, err = api_server.NewPipelineClient(clientConfig, false) + if err != nil { + glog.Exitf("Failed to get pipeline client. Error: %s", err.Error()) + } + + s.cleanUp() +} + +func (s *PipelineVersionApiTest) TestPipelineVersionAPI() { + t := s.T() + + test.DeleteAllPipelines(s.pipelineClient, t) + + /* ---------- Upload a pipeline YAML ---------- */ + pipelineParams := uploadParams.NewUploadPipelineParams() + pipelineName := "test_pipeline" + pipelineParams.SetName(&pipelineName) + pipeline, err := s.pipelineUploadClient.UploadFile("../resources/arguments-parameters.yaml", pipelineParams) + assert.Nil(t, err) + assert.Equal(t, "test_pipeline", pipeline.Name) + + /* ---------- Get pipeline id ---------- */ + pipelines, totalSize, _, err := s.pipelineClient.List(params.NewListPipelinesParams()) + assert.Nil(t, err) + assert.Equal(t, 1, len(pipelines)) + assert.Equal(t, 1, totalSize) + pipelineId := pipelines[0].ID + + /* ---------- Upload a pipeline version YAML under test_pipeline ---------- */ + time.Sleep(1 * time.Second) + pipelineVersionParams := uploadParams.NewUploadPipelineVersionParams() + pipelineVersionParams.SetPipelineid(&pipelineId) + argumentYAMLPipelineVersion, err := s.pipelineUploadClient.UploadPipelineVersion("../resources/arguments-parameters.yaml", pipelineVersionParams) + assert.Nil(t, err) + assert.Equal(t, "arguments-parameters.yaml", argumentYAMLPipelineVersion.Name) + + /* ---------- Upload the same pipeline version again. Should fail due to name uniqueness ---------- */ + _, err = s.pipelineUploadClient.UploadPipelineVersion("../resources/arguments-parameters.yaml", uploadParams.NewUploadPipelineVersionParams()) + assert.NotNil(t, err) + assert.Contains(t, err.Error(), "Failed to upload pipeline version.") + + /* ---------- Import pipeline version YAML by URL ---------- */ + time.Sleep(1 * time.Second) + sequentialPipelineVersion, err := s.pipelineClient.CreatePipelineVersion(¶ms.CreatePipelineVersionParams{ + Body: &pipeline_model.APIPipelineVersion{ + Name: "sequential", + PackageURL: &pipeline_model.APIURL{ + PipelineURL: "https://storage.googleapis.com/ml-pipeline-dataset/sequential.yaml", + }, + ResourceReferences: []*pipeline_model.APIResourceReference{ + { + Key: &pipeline_model.APIResourceKey{Type: pipeline_model.APIResourceTypePIPELINE, ID: pipelineId}, + Relationship: pipeline_model.APIRelationshipOWNER, + }, + }, + }}) + assert.Nil(t, err) + assert.Equal(t, "sequential", sequentialPipelineVersion.Name) + + /* ---------- Upload pipeline version zip ---------- */ + time.Sleep(1 * time.Second) + argumentUploadPipelineVersion, err := s.pipelineUploadClient.UploadPipelineVersion( + "../resources/arguments.pipeline.zip", &uploadParams.UploadPipelineVersionParams{ + Name: util.StringPointer("zip-arguments-parameters"), + Pipelineid: util.StringPointer(pipelineId), + }) + assert.Nil(t, err) + assert.Equal(t, "zip-arguments-parameters", argumentUploadPipelineVersion.Name) + + /* ---------- Import pipeline tarball by URL ---------- */ + time.Sleep(1 * time.Second) + argumentUrlPipelineVersion, err := s.pipelineClient.CreatePipelineVersion(¶ms.CreatePipelineVersionParams{ + Body: &pipeline_model.APIPipelineVersion{ + Name: "arguments", + PackageURL: &pipeline_model.APIURL{ + PipelineURL: "https://storage.googleapis.com/ml-pipeline-dataset/arguments.pipeline.zip", + }, + ResourceReferences: []*pipeline_model.APIResourceReference{ + { + Key: &pipeline_model.APIResourceKey{Type: pipeline_model.APIResourceTypePIPELINE, ID: 
pipelineId}, + Relationship: pipeline_model.APIRelationshipOWNER, + }, + }, + }}) + assert.Nil(t, err) + assert.Equal(t, "arguments", argumentUrlPipelineVersion.Name) + + /* ---------- Verify list pipeline version works ---------- */ + pipelineVersions, totalSize, _, err := s.pipelineClient.ListPipelineVersions(¶ms.ListPipelineVersionsParams{ + ResourceKeyID: util.StringPointer(pipelineId), + ResourceKeyType: util.StringPointer("PIPELINE"), + }) + assert.Nil(t, err) + assert.Equal(t, 5, len(pipelineVersions)) + assert.Equal(t, 5, totalSize) + for _, p := range pipelineVersions { + assert.NotNil(t, *p) + assert.NotNil(t, p.CreatedAt) + assert.Contains(t, []string{"test_pipeline" /*default version created with pipeline*/, "sequential", "arguments", "arguments-parameters.yaml", "zip-arguments-parameters"}, p.Name) + + if p.Name == "arguments" { + assert.Equal(t, p.Parameters, + []*pipeline_model.APIParameter{ + {Name: "param1", Value: "hello"}, // Default value in the pipeline template + {Name: "param2"}, // No default value in the pipeline + }) + } + } + + /* ---------- Verify list pipeline sorted by names ---------- */ + listFirstPagePipelineVersions, totalSize, nextPageToken, err := s.pipelineClient.ListPipelineVersions( + ¶ms.ListPipelineVersionsParams{ + PageSize: util.Int32Pointer(3), + SortBy: util.StringPointer("name"), + ResourceKeyID: util.StringPointer(pipelineId), + ResourceKeyType: util.StringPointer("PIPELINE"), + }) + assert.Nil(t, err) + assert.Equal(t, 3, len(listFirstPagePipelineVersions)) + assert.Equal(t, 5, totalSize) + assert.Equal(t, "arguments", listFirstPagePipelineVersions[0].Name) + assert.Equal(t, "arguments-parameters.yaml", listFirstPagePipelineVersions[1].Name) + assert.Equal(t, "sequential", listFirstPagePipelineVersions[2].Name) + assert.NotEmpty(t, nextPageToken) + + listSecondPagePipelineVersions, totalSize, nextPageToken, err := s.pipelineClient.ListPipelineVersions( + ¶ms.ListPipelineVersionsParams{ + PageToken: util.StringPointer(nextPageToken), + PageSize: util.Int32Pointer(3), + SortBy: util.StringPointer("name"), + ResourceKeyID: util.StringPointer(pipelineId), + ResourceKeyType: util.StringPointer("PIPELINE"), + }) + assert.Nil(t, err) + assert.Equal(t, 2, len(listSecondPagePipelineVersions)) + assert.Equal(t, 5, totalSize) + assert.Equal(t, "test_pipeline", listSecondPagePipelineVersions[0].Name) + assert.Equal(t, "zip-arguments-parameters", listSecondPagePipelineVersions[1].Name) + assert.Empty(t, nextPageToken) + + /* ---------- Verify list pipeline version sorted by creation time ---------- */ + listFirstPagePipelineVersions, totalSize, nextPageToken, err = s.pipelineClient.ListPipelineVersions( + ¶ms.ListPipelineVersionsParams{ + PageSize: util.Int32Pointer(3), + SortBy: util.StringPointer("created_at"), + ResourceKeyID: util.StringPointer(pipelineId), + ResourceKeyType: util.StringPointer("PIPELINE"), + }) + assert.Nil(t, err) + assert.Equal(t, 3, len(listFirstPagePipelineVersions)) + assert.Equal(t, 5, totalSize) + assert.Equal(t, "test_pipeline", listFirstPagePipelineVersions[0].Name) + assert.Equal(t, "arguments-parameters.yaml", listFirstPagePipelineVersions[1].Name) + assert.Equal(t, "sequential", listFirstPagePipelineVersions[2].Name) + assert.NotEmpty(t, nextPageToken) + + listSecondPagePipelineVersions, totalSize, nextPageToken, err = s.pipelineClient.ListPipelineVersions( + ¶ms.ListPipelineVersionsParams{ + PageToken: util.StringPointer(nextPageToken), + PageSize: util.Int32Pointer(3), + SortBy: util.StringPointer("created_at"), + 
ResourceKeyID: util.StringPointer(pipelineId), + ResourceKeyType: util.StringPointer("PIPELINE"), + }) + assert.Nil(t, err) + assert.Equal(t, 2, len(listSecondPagePipelineVersions)) + assert.Equal(t, 5, totalSize) + assert.Equal(t, "zip-arguments-parameters", listSecondPagePipelineVersions[0].Name) + assert.Equal(t, "arguments", listSecondPagePipelineVersions[1].Name) + assert.Empty(t, nextPageToken) + + /* ---------- List pipeline versions sort by unsupported description field. Should fail. ---------- */ + _, _, _, err = s.pipelineClient.ListPipelineVersions(¶ms.ListPipelineVersionsParams{ + PageSize: util.Int32Pointer(2), + SortBy: util.StringPointer("unknownfield"), + ResourceKeyID: util.StringPointer(pipelineId), + ResourceKeyType: util.StringPointer("PIPELINE"), + }) + assert.NotNil(t, err) + + /* ---------- List pipeline versions sorted by names descend order ---------- */ + listFirstPagePipelineVersions, totalSize, nextPageToken, err = s.pipelineClient.ListPipelineVersions( + ¶ms.ListPipelineVersionsParams{ + PageSize: util.Int32Pointer(3), + SortBy: util.StringPointer("name desc"), + ResourceKeyID: util.StringPointer(pipelineId), + ResourceKeyType: util.StringPointer("PIPELINE"), + }) + assert.Nil(t, err) + assert.Equal(t, 3, len(listFirstPagePipelineVersions)) + assert.Equal(t, 5, totalSize) + assert.Equal(t, "zip-arguments-parameters", listFirstPagePipelineVersions[0].Name) + assert.Equal(t, "test_pipeline", listFirstPagePipelineVersions[1].Name) + assert.Equal(t, "sequential", listFirstPagePipelineVersions[2].Name) + assert.NotEmpty(t, nextPageToken) + + listSecondPagePipelineVersions, totalSize, nextPageToken, err = s.pipelineClient.ListPipelineVersions( + ¶ms.ListPipelineVersionsParams{ + PageToken: util.StringPointer(nextPageToken), + PageSize: util.Int32Pointer(3), + SortBy: util.StringPointer("name desc"), + ResourceKeyID: util.StringPointer(pipelineId), + ResourceKeyType: util.StringPointer("PIPELINE"), + }) + assert.Nil(t, err) + assert.Equal(t, 2, len(listSecondPagePipelineVersions)) + assert.Equal(t, 5, totalSize) + assert.Equal(t, "arguments-parameters.yaml", listSecondPagePipelineVersions[0].Name) + assert.Equal(t, "arguments", listSecondPagePipelineVersions[1].Name) + assert.Empty(t, nextPageToken) + + /* ---------- Verify get pipeline version works ---------- */ + pipelineVersion, err := s.pipelineClient.GetPipelineVersion(¶ms.GetPipelineVersionParams{VersionID: argumentUrlPipelineVersion.ID}) + assert.Nil(t, err) + assert.Equal(t, pipelineVersion.Name, "arguments") + assert.NotNil(t, pipelineVersion.CreatedAt) + assert.Equal(t, pipelineVersion.Parameters, + []*pipeline_model.APIParameter{ + {Name: "param1", Value: "hello"}, + {Name: "param2"}, + }) + + /* ---------- Verify get template works ---------- */ + template, err := s.pipelineClient.GetPipelineVersionTemplate(¶ms.GetPipelineVersionTemplateParams{VersionID: argumentYAMLPipelineVersion.ID}) + assert.Nil(t, err) + expected, err := ioutil.ReadFile("../resources/arguments-parameters.yaml") + assert.Nil(t, err) + var expectedWorkflow v1alpha1.Workflow + err = yaml.Unmarshal(expected, &expectedWorkflow) + assert.Equal(t, expectedWorkflow, *template) +} + +func TestPipelineVersionAPI(t *testing.T) { + suite.Run(t, new(PipelineVersionApiTest)) +} + +func (s *PipelineVersionApiTest) TearDownSuite() { + if *runIntegrationTests { + if !*isDevMode { + s.cleanUp() + } + } +} + +func (s *PipelineVersionApiTest) cleanUp() { + // Delete pipelines will delete pipelines and their versions. 
+ test.DeleteAllPipelines(s.pipelineClient, s.T()) +} diff --git a/backend/test/integration/run_api_test.go b/backend/test/integration/run_api_test.go index 48752c02a82..f25e7894b30 100644 --- a/backend/test/integration/run_api_test.go +++ b/backend/test/integration/run_api_test.go @@ -2,7 +2,9 @@ package integration import ( "io/ioutil" + "sort" "testing" + "time" "github.com/kubeflow/pipelines/backend/test" @@ -29,6 +31,12 @@ type RunApiTestSuite struct { runClient *api_server.RunClient } +type RunResourceReferenceSorter []*run_model.APIResourceReference + +func (r RunResourceReferenceSorter) Len() int { return len(r) } +func (r RunResourceReferenceSorter) Less(i, j int) bool { return r[i].Name < r[j].Name } +func (r RunResourceReferenceSorter) Swap(i, j int) { r[i], r[j] = r[j], r[i] } + // Check the namespace have ML pipeline installed and ready func (s *RunApiTestSuite) SetupTest() { if !*runIntegrationTests { @@ -36,12 +44,15 @@ func (s *RunApiTestSuite) SetupTest() { return } - err := test.WaitForReady(*namespace, *initializeTimeout) - if err != nil { - glog.Exitf("Failed to initialize test. Error: %s", err.Error()) + if !*isDevMode { + err := test.WaitForReady(*namespace, *initializeTimeout) + if err != nil { + glog.Exitf("Failed to initialize test. Error: %s", err.Error()) + } } s.namespace = *namespace clientConfig := test.GetClientConfig(*namespace) + var err error s.experimentClient, err = api_server.NewExperimentClient(clientConfig, false) if err != nil { glog.Exitf("Failed to get pipeline upload client. Error: %s", err.Error()) @@ -58,6 +69,8 @@ func (s *RunApiTestSuite) SetupTest() { if err != nil { glog.Exitf("Failed to get run client. Error: %s", err.Error()) } + + s.cleanUp() } func (s *RunApiTestSuite) TestRunApis() { @@ -67,31 +80,39 @@ func (s *RunApiTestSuite) TestRunApis() { helloWorldPipeline, err := s.pipelineUploadClient.UploadFile("../resources/hello-world.yaml", uploadParams.NewUploadPipelineParams()) assert.Nil(t, err) + /* ---------- Upload a pipeline version YAML under helloWorldPipeline ---------- */ + time.Sleep(1 * time.Second) + helloWorldPipelineVersion, err := s.pipelineUploadClient.UploadPipelineVersion( + "../resources/hello-world.yaml", &uploadParams.UploadPipelineVersionParams{ + Name: util.StringPointer("hello-world-version"), + Pipelineid: util.StringPointer(helloWorldPipeline.ID), + }) + assert.Nil(t, err) + /* ---------- Create a new hello world experiment ---------- */ experiment := &experiment_model.APIExperiment{Name: "hello world experiment"} helloWorldExperiment, err := s.experimentClient.Create(&experimentparams.CreateExperimentParams{Body: experiment}) assert.Nil(t, err) - /* ---------- Create a new hello world run by specifying pipeline ID ---------- */ + /* ---------- Create a new hello world run by specifying pipeline version ID ---------- */ createRunRequest := &runparams.CreateRunParams{Body: &run_model.APIRun{ Name: "hello world", Description: "this is hello world", - PipelineSpec: &run_model.APIPipelineSpec{ - PipelineID: helloWorldPipeline.ID, - }, ResourceReferences: []*run_model.APIResourceReference{ {Key: &run_model.APIResourceKey{Type: run_model.APIResourceTypeEXPERIMENT, ID: helloWorldExperiment.ID}, Name: helloWorldExperiment.Name, Relationship: run_model.APIRelationshipOWNER}, + {Key: &run_model.APIResourceKey{Type: run_model.APIResourceTypePIPELINEVERSION, ID: helloWorldPipelineVersion.ID}, + Relationship: run_model.APIRelationshipCREATOR}, }, }} helloWorldRunDetail, _, err := s.runClient.Create(createRunRequest) 
assert.Nil(t, err) - s.checkHelloWorldRunDetail(t, helloWorldRunDetail, helloWorldExperiment.ID, helloWorldExperiment.Name, helloWorldPipeline.ID) + s.checkHelloWorldRunDetail(t, helloWorldRunDetail, helloWorldExperiment.ID, helloWorldExperiment.Name, helloWorldPipelineVersion.ID, helloWorldPipelineVersion.Name) /* ---------- Get hello world run ---------- */ helloWorldRunDetail, _, err = s.runClient.Get(&runparams.GetRunParams{RunID: helloWorldRunDetail.Run.ID}) assert.Nil(t, err) - s.checkHelloWorldRunDetail(t, helloWorldRunDetail, helloWorldExperiment.ID, helloWorldExperiment.Name, helloWorldPipeline.ID) + s.checkHelloWorldRunDetail(t, helloWorldRunDetail, helloWorldExperiment.ID, helloWorldExperiment.Name, helloWorldPipelineVersion.ID, helloWorldPipelineVersion.Name) /* ---------- Create a new argument parameter experiment ---------- */ createExperimentRequest := &experimentparams.CreateExperimentParams{Body: &experiment_model.APIExperiment{Name: "argument parameter experiment"}} @@ -191,16 +212,23 @@ func (s *RunApiTestSuite) TestRunApis() { longRunningPipeline, err := s.pipelineUploadClient.UploadFile("../resources/long-running.yaml", uploadParams.NewUploadPipelineParamsWithTimeout(350)) assert.Nil(t, err) + /* ---------- Upload a long-running pipeline version YAML under longRunningPipeline ---------- */ + time.Sleep(1 * time.Second) + longRunningPipelineVersion, err := s.pipelineUploadClient.UploadPipelineVersion("../resources/long-running.yaml", &uploadParams.UploadPipelineVersionParams{ + Name: util.StringPointer("long-running-version"), + Pipelineid: util.StringPointer(longRunningPipeline.ID), + }) + assert.Nil(t, err) + /* ---------- Create a new long-running run by specifying pipeline ID ---------- */ createLongRunningRunRequest := &runparams.CreateRunParams{Body: &run_model.APIRun{ Name: "long running", Description: "this pipeline will run long enough for us to manually terminate it before it finishes", - PipelineSpec: &run_model.APIPipelineSpec{ - PipelineID: longRunningPipeline.ID, - }, ResourceReferences: []*run_model.APIResourceReference{ {Key: &run_model.APIResourceKey{Type: run_model.APIResourceTypeEXPERIMENT, ID: helloWorldExperiment.ID}, Relationship: run_model.APIRelationshipOWNER}, + {Key: &run_model.APIResourceKey{Type: run_model.APIResourceTypePIPELINEVERSION, ID: longRunningPipelineVersion.ID}, + Relationship: run_model.APIRelationshipCREATOR}, }, }} longRunningRunDetail, _, err := s.runClient.Create(createLongRunningRunRequest) @@ -214,67 +242,73 @@ func (s *RunApiTestSuite) TestRunApis() { /* ---------- Get long-running run ---------- */ longRunningRunDetail, _, err = s.runClient.Get(&runparams.GetRunParams{RunID: longRunningRunDetail.Run.ID}) assert.Nil(t, err) - s.checkTerminatedRunDetail(t, longRunningRunDetail, helloWorldExperiment.ID, helloWorldExperiment.Name, longRunningPipeline.ID) - - /* ---------- Clean up ---------- */ - test.DeleteAllExperiments(s.experimentClient, t) - test.DeleteAllPipelines(s.pipelineClient, t) - test.DeleteAllRuns(s.runClient, t) + s.checkTerminatedRunDetail(t, longRunningRunDetail, helloWorldExperiment.ID, helloWorldExperiment.Name, longRunningPipelineVersion.ID, longRunningPipelineVersion.Name) } -func (s *RunApiTestSuite) checkTerminatedRunDetail(t *testing.T, runDetail *run_model.APIRunDetail, experimentId string, experimentName string, pipelineId string) { +func (s *RunApiTestSuite) checkTerminatedRunDetail(t *testing.T, runDetail *run_model.APIRunDetail, experimentId string, experimentName string, pipelineVersionId string, 
pipelineVersionName string) { // Check workflow manifest is not empty assert.Contains(t, runDetail.Run.PipelineSpec.WorkflowManifest, "wait-awhile") // Check runtime workflow manifest is not empty assert.Contains(t, runDetail.PipelineRuntime.WorkflowManifest, "wait-awhile") expectedRun := &run_model.APIRun{ - ID: runDetail.Run.ID, - Name: "long running", - Description: "this pipeline will run long enough for us to manually terminate it before it finishes", - Status: "Terminating", + ID: runDetail.Run.ID, + Name: "long running", + Description: "this pipeline will run long enough for us to manually terminate it before it finishes", + Status: "Terminating", + ServiceAccount: "pipeline-runner", PipelineSpec: &run_model.APIPipelineSpec{ - PipelineID: pipelineId, - PipelineName: "long-running.yaml", WorkflowManifest: runDetail.Run.PipelineSpec.WorkflowManifest, }, ResourceReferences: []*run_model.APIResourceReference{ {Key: &run_model.APIResourceKey{Type: run_model.APIResourceTypeEXPERIMENT, ID: experimentId}, Name: experimentName, Relationship: run_model.APIRelationshipOWNER, }, + {Key: &run_model.APIResourceKey{Type: run_model.APIResourceTypePIPELINEVERSION, ID: pipelineVersionId}, + Name: pipelineVersionName, Relationship: run_model.APIRelationshipCREATOR}, }, CreatedAt: runDetail.Run.CreatedAt, ScheduledAt: runDetail.Run.ScheduledAt, FinishedAt: runDetail.Run.FinishedAt, } + + // Need to sort resource references before equality check as the order is non-deterministic + sort.Sort(RunResourceReferenceSorter(runDetail.Run.ResourceReferences)) + sort.Sort(RunResourceReferenceSorter(expectedRun.ResourceReferences)) assert.Equal(t, expectedRun, runDetail.Run) } -func (s *RunApiTestSuite) checkHelloWorldRunDetail(t *testing.T, runDetail *run_model.APIRunDetail, experimentId string, experimentName string, pipelineId string) { +func (s *RunApiTestSuite) checkHelloWorldRunDetail(t *testing.T, runDetail *run_model.APIRunDetail, experimentId string, experimentName string, pipelineVersionId string, pipelineVersionName string) { // Check workflow manifest is not empty assert.Contains(t, runDetail.Run.PipelineSpec.WorkflowManifest, "whalesay") // Check runtime workflow manifest is not empty assert.Contains(t, runDetail.PipelineRuntime.WorkflowManifest, "whalesay") expectedRun := &run_model.APIRun{ - ID: runDetail.Run.ID, - Name: "hello world", - Description: "this is hello world", - Status: runDetail.Run.Status, + ID: runDetail.Run.ID, + Name: "hello world", + Description: "this is hello world", + Status: runDetail.Run.Status, + ServiceAccount: "pipeline-runner", PipelineSpec: &run_model.APIPipelineSpec{ - PipelineID: pipelineId, - PipelineName: "hello-world.yaml", WorkflowManifest: runDetail.Run.PipelineSpec.WorkflowManifest, }, ResourceReferences: []*run_model.APIResourceReference{ {Key: &run_model.APIResourceKey{Type: run_model.APIResourceTypeEXPERIMENT, ID: experimentId}, Name: experimentName, Relationship: run_model.APIRelationshipOWNER, }, + {Key: &run_model.APIResourceKey{Type: run_model.APIResourceTypePIPELINEVERSION, ID: pipelineVersionId}, + Name: pipelineVersionName, Relationship: run_model.APIRelationshipCREATOR, + }, }, CreatedAt: runDetail.Run.CreatedAt, ScheduledAt: runDetail.Run.ScheduledAt, FinishedAt: runDetail.Run.FinishedAt, } + + // Need to sort resource references before equality check as the order is non-deterministic + sort.Sort(RunResourceReferenceSorter(runDetail.Run.ResourceReferences)) + sort.Sort(RunResourceReferenceSorter(expectedRun.ResourceReferences)) assert.Equal(t, 
expectedRun, runDetail.Run)
 }
 
@@ -286,10 +320,11 @@ func (s *RunApiTestSuite) checkArgParamsRunDetail(t *testing.T, runDetail *run_m
 	// Check runtime workflow manifest is not empty
 	assert.Contains(t, runDetail.PipelineRuntime.WorkflowManifest, "arguments-parameters-")
 	expectedRun := &run_model.APIRun{
-		ID:          runDetail.Run.ID,
-		Name:        "argument parameter",
-		Description: "this is argument parameter",
-		Status:      runDetail.Run.Status,
+		ID:             runDetail.Run.ID,
+		Name:           "argument parameter",
+		Description:    "this is argument parameter",
+		Status:         runDetail.Run.Status,
+		ServiceAccount: "pipeline-runner",
 		PipelineSpec: &run_model.APIPipelineSpec{
 			WorkflowManifest: string(argParamsBytes),
 			Parameters: []*run_model.APIParameter{
@@ -306,9 +341,25 @@ func (s *RunApiTestSuite) checkArgParamsRunDetail(t *testing.T, runDetail *run_m
 		ScheduledAt: runDetail.Run.ScheduledAt,
 		FinishedAt:  runDetail.Run.FinishedAt,
 	}
+
 	assert.Equal(t, expectedRun, runDetail.Run)
 }
 
 func TestRunApi(t *testing.T) {
 	suite.Run(t, new(RunApiTestSuite))
 }
+
+func (s *RunApiTestSuite) TearDownSuite() {
+	if *runIntegrationTests {
+		if !*isDevMode {
+			s.cleanUp()
+		}
+	}
+}
+
+func (s *RunApiTestSuite) cleanUp() {
+	/* ---------- Clean up ---------- */
+	test.DeleteAllExperiments(s.experimentClient, s.T())
+	test.DeleteAllPipelines(s.pipelineClient, s.T())
+	test.DeleteAllRuns(s.runClient, s.T())
+}
diff --git a/backend/test/integration/run_tests_locally.sh b/backend/test/integration/run_tests_locally.sh
new file mode 100755
index 00000000000..3240476c900
--- /dev/null
+++ b/backend/test/integration/run_tests_locally.sh
@@ -0,0 +1,25 @@
+#!/bin/bash
+
+set -e
+
+if [ -z "${NAMESPACE}" ]; then
+  echo "NAMESPACE env var is not provided, please set it to your KFP namespace"
+  exit 1
+fi
+
+echo "The API integration tests run against the cluster your kubectl communicates with.";
+echo "It's currently '$(kubectl config current-context)'."
+echo "WARNING: this will delete all existing KFP data in this cluster."
+read -r -p "Are you sure? [y/N] " response
+case "$response" in
+  [yY][eE][sS]|[yY])
+    ;;
+  *)
+    exit
+    ;;
+esac
+
+echo "Starting integration tests..."
+command="go test -v ./... -namespace ${NAMESPACE} -args -runIntegrationTests=true -isDevMode=true"
+echo $command "$@"
+$command "$@"
diff --git a/backend/test/integration/upgrade_test.go b/backend/test/integration/upgrade_test.go
new file mode 100644
index 00000000000..4245f229c70
--- /dev/null
+++ b/backend/test/integration/upgrade_test.go
@@ -0,0 +1,446 @@
+package integration
+
+import (
+	"io/ioutil"
+	"testing"
+	"time"
+
+	"github.com/argoproj/argo/pkg/apis/workflow/v1alpha1"
+	"github.com/ghodss/yaml"
+	"github.com/golang/glog"
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
+	"github.com/stretchr/testify/suite"
+
+	experimentParams "github.com/kubeflow/pipelines/backend/api/go_http_client/experiment_client/experiment_service"
+	"github.com/kubeflow/pipelines/backend/api/go_http_client/experiment_model"
+	jobparams "github.com/kubeflow/pipelines/backend/api/go_http_client/job_client/job_service"
+	"github.com/kubeflow/pipelines/backend/api/go_http_client/job_model"
+	pipelineParams "github.com/kubeflow/pipelines/backend/api/go_http_client/pipeline_client/pipeline_service"
+	"github.com/kubeflow/pipelines/backend/api/go_http_client/pipeline_model"
+	uploadParams "github.com/kubeflow/pipelines/backend/api/go_http_client/pipeline_upload_client/pipeline_upload_service"
+	runParams "github.com/kubeflow/pipelines/backend/api/go_http_client/run_client/run_service"
+	"github.com/kubeflow/pipelines/backend/api/go_http_client/run_model"
+	"github.com/kubeflow/pipelines/backend/src/common/client/api_server"
+	"github.com/kubeflow/pipelines/backend/src/common/util"
+	"github.com/kubeflow/pipelines/backend/test"
+)
+
+// Methods are organized into two types: "prepare" and "verify".
+// "prepare" tests set up resources before the upgrade.
+// "verify" tests verify that resources are as expected after the upgrade.
+type UpgradeTests struct {
+	suite.Suite
+	namespace            string
+	experimentClient     *api_server.ExperimentClient
+	pipelineClient       *api_server.PipelineClient
+	pipelineUploadClient *api_server.PipelineUploadClient
+	runClient            *api_server.RunClient
+	jobClient            *api_server.JobClient
+}
+
+func TestUpgrade(t *testing.T) {
+	suite.Run(t, new(UpgradeTests))
+}
+
+func (s *UpgradeTests) TestPrepare() {
+	t := s.T()
+
+	test.DeleteAllJobs(s.jobClient, t)
+	test.DeleteAllRuns(s.runClient, t)
+	test.DeleteAllPipelines(s.pipelineClient, t)
+	test.DeleteAllExperiments(s.experimentClient, t)
+
+	s.PrepareExperiments()
+	s.PreparePipelines()
+	s.PrepareRuns()
+	s.PrepareJobs()
+}
+
+func (s *UpgradeTests) TestVerify() {
+	s.VerifyExperiments()
+	s.VerifyPipelines()
+	// TODO(jingzhang36): temporarily comment out the verification of runs and
+	// jobs since this PR changes the API response and hence a diff between the
+	// response from the previous release and that from this PR is expected.
+	// Will put them back after the next release is cut.
+	// s.VerifyRuns()
+	// s.VerifyJobs()
+}
+
+// Check the namespace has ML pipeline installed and ready.
+func (s *UpgradeTests) SetupSuite() {
+	// Integration tests also run these tests first to ensure they work, so that
+	// when integration tests pass and upgrade tests fail, we know for sure the
+	// upgrade process went wrong somehow.
+	if !(*runIntegrationTests || *runUpgradeTests) {
+		s.T().SkipNow()
+		return
+	}
+
+	var err error
+	if !*isDevMode {
+		err = test.WaitForReady(*namespace, *initializeTimeout)
+		if err != nil {
+			glog.Exitf("Failed to initialize test. Error: %v", err)
+		}
+	}
+	s.namespace = *namespace
+	clientConfig := test.GetClientConfig(*namespace)
+	s.experimentClient, err = api_server.NewExperimentClient(clientConfig, false)
+	if err != nil {
+		glog.Exitf("Failed to get experiment client. Error: %v", err)
+	}
+	s.pipelineUploadClient, err = api_server.NewPipelineUploadClient(clientConfig, false)
+	if err != nil {
+		glog.Exitf("Failed to get pipeline upload client. Error: %s", err.Error())
+	}
+	s.pipelineClient, err = api_server.NewPipelineClient(clientConfig, false)
+	if err != nil {
+		glog.Exitf("Failed to get pipeline client. Error: %s", err.Error())
+	}
+	s.runClient, err = api_server.NewRunClient(clientConfig, false)
+	if err != nil {
+		glog.Exitf("Failed to get run client. Error: %s", err.Error())
+	}
+	s.jobClient, err = api_server.NewJobClient(clientConfig, false)
+	if err != nil {
+		glog.Exitf("Failed to get job client. Error: %s", err.Error())
+	}
+}
+
+func (s *UpgradeTests) TearDownSuite() {
+	if *runIntegrationTests {
+		t := s.T()
+
+		// Clean up after the suite to unblock other tests. (Not needed for upgrade
+		// tests, because they need the changes made by the prepare tests to persist
+		// and be verified later.)
+		test.DeleteAllExperiments(s.experimentClient, t)
+		test.DeleteAllPipelines(s.pipelineClient, t)
+		test.DeleteAllRuns(s.runClient, t)
+		test.DeleteAllJobs(s.jobClient, t)
+	}
+}
+
+func (s *UpgradeTests) PrepareExperiments() {
+	t := s.T()
+
+	/* ---------- Create a new experiment ---------- */
+	experiment := &experiment_model.APIExperiment{Name: "training", Description: "my first experiment"}
+	_, err := s.experimentClient.Create(&experimentParams.CreateExperimentParams{
+		Body: experiment,
+	})
+	require.Nil(t, err)
+
+	/* ---------- Create a few more new experiments ---------- */
+	// This ensures they can be sorted by creation time in the expected order.
+	time.Sleep(1 * time.Second)
+	experiment = &experiment_model.APIExperiment{Name: "prediction", Description: "my second experiment"}
+	_, err = s.experimentClient.Create(&experimentParams.CreateExperimentParams{
+		Body: experiment,
+	})
+	require.Nil(t, err)
+
+	time.Sleep(1 * time.Second)
+	experiment = &experiment_model.APIExperiment{Name: "moonshot", Description: "my third experiment"}
+	_, err = s.experimentClient.Create(&experimentParams.CreateExperimentParams{
+		Body: experiment,
+	})
+	require.Nil(t, err)
+}
+
+func (s *UpgradeTests) VerifyExperiments() {
+	t := s.T()
+
+	/* ---------- Verify list experiments sorted by creation time ---------- */
+	experiments, _, _, err := s.experimentClient.List(
+		&experimentParams.ListExperimentParams{SortBy: util.StringPointer("created_at")})
+	require.Nil(t, err)
+	// After upgrade, a default experiment may be inserted, but the oldest 3
+	// experiments should be the ones created in this test.
+	require.True(t, len(experiments) >= 3)
+
+	assert.Equal(t, "training", experiments[0].Name)
+	assert.Equal(t, "my first experiment", experiments[0].Description)
+	assert.NotEmpty(t, experiments[0].ID)
+	assert.NotEmpty(t, experiments[0].CreatedAt)
+
+	assert.Equal(t, "prediction", experiments[1].Name)
+	assert.Equal(t, "my second experiment", experiments[1].Description)
+	assert.NotEmpty(t, experiments[1].ID)
+	assert.NotEmpty(t, experiments[1].CreatedAt)
+
+	assert.Equal(t, "moonshot", experiments[2].Name)
+	assert.Equal(t, "my third experiment", experiments[2].Description)
+	assert.NotEmpty(t, experiments[2].ID)
+	assert.NotEmpty(t, experiments[2].CreatedAt)
+}
+
+// TODO(jingzhang36): prepare pipeline versions.
+func (s *UpgradeTests) PreparePipelines() { + t := s.T() + + test.DeleteAllPipelines(s.pipelineClient, t) + + /* ---------- Upload pipelines YAML ---------- */ + argumentYAMLPipeline, err := s.pipelineUploadClient.UploadFile("../resources/arguments-parameters.yaml", uploadParams.NewUploadPipelineParams()) + require.Nil(t, err) + assert.Equal(t, "arguments-parameters.yaml", argumentYAMLPipeline.Name) + + /* ---------- Import pipeline YAML by URL ---------- */ + time.Sleep(1 * time.Second) + sequentialPipeline, err := s.pipelineClient.Create(&pipelineParams.CreatePipelineParams{ + Body: &pipeline_model.APIPipeline{Name: "sequential", URL: &pipeline_model.APIURL{ + PipelineURL: "https://storage.googleapis.com/ml-pipeline-dataset/sequential.yaml"}}}) + require.Nil(t, err) + assert.Equal(t, "sequential", sequentialPipeline.Name) + + /* ---------- Upload pipelines zip ---------- */ + time.Sleep(1 * time.Second) + argumentUploadPipeline, err := s.pipelineUploadClient.UploadFile( + "../resources/arguments.pipeline.zip", &uploadParams.UploadPipelineParams{Name: util.StringPointer("zip-arguments-parameters")}) + require.Nil(t, err) + assert.Equal(t, "zip-arguments-parameters", argumentUploadPipeline.Name) + + /* ---------- Import pipeline tarball by URL ---------- */ + time.Sleep(1 * time.Second) + argumentUrlPipeline, err := s.pipelineClient.Create(&pipelineParams.CreatePipelineParams{ + Body: &pipeline_model.APIPipeline{URL: &pipeline_model.APIURL{ + PipelineURL: "https://storage.googleapis.com/ml-pipeline-dataset/arguments.pipeline.zip"}}}) + require.Nil(t, err) + assert.Equal(t, "arguments.pipeline.zip", argumentUrlPipeline.Name) + + time.Sleep(1 * time.Second) +} + +func (s *UpgradeTests) VerifyPipelines() { + t := s.T() + + /* ---------- Verify list pipeline sorted by creation time ---------- */ + pipelines, _, _, err := s.pipelineClient.List( + &pipelineParams.ListPipelinesParams{SortBy: util.StringPointer("created_at")}) + require.Nil(t, err) + // During upgrade, default pipelines may be installed, so we only verify the + // 4 oldest pipelines here. 
+ assert.True(t, len(pipelines) >= 4) + assert.Equal(t, "arguments-parameters.yaml", pipelines[0].Name) + assert.Equal(t, "sequential", pipelines[1].Name) + assert.Equal(t, "zip-arguments-parameters", pipelines[2].Name) + assert.Equal(t, "arguments.pipeline.zip", pipelines[3].Name) + + verifyPipeline(t, pipelines[0]) + + /* ---------- Verify get template works ---------- */ + template, err := s.pipelineClient.GetTemplate(&pipelineParams.GetTemplateParams{ID: pipelines[0].ID}) + require.Nil(t, err) + expected, err := ioutil.ReadFile("../resources/arguments-parameters.yaml") + require.Nil(t, err) + var expectedWorkflow v1alpha1.Workflow + err = yaml.Unmarshal(expected, &expectedWorkflow) + assert.Equal(t, expectedWorkflow, *template) +} + +func (s *UpgradeTests) PrepareRuns() { + t := s.T() + + helloWorldPipeline := s.getHelloWorldPipeline(true) + helloWorldExperiment := s.getHelloWorldExperiment(true) + if helloWorldExperiment == nil { + helloWorldExperiment = s.createHelloWorldExperiment() + } + + hello2 := s.getHelloWorldExperiment(true) + require.Equal(t, hello2, helloWorldExperiment) + + /* ---------- Create a new hello world run by specifying pipeline ID ---------- */ + createRunRequest := &runParams.CreateRunParams{Body: &run_model.APIRun{ + Name: "hello world", + Description: "this is hello world", + PipelineSpec: &run_model.APIPipelineSpec{ + PipelineID: helloWorldPipeline.ID, + }, + ResourceReferences: []*run_model.APIResourceReference{ + {Key: &run_model.APIResourceKey{Type: run_model.APIResourceTypeEXPERIMENT, ID: helloWorldExperiment.ID}, + Name: helloWorldExperiment.Name, Relationship: run_model.APIRelationshipOWNER}, + }, + }} + _, _, err := s.runClient.Create(createRunRequest) + require.Nil(t, err) +} + +func (s *UpgradeTests) VerifyRuns() { + t := s.T() + + /* ---------- List the runs, sorted by creation time ---------- */ + runs, _, _, err := s.runClient.List( + &runParams.ListRunsParams{SortBy: util.StringPointer("created_at")}) + require.Nil(t, err) + require.True(t, len(runs) >= 1) + require.Equal(t, "hello world", runs[0].Name) + + /* ---------- Get hello world run ---------- */ + helloWorldRunDetail, _, err := s.runClient.Get(&runParams.GetRunParams{RunID: runs[0].ID}) + require.Nil(t, err) + checkHelloWorldRunDetail(t, helloWorldRunDetail) +} + +func (s *UpgradeTests) PrepareJobs() { + t := s.T() + + pipeline := s.getHelloWorldPipeline(true) + experiment := s.getHelloWorldExperiment(true) + + /* ---------- Create a new hello world job by specifying pipeline ID ---------- */ + createJobRequest := &jobparams.CreateJobParams{Body: &job_model.APIJob{ + Name: "hello world", + Description: "this is hello world", + PipelineSpec: &job_model.APIPipelineSpec{ + PipelineID: pipeline.ID, + }, + ResourceReferences: []*job_model.APIResourceReference{ + {Key: &job_model.APIResourceKey{Type: job_model.APIResourceTypeEXPERIMENT, ID: experiment.ID}, + Relationship: job_model.APIRelationshipOWNER}, + }, + MaxConcurrency: 10, + Enabled: true, + NoCatchup: true, + }} + _, err := s.jobClient.Create(createJobRequest) + require.Nil(t, err) +} + +func (s *UpgradeTests) VerifyJobs() { + t := s.T() + + pipeline := s.getHelloWorldPipeline(false) + experiment := s.getHelloWorldExperiment(false) + + /* ---------- Get hello world job ---------- */ + jobs, _, _, err := s.jobClient.List(&jobparams.ListJobsParams{}) + require.Nil(t, err) + require.Len(t, jobs, 1) + job := jobs[0] + + // Check workflow manifest is not empty + assert.Contains(t, job.PipelineSpec.WorkflowManifest, "whalesay") + 
expectedJob := &job_model.APIJob{ + ID: job.ID, + Name: "hello world", + Description: "this is hello world", + PipelineSpec: &job_model.APIPipelineSpec{ + PipelineID: pipeline.ID, + PipelineName: "hello-world.yaml", + WorkflowManifest: job.PipelineSpec.WorkflowManifest, + }, + ResourceReferences: []*job_model.APIResourceReference{ + {Key: &job_model.APIResourceKey{Type: job_model.APIResourceTypeEXPERIMENT, ID: experiment.ID}, + Name: experiment.Name, Relationship: job_model.APIRelationshipOWNER, + }, + {Key: &job_model.APIResourceKey{ID: pipeline.ID, Type: job_model.APIResourceTypePIPELINEVERSION}, + Name: "hello-world.yaml", Relationship: job_model.APIRelationshipCREATOR, + }, + }, + MaxConcurrency: 10, + NoCatchup: true, + Enabled: true, + CreatedAt: job.CreatedAt, + UpdatedAt: job.UpdatedAt, + Status: job.Status, + Trigger: &job_model.APITrigger{}, + } + + assert.Equal(t, expectedJob, job) +} + +func checkHelloWorldRunDetail(t *testing.T, runDetail *run_model.APIRunDetail) { + // Check workflow manifest is not empty + assert.Contains(t, runDetail.Run.PipelineSpec.WorkflowManifest, "whalesay") + // Check runtime workflow manifest is not empty + assert.Contains(t, runDetail.PipelineRuntime.WorkflowManifest, "whalesay") + + expectedRun := &run_model.APIRun{ + ID: runDetail.Run.ID, + Name: "hello world", + Description: "this is hello world", + Status: runDetail.Run.Status, + PipelineSpec: &run_model.APIPipelineSpec{ + PipelineID: runDetail.Run.PipelineSpec.PipelineID, + PipelineName: "hello-world.yaml", + WorkflowManifest: runDetail.Run.PipelineSpec.WorkflowManifest, + }, + ResourceReferences: []*run_model.APIResourceReference{ + {Key: &run_model.APIResourceKey{Type: run_model.APIResourceTypeEXPERIMENT, ID: runDetail.Run.ResourceReferences[0].Key.ID}, + Name: "hello world experiment", Relationship: run_model.APIRelationshipOWNER, + }, + {Key: &run_model.APIResourceKey{ID: runDetail.Run.PipelineSpec.PipelineID, Type: run_model.APIResourceTypePIPELINEVERSION}, + Name: "hello-world.yaml", Relationship: run_model.APIRelationshipCREATOR, + }, + }, + CreatedAt: runDetail.Run.CreatedAt, + ScheduledAt: runDetail.Run.ScheduledAt, + FinishedAt: runDetail.Run.FinishedAt, + } + assert.Equal(t, expectedRun, runDetail.Run) +} + +func (s *UpgradeTests) createHelloWorldExperiment() *experiment_model.APIExperiment { + t := s.T() + + experiment := &experiment_model.APIExperiment{Name: "hello world experiment"} + helloWorldExperiment, err := s.experimentClient.Create(&experimentParams.CreateExperimentParams{Body: experiment}) + require.Nil(t, err) + + return helloWorldExperiment +} + +func (s *UpgradeTests) getHelloWorldExperiment(createIfNotExist bool) *experiment_model.APIExperiment { + t := s.T() + + experiments, err := s.experimentClient.ListAll(&experimentParams.ListExperimentParams{}, 1000) + require.Nil(t, err) + var helloWorldExperiment *experiment_model.APIExperiment + for _, experiment := range experiments { + if experiment.Name == "hello world experiment" { + helloWorldExperiment = experiment + } + } + + if helloWorldExperiment == nil && createIfNotExist { + return s.createHelloWorldExperiment() + } + + return helloWorldExperiment +} + +func (s *UpgradeTests) getHelloWorldPipeline(createIfNotExist bool) *pipeline_model.APIPipeline { + t := s.T() + + pipelines, err := s.pipelineClient.ListAll(&pipelineParams.ListPipelinesParams{}, 1000) + require.Nil(t, err) + var helloWorldPipeline *pipeline_model.APIPipeline + for _, pipeline := range pipelines { + if pipeline.Name == "hello-world.yaml" { + 
helloWorldPipeline = pipeline
+		}
+	}
+
+	if helloWorldPipeline == nil && createIfNotExist {
+		return s.createHelloWorldPipeline()
+	}
+
+	return helloWorldPipeline
+}
+
+func (s *UpgradeTests) createHelloWorldPipeline() *pipeline_model.APIPipeline {
+	t := s.T()
+
+	/* ---------- Upload pipelines YAML ---------- */
+	uploadedPipeline, err := s.pipelineUploadClient.UploadFile("../resources/hello-world.yaml", uploadParams.NewUploadPipelineParams())
+	require.Nil(t, err)
+
+	helloWorldPipeline, err := s.pipelineClient.Get(&pipelineParams.GetPipelineParams{ID: uploadedPipeline.ID})
+	require.Nil(t, err)
+
+	return helloWorldPipeline
+}
diff --git a/backend/test/integration/visualization_api_test.go b/backend/test/integration/visualization_api_test.go
index c5fc3d32f56..d299c7e8cfb 100644
--- a/backend/test/integration/visualization_api_test.go
+++ b/backend/test/integration/visualization_api_test.go
@@ -14,7 +14,7 @@ import (
 
 type VisualizationApiTest struct {
 	suite.Suite
-	namespace string
+	namespace           string
 	visualizationClient *api_server.VisualizationClient
 }
 
@@ -25,12 +25,15 @@ func (s *VisualizationApiTest) SetupTest() {
 		return
 	}
 
-	err := test.WaitForReady(*namespace, *initializeTimeout)
-	if err != nil {
-		glog.Exitf("Failed to initialize test. Error: %v", err)
+	if !*isDevMode {
+		err := test.WaitForReady(*namespace, *initializeTimeout)
+		if err != nil {
+			glog.Exitf("Failed to initialize test. Error: %v", err)
+		}
 	}
 	s.namespace = *namespace
 	clientConfig := test.GetClientConfig(*namespace)
+	var err error
 	s.visualizationClient, err = api_server.NewVisualizationClient(clientConfig, false)
 	if err != nil {
 		glog.Exitf("Failed to get experiment client. Error: %v", err)
@@ -43,7 +46,7 @@ func (s *VisualizationApiTest) TestVisualizationAPI() {
 
 	/* ---------- Generate custom visualization --------- */
 	visualization := &visualization_model.APIVisualization{
 		Arguments: `{"code": ["print(2)"]}`,
-		Type: visualization_model.APIVisualizationTypeCUSTOM,
+		Type:      visualization_model.APIVisualizationTypeCUSTOM,
 	}
 	customVisualization, err := s.visualizationClient.Create(&params.CreateVisualizationParams{
 		Body: visualization,
diff --git a/backend/update_requirements.sh b/backend/update_requirements.sh
new file mode 100755
index 00000000000..5a212069aa7
--- /dev/null
+++ b/backend/update_requirements.sh
@@ -0,0 +1,11 @@
+#!/bin/bash
+
+# Usage: update_requirements.sh [python-image] <requirements.in >requirements.txt
+
+set -euo pipefail
+IMAGE=${1:-"python:3.5"}
+docker run --interactive --rm "$IMAGE" sh -c '
+  python3 -m pip install pip setuptools --upgrade --quiet
+  python3 -m pip install pip-tools==5.0.0 --quiet
+  pip-compile --verbose --output-file - -
+'
diff --git a/components/OWNERS b/components/OWNERS
index d4161ec38bb..3eb413c808e 100644
--- a/components/OWNERS
+++ b/components/OWNERS
@@ -2,8 +2,10 @@ approvers:
   - Ark-kun
   - gaoning777
   - hongye-sun
+  - numerology
 reviewers:
   - Ark-kun
   - gaoning777
   - hongye-sun
+  - numerology
   - animeshsingh
diff --git a/components/aws/athena/query/component.yaml b/components/aws/athena/query/component.yaml
index 12cc8ed50a6..8b5c748dbe5 100644
--- a/components/aws/athena/query/component.yaml
+++ b/components/aws/athena/query/component.yaml
@@ -20,6 +20,7 @@ inputs:
   - {name: database, description: 'The name of the database.'}
   - {name: query, description: 'The SQL query statements to be executed in Athena.'}
   - {name: output_path, description: 'The path to the Amazon S3 location where logs for this cluster are stored.'}
+  - {name: workgroup, description: 'Optional argument to provide Athena workgroup'}
 outputs:
   - {name: output_path, description: 'The path to the S3 bucket containing the query output in CSV format.'}
 implementation:
@@ -30,7 +31,8 @@ implementation:
       --region, {inputValue: region},
       --database, {inputValue: database},
       --query, {inputValue: query},
-      --output, {inputValue: output_path}
+      --output, {inputValue: output_path},
+      --workgroup, {inputValue: workgroup}
     ]
     fileOutputs:
       output_path: /output.txt
diff --git a/components/aws/athena/query/src/query.py b/components/aws/athena/query/src/query.py
index 8563d503fc5..06a98dbc38c 100644
--- a/components/aws/athena/query/src/query.py
+++ b/components/aws/athena/query/src/query.py
@@ -20,73 +20,101 @@
 
 def get_client(region=None):
-    """Builds a client to the AWS Athena API."""
-    client = boto3.client('athena', region_name=region)
-    return client
-
-def query(client, query, database, output):
-    response = client.start_query_execution(
-        QueryString=query,
-        QueryExecutionContext={
-            'Database': database
-        },
-        ResultConfiguration={
-            'OutputLocation': output,
-        }
-    )
-
-    execution_id = response['QueryExecutionId']
-    logging.info('Execution ID: %s', execution_id)
-
-    # Athena query is aync call, we need to fetch results and wait for execution
-    state = 'RUNNING'
-    max_execution = 5 # TODO: this should be an optional parameter from users. or use timeout
-
-    while (max_execution > 0 and state in ['RUNNING']):
-        max_execution = max_execution - 1
-        response = client.get_query_execution(QueryExecutionId = execution_id)
-
-        if 'QueryExecution' in response and \
-                'Status' in response['QueryExecution'] and \
-                'State' in response['QueryExecution']['Status']:
-            state = response['QueryExecution']['Status']['State']
-            if state == 'FAILED':
-                raise Exception('Athena Query Failed')
-            elif state == 'SUCCEEDED':
-                s3_path = response['QueryExecution']['ResultConfiguration']['OutputLocation']
-                # could be multiple files?
-                filename = re.findall('.*\/(.*)', s3_path)[0]
-                logging.info("S3 output file name %s", filename)
-                break
-        time.sleep(5)
-
-    # TODO:(@Jeffwan) Add more details.
-    result = {
-        'total_bytes_processed': response['QueryExecution']['Statistics']['DataScannedInBytes'],
-        'filename': filename
-    }
-
-    return result
+    """Builds a client to the AWS Athena API."""
+    client = boto3.client("athena", region_name=region)
+    return client
+
+
+def query(client, query, database, output, workgroup=None):
+    """Executes an AWS Athena query."""
+    params = dict(
+        QueryString=query,
+        QueryExecutionContext={"Database": database},
+        ResultConfiguration={"OutputLocation": output,},
+    )
+    if workgroup:
+        params.update(dict(WorkGroup=workgroup))
+
+    response = client.start_query_execution(**params)
+
+    execution_id = response["QueryExecutionId"]
+    logging.info("Execution ID: %s", execution_id)
+
+    # Athena query is an async call; we need to fetch results and wait for execution
+    state = "RUNNING"
+    max_execution = (
+        5  # TODO: this should be an optional parameter from users. or use timeout
+    )
+
+    while max_execution > 0 and state in ["RUNNING"]:
+        max_execution = max_execution - 1
+        response = client.get_query_execution(QueryExecutionId=execution_id)
+
+        if (
+            "QueryExecution" in response
+            and "Status" in response["QueryExecution"]
+            and "State" in response["QueryExecution"]["Status"]
+        ):
+            state = response["QueryExecution"]["Status"]["State"]
+            if state == "FAILED":
+                raise Exception("Athena Query Failed")
+            elif state == "SUCCEEDED":
+                s3_path = response["QueryExecution"]["ResultConfiguration"][
+                    "OutputLocation"
+                ]
+                # could be multiple files?
+                filename = re.findall(".*\/(.*)", s3_path)[0]
+                logging.info("S3 output file name %s", filename)
+                break
+        time.sleep(5)
+
+    # TODO:(@Jeffwan) Add more details.
+    result = {
+        "total_bytes_processed": response["QueryExecution"]["Statistics"][
+            "DataScannedInBytes"
+        ],
+        "filename": filename,
+    }
+
+    return result
 
 
-def main():
-    logging.getLogger().setLevel(logging.INFO)
-    parser = argparse.ArgumentParser()
-    parser.add_argument('--region', type=str, help='Athena region.')
-    parser.add_argument('--database', type=str, required=True, help='The name of the database.')
-    parser.add_argument('--query', type=str, required=True, help='The SQL query statements to be executed in Athena.')
-    parser.add_argument('--output', type=str, required=False,
-        help='The location in Amazon S3 where your query results are stored, such as s3://path/to/query/bucket/')
-
-    args = parser.parse_args()
-
-    client = get_client(args.region)
-    results = query(client, args.query, args.database, args.output)
-    results['output'] = args.output
-    logging.info('Athena results: %s', results)
-    with open('/output.txt', 'w+') as f:
-        json.dump(results, f)
-
-
-if __name__ == '__main__':
-    main()
+def main():
+    logging.getLogger().setLevel(logging.INFO)
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--region", type=str, help="Athena region.")
+    parser.add_argument(
+        "--database", type=str, required=True, help="The name of the database."
+    )
+    parser.add_argument(
+        "--query",
+        type=str,
+        required=True,
+        help="The SQL query statements to be executed in Athena.",
+    )
+    parser.add_argument(
+        "--output",
+        type=str,
+        required=False,
+        help="The location in Amazon S3 where your query results are stored, such as s3://path/to/query/bucket/",
+    )
+    parser.add_argument(
+        "--workgroup",
+        type=str,
+        required=False,
+        help="Optional argument to provide Athena workgroup",
+    )
+
+    args = parser.parse_args()
+
+    client = get_client(args.region)
+    results = query(client, args.query, args.database, args.output, args.workgroup)
+
+    results["output"] = args.output
+    logging.info("Athena results: %s", results)
+    with open("/output.txt", "w+") as f:
+        json.dump(results, f)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/components/aws/sagemaker/.gitignore b/components/aws/sagemaker/.gitignore
new file mode 100644
index 00000000000..58c9068fc5c
--- /dev/null
+++ b/components/aws/sagemaker/.gitignore
@@ -0,0 +1,2 @@
+# Any environment variable files
+**/*/.env
\ No newline at end of file
diff --git a/components/aws/sagemaker/Changelog.md b/components/aws/sagemaker/Changelog.md
new file mode 100644
index 00000000000..bb1d8cca0e3
--- /dev/null
+++ b/components/aws/sagemaker/Changelog.md
@@ -0,0 +1,40 @@
+# Change log for AWS SageMaker Components
+
+The version of the AWS SageMaker Components is determined by the docker image tag used in the YAML spec.
+Repository: https://hub.docker.com/repository/docker/amazon/aws-sagemaker-kfp-components
+
+---------------------------------------------
+
+**Change log for version 0.3.0**
+- Remove data_location parameters from all components
+  (Use "channels" parameter instead)
+
+> Pull requests: [#3518](https://github.com/kubeflow/pipelines/pull/3518)
+
+
+**Change log for version 0.2.0 (Apr 14, 2020)**
+- Fix bug in Ground Truth component
+- Add user agent header to boto3 client
+
+> Pull requests: [#3474](https://github.com/kubeflow/pipelines/pull/3474), [#3487](https://github.com/kubeflow/pipelines/pull/3487)
+
+
+---------------------------------------------
+
+## Old
+
+These are the old
images which were in https://hub.docker.com/r/redbackthomson/aws-kubeflow-sagemaker/tags + +**Change log 20200402** +- Fix for vpc issue +- Add license files +- Use AmazonLinux instead of Ubuntu +- Pin the pip packages + + +> Pull requests: [#3374](https://github.com/kubeflow/pipelines/pull/3374), [#3397](https://github.com/kubeflow/pipelines/pull/3397) + +No change log available for older images +Please check git log + + diff --git a/components/aws/sagemaker/Dockerfile b/components/aws/sagemaker/Dockerfile index 25c8b114bec..4f9afd02d5a 100644 --- a/components/aws/sagemaker/Dockerfile +++ b/components/aws/sagemaker/Dockerfile @@ -8,17 +8,29 @@ # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and -# limitations under the License. +# limitations under the License. -FROM ubuntu:16.04 +FROM amazonlinux:2 -RUN apt-get update -y && apt-get install --no-install-recommends -y -q ca-certificates python-dev python-setuptools wget unzip - -RUN easy_install pip - -RUN pip install boto3 sagemaker pathlib2 pyyaml==3.12 +ENV PYTHONPATH /app +RUN yum update -y \ + && yum install -y -q \ + ca-certificates \ + python3 \ + wget \ + unzip + +RUN pip3 install \ + boto3==1.12.33 \ + sagemaker==1.54.0 \ + pathlib2==2.3.5 \ + pyyaml==3.12 + +COPY LICENSE.txt . +COPY NOTICE.txt . +COPY THIRD-PARTY-LICENSES.txt . COPY hyperparameter_tuning/src/hyperparameter_tuning.py . COPY train/src/train.py . COPY deploy/src/deploy.py . @@ -29,6 +41,4 @@ COPY ground_truth/src/ground_truth.py . COPY common /app/common/ -ENV PYTHONPATH /app - ENTRYPOINT [ "bash" ] diff --git a/components/aws/sagemaker/LICENSE.txt b/components/aws/sagemaker/LICENSE.txt new file mode 100644 index 00000000000..d6456956733 --- /dev/null +++ b/components/aws/sagemaker/LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/components/aws/sagemaker/NOTICE.txt b/components/aws/sagemaker/NOTICE.txt new file mode 100644 index 00000000000..fd14bb31554 --- /dev/null +++ b/components/aws/sagemaker/NOTICE.txt @@ -0,0 +1,2 @@ +SageMaker Components for Kubeflow Pipelines +Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
\ No newline at end of file
diff --git a/components/aws/sagemaker/OWNERS b/components/aws/sagemaker/OWNERS
new file mode 100644
index 00000000000..d4d0eaa168f
--- /dev/null
+++ b/components/aws/sagemaker/OWNERS
@@ -0,0 +1,4 @@
+approvers:
+  - surajkota
+  - RedbackThomson
+  - akartsky
diff --git a/components/aws/sagemaker/README.md b/components/aws/sagemaker/README.md
new file mode 100644
index 00000000000..3e2ec03a039
--- /dev/null
+++ b/components/aws/sagemaker/README.md
@@ -0,0 +1,45 @@
+# Amazon SageMaker Components for Kubeflow Pipelines
+
+## Summary
+With Amazon SageMaker Components for Kubeflow Pipelines (KFP), you can create and monitor training, tuning, endpoint deployment, and batch transform jobs in Amazon SageMaker. By running Kubeflow Pipeline jobs on Amazon SageMaker, you move data processing and training jobs from the Kubernetes cluster to Amazon SageMaker’s machine learning-optimized managed service. The job parameters, status, logs, and outputs from Amazon SageMaker are still accessible from the Kubeflow Pipelines UI.
+
+
+## Components
+Amazon SageMaker Components for Kubeflow Pipelines offer an alternative to launching compute-intensive jobs in Kubernetes and integrate the orchestration benefits of Kubeflow Pipelines. The following Amazon SageMaker components have been created to integrate 6 key Amazon SageMaker features into your ML workflows. You can create a Kubeflow Pipeline built entirely using these components, or integrate individual components into your workflow as needed.
+
+There is no additional charge for using Amazon SageMaker Components for Kubeflow Pipelines. You incur charges for any Amazon SageMaker resources you use through these components.
+
+### Training components
+
+#### Training
+
+The Training component allows you to submit Amazon SageMaker Training jobs directly from a Kubeflow Pipelines workflow. For more information, see [SageMaker Training Kubeflow Pipelines component](https://github.com/kubeflow/pipelines/tree/master/components/aws/sagemaker/train).
+
+
+#### Hyperparameter Optimization
+
+The Hyperparameter Optimization component enables you to submit hyperparameter tuning jobs to Amazon SageMaker directly from a Kubeflow Pipelines workflow. For more information, see [SageMaker hyperparameter optimization Kubeflow Pipeline component](https://github.com/kubeflow/pipelines/tree/master/components/aws/sagemaker/hyperparameter_tuning).
+
+
+### Inference components
+
+#### Hosting Deploy
+
+The Deploy component enables you to deploy a model in Amazon SageMaker Hosting from a Kubeflow Pipelines workflow. For more information, see [SageMaker Hosting Services - Create Endpoint Kubeflow Pipeline component](https://github.com/kubeflow/pipelines/tree/master/components/aws/sagemaker/deploy).
+
+#### Batch Transform component
+
+The Batch Transform component enables you to run inference jobs for an entire dataset in Amazon SageMaker from a Kubeflow Pipelines workflow. For more information, see [SageMaker Batch Transform Kubeflow Pipeline component](https://github.com/kubeflow/pipelines/tree/master/components/aws/sagemaker/batch_transform).
+
+
+### Ground Truth components
+
+#### Ground Truth
+
+The Ground Truth component enables you to submit Amazon SageMaker Ground Truth labeling jobs directly from a Kubeflow Pipelines workflow. For more information, see [SageMaker Ground Truth Kubeflow Pipelines component](https://github.com/kubeflow/pipelines/tree/master/components/aws/sagemaker/ground_truth).
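+
+As a minimal, illustrative sketch of how any of these components can be wired into a pipeline with the KFP SDK (the raw component URL, container image, S3 paths, and parameter names below are placeholder assumptions; consult the component.yaml of the component you use for its authoritative inputs):
+
+```python
+import json
+
+import kfp
+from kfp import components, dsl
+
+# Hypothetical raw URL of the training component spec; pin it to the
+# revision you actually depend on.
+sagemaker_train_op = components.load_component_from_url(
+    "https://raw.githubusercontent.com/kubeflow/pipelines/"
+    "master/components/aws/sagemaker/train/component.yaml")
+
+@dsl.pipeline(name="sagemaker-training-example",
+              description="Illustrative pipeline around the SageMaker Training component")
+def training_pipeline(region="us-east-1",
+                      role_arn="arn:aws:iam::123456789012:role/example-sagemaker-role"):
+    # Parameter names and values below are assumptions for illustration only.
+    sagemaker_train_op(
+        region=region,
+        image="123456789012.dkr.ecr.us-east-1.amazonaws.com/example-training-image:latest",
+        channels=json.dumps([{
+            "ChannelName": "train",
+            "DataSource": {"S3DataSource": {
+                "S3Uri": "s3://example-bucket/data",
+                "S3DataType": "S3Prefix"}},
+        }]),
+        instance_type="ml.m5.xlarge",
+        model_artifact_path="s3://example-bucket/models/",
+        role=role_arn,
+    )
+
+if __name__ == "__main__":
+    # Compile to a workflow file that can be uploaded through the KFP UI or client.
+    kfp.compiler.Compiler().compile(training_pipeline, "training_pipeline.yaml")
+```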
+ +#### Workteam + +The Workteam component enables you to create Amazon SageMaker private workteam jobs directly from a Kubeflow Pipelines workflow. For more information, see [SageMaker create private workteam Kubeflow Pipelines component](https://github.com/kubeflow/pipelines/tree/master/components/aws/sagemaker/workteam). + + diff --git a/components/aws/sagemaker/THIRD-PARTY-LICENSES.txt b/components/aws/sagemaker/THIRD-PARTY-LICENSES.txt new file mode 100644 index 00000000000..fc268748af9 --- /dev/null +++ b/components/aws/sagemaker/THIRD-PARTY-LICENSES.txt @@ -0,0 +1,985 @@ +** Amazon SageMaker Components for Kubeflow Pipelines; version 0.3.1 -- +https://github.com/kubeflow/pipelines/tree/master/components/aws/sagemaker +Copyright 2019-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. +** boto3; version 1.12.33 -- https://github.com/boto/boto3/ +Copyright 2013-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. +** botocore; version 1.15.33 -- https://github.com/boto/botocore +Botocore +Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. +** Importlib-metadata; version 1.6.0 -- +https://importlib-metadata.readthedocs.io/en/latest/ +© Copyright 2017-2019, Jason R. Coombs, Barry Warsaw +** s3transfer; version 0.3.3 -- https://github.com/boto/s3transfer/ +s3transfer +Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved. +** sagemaker; version 1.54.0 -- https://aws.amazon.com/sagemaker/ +Amazon SageMaker Python SDK +Copyright 2017-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. +** smdebug-rulesconfig; version 0.1.2 -- +https://github.com/awslabs/sagemaker-debugger-rulesconfig +Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. + +Apache License + +Version 2.0, January 2004 + +http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND +DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, and + distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by the + copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all other + entities that control, are controlled by, or are under common control + with that entity. For the purposes of this definition, "control" means + (i) the power, direct or indirect, to cause the direction or management + of such entity, whether by contract or otherwise, or (ii) ownership of + fifty percent (50%) or more of the outstanding shares, or (iii) + beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity exercising + permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation source, + and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but not limited + to compiled object code, generated documentation, and conversions to + other media types. + + "Work" shall mean the work of authorship, whether in Source or Object + form, made available under the License, as indicated by a copyright + notice that is included in or attached to the work (an example is + provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object form, + that is based on (or derived from) the Work and for which the editorial + revisions, annotations, elaborations, or other modifications represent, + as a whole, an original work of authorship. For the purposes of this + License, Derivative Works shall not include works that remain separable + from, or merely link (or bind by name) to the interfaces of, the Work and + Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including the original + version of the Work and any modifications or additions to that Work or + Derivative Works thereof, that is intentionally submitted to Licensor for + inclusion in the Work by the copyright owner or by an individual or Legal + Entity authorized to submit on behalf of the copyright owner. For the + purposes of this definition, "submitted" means any form of electronic, + verbal, or written communication sent to the Licensor or its + representatives, including but not limited to communication on electronic + mailing lists, source code control systems, and issue tracking systems + that are managed by, or on behalf of, the Licensor for the purpose of + discussing and improving the Work, but excluding communication that is + conspicuously marked or otherwise designated in writing by the copyright + owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity on + behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of this + License, each Contributor hereby grants to You a perpetual, worldwide, + non-exclusive, no-charge, royalty-free, irrevocable copyright license to + reproduce, prepare Derivative Works of, publicly display, publicly perform, + sublicense, and distribute the Work and such Derivative Works in Source or + Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of this + License, each Contributor hereby grants to You a perpetual, worldwide, + non-exclusive, no-charge, royalty-free, irrevocable (except as stated in + this section) patent license to make, have made, use, offer to sell, sell, + import, and otherwise transfer the Work, where such license applies only to + those patent claims licensable by such Contributor that are necessarily + infringed by their Contribution(s) alone or by combination of their + Contribution(s) with the Work to which such Contribution(s) was submitted. + If You institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work or a + Contribution incorporated within the Work constitutes direct or contributory + patent infringement, then any patent licenses granted to You under this + License for that Work shall terminate as of the date such litigation is + filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the Work or + Derivative Works thereof in any medium, with or without modifications, and + in Source or Object form, provided that You meet the following conditions: + + (a) You must give any other recipients of the Work or Derivative Works a + copy of this License; and + + (b) You must cause any modified files to carry prominent notices stating + that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works that You + distribute, all copyright, patent, trademark, and attribution notices + from the Source form of the Work, excluding those notices that do not + pertain to any part of the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must include + a readable copy of the attribution notices contained within such NOTICE + file, excluding those notices that do not pertain to any part of the + Derivative Works, in at least one of the following places: within a + NOTICE text file distributed as part of the Derivative Works; within the + Source form or documentation, if provided along with the Derivative + Works; or, within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents of the + NOTICE file are for informational purposes only and do not modify the + License. You may add Your own attribution notices within Derivative Works + that You distribute, alongside or as an addendum to the NOTICE text from + the Work, provided that such additional attribution notices cannot be + construed as modifying the License. + + You may add Your own copyright statement to Your modifications and may + provide additional or different license terms and conditions for use, + reproduction, or distribution of Your modifications, or for any such + Derivative Works as a whole, provided Your use, reproduction, and + distribution of the Work otherwise complies with the conditions stated in + this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, any + Contribution intentionally submitted for inclusion in the Work by You to the + Licensor shall be under the terms and conditions of this License, without + any additional terms or conditions. Notwithstanding the above, nothing + herein shall supersede or modify the terms of any separate license agreement + you may have executed with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, except + as required for reasonable and customary use in describing the origin of the + Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in + writing, Licensor provides the Work (and each Contributor provides its + Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied, including, without limitation, any + warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or + FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining + the appropriateness of using or redistributing the Work and assume any risks + associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, whether + in tort (including negligence), contract, or otherwise, unless required by + applicable law (such as deliberate and grossly negligent acts) or agreed to + in writing, shall any Contributor be liable to You for damages, including + any direct, indirect, special, incidental, or consequential damages of any + character arising as a result of this License or out of the use or inability + to use the Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all other + commercial damages or losses), even if such Contributor has been advised of + the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing the Work + or Derivative Works thereof, You may choose to offer, and charge a fee for, + acceptance of support, warranty, indemnity, or other liability obligations + and/or rights consistent with this License. However, in accepting such + obligations, You may act only on Your own behalf and on Your sole + responsibility, not on behalf of any other Contributor, and only if You + agree to indemnify, defend, and hold each Contributor harmless for any + liability incurred by, or claims asserted against, such Contributor by + reason of your accepting any such warranty or additional liability. END OF + TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + +To apply the Apache License to your work, attach the following boilerplate +notice, with the fields enclosed by brackets "[]" replaced with your own +identifying information. (Don't include the brackets!) The text should be +enclosed in the appropriate comment syntax for the file format. We also +recommend that a file or class name and description of purpose be included on +the same "printed page" as the copyright notice for easier identification +within third-party archives. + +Copyright [yyyy] [name of copyright owner] + +Licensed under the Apache License, Version 2.0 (the "License"); + +you may not use this file except in compliance with the License. + +You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software + +distributed under the License is distributed on an "AS IS" BASIS, + +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + +See the License for the specific language governing permissions and + +limitations under the License. + +* For Amazon SageMaker Components for Kubeflow Pipelines see also this required +NOTICE: + Copyright 2019-2020 Amazon.com, Inc. or its affiliates. All Rights + Reserved. +* For boto3 see also this required NOTICE: + Copyright 2013-2019 Amazon.com, Inc. or its affiliates. All Rights + Reserved. +* For botocore see also this required NOTICE: + Botocore + Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights + Reserved. +* For Importlib-metadata see also this required NOTICE: + © Copyright 2017-2019, Jason R. Coombs, Barry Warsaw +* For s3transfer see also this required NOTICE: + s3transfer + Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved. +* For sagemaker see also this required NOTICE: + Amazon SageMaker Python SDK + Copyright 2017-2020 Amazon.com, Inc. or its affiliates. All Rights + Reserved. +* For smdebug-rulesconfig see also this required NOTICE: + Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+ +------ + +** docutils; version 0.15.2 -- https://docutils.sourceforge.io/ +Copyright: David Goodger + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +------ + +** mock; version 1.3.0 -- https://github.com/testing-cabal/mock +Copyright (c) 2003-2013, Michael Foord & the mock team +All rights reserved. + +Copyright (c) 2003-2013, Michael Foord & the mock team +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +------ + +** scandir; version 1.10.0 -- https://github.com/benhoyt/scandir +Copyright (c) 2012, Ben Hoyt + +Copyright (c) 2012, Ben Hoyt +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this +list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, +this list of conditions and the following disclaimer in the documentation +and/or other materials provided with the distribution. 
+ +* Neither the name of Ben Hoyt nor the names of its contributors may be used +to endorse or promote products derived from this software without specific +prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +------ + +** enum34; version 1.1.10 -- https://pypi.python.org/pypi/enum34/1.1.6 +Copyright (c) 2013, Ethan Furman. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: + + Redistributions of source code must retain the above + copyright notice, this list of conditions and the + following disclaimer. + + Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials + provided with the distribution. + + Neither the name Ethan Furman nor the names of any + contributors may be used to endorse or promote products + derived from this software without specific prior written + permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. + +------ + +** protobuf; version 3.11.3 -- https://github.com/protocolbuffers/protobuf +Copyright 2008 Google Inc. All rights reserved. + +Copyright 2008 Google Inc. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +Code generated by the Protocol Buffer compiler is owned by the owner +of the input file used when generating it. This code is not +standalone and requires a support library to be linked with it. This +support library is itself covered by the above license. + +------ + +** numpy; version 1.16.6 -- https://numpy.org/ +Copyright 2020 NumPy developers. +** packaging; version 20.3 -- https://github.com/pypa/packaging +Copyright (c) Donald Stufft and individual contributors. +** python-dateutil; version 2.8.1 -- https://dateutil.readthedocs.io/en/stable/ +© Copyright 2019, dateutil +** scipy; version 1.2.3 -- https://www.scipy.org/ +Copyright © 2001, 2002 Enthought, Inc. +All rights reserved. + +Copyright © 2003-2019 SciPy Developers. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + + 3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from this + software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +------ + +** six; version 1.14.0 -- https://github.com/benjaminp/six +copyright u'2010-2015, Benjamin Peterson +Copyright (c) 2010-2015 Benjamin Peterson + +Copyright (c) 2010-2018 Benjamin Peterson + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +------ + +** pyyaml; version 3.12 -- http://pyyaml.org/ +Copyright (c) 2020 Ingy döt Net + +Copyright (c) 2006 Kirill Simonov + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +------ + +** configparser; version 4.0.2 -- https://github.com/jaraco/configparser/ +Copyright Jason R. Coombs +** zipp; version 1.2.0 -- https://github.com/jaraco/zipp +Copyright Jason R. Coombs + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. + +------ + +** pathlib2; version 2.3.5 -- https://pypi.org/project/pathlib2/ +Copyright (c) 2014-2017 Matthias C. M. Troffaes Copyright (c) 2012-2014 Antoine +Pitrou and contributors + +The MIT License (MIT) + +Copyright (c) 2014-2017 Matthias C. M. Troffaes Copyright (c) 2012-2014 Antoine +Pitrou and contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +------ + +** jmespath; version 0.9.5 -- https://pypi.org/project/jmespath/ +Copyright (c) 2013 Amazon.com, Inc. or its affiliates. All Rights Reserved + +Copyright (c) 2013 Amazon.com, Inc. or its affiliates. All Rights Reserved + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, dis- +tribute, sublicense, and/or sell copies of the Software, and to permit +persons to whom the Software is furnished to do so, subject to the fol- +lowing conditions: + +The above copyright notice and this permission notice shall be included +in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- +ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT +SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. + +------ + +** pyparsing; version 2.4.6 -- https://github.com/pyparsing/pyparsing +Paul McGuire +** wheel; version 0.24.0 -- https://github.com/pypa/wheel +"wheel" copyright (c) 2012-2014 Daniel Holth and +contributors. 
+ +MIT License + +Copyright (c) + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +------ + +** python-urllib3; version 1.25.8 -- https://urllib3.readthedocs.io/ +Copyright 2008-2016 Andrey Petrov and contributors (see CONTRIBUTORS.txt) + +This is the MIT license: http://www.opensource.org/licenses/mit-license.php + +Copyright 2008-2016 Andrey Petrov and contributors (see CONTRIBUTORS.txt) + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this +software and associated documentation files (the "Software"), to deal in the +Software +without restriction, including without limitation the rights to use, copy, +modify, merge, +publish, distribute, sublicense, and/or sell copies of the Software, and to +permit persons +to whom the Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be included in all +copies or +substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, +INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A +PARTICULAR +PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +BE LIABLE +FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR +OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE +OR OTHER +DEALINGS IN THE SOFTWARE. + +------ + +** futures; version 3.3.0 -- https://github.com/agronholm/pythonfutures +opyright (c) +2001, 2002, 2003, 2004, 2005, 2006 Python Software Foundation + +PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 + + 1. This LICENSE AGREEMENT is between the Python Software Foundation ("PSF"), + and the Individual or Organization ("Licensee") accessing and otherwise + using this software ("Python") in source or binary form and its associated + documentation. + + 2. Subject to the terms and conditions of this License Agreement, PSF hereby + grants Licensee a nonexclusive, royalty-free, world-wide license to + reproduce, analyze, test, perform and/or display publicly, prepare + derivative works, distribute, and otherwise use Python alone or in any + derivative version, provided, however, that PSF's License Agreement and + PSF's notice of copyright, i.e., "Copyright (c) 2001, 2002, 2003, 2004, + 2005, 2006 Python Software Foundation; All Rights Reserved" are retained in + Python alone or in any derivative version prepared by Licensee. + + 3. 
In the event Licensee prepares a derivative work that is based on or + incorporates Python or any part thereof, and wants to make the derivative + work available to others as provided herein, then Licensee hereby agrees to + include in any such work a brief summary of the changes made to Python. + + 4. PSF is making Python available to Licensee on an "AS IS" basis. PSF MAKES + NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR IMPLIED. BY WAY OF EXAMPLE, BUT + NOT LIMITATION, PSF MAKES NO AND DISCLAIMS ANY REPRESENTATION OR WARRANTY OF + MERCHANTABILITY OR FITNESS FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF + PYTHON WILL NOT INFRINGE ANY THIRD PARTY RIGHTS. + + 5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON FOR ANY + INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS A RESULT OF + MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, OR ANY DERIVATIVE + THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + + 6. This License Agreement will automatically terminate upon a material + breach of its terms and conditions. + + 7. Nothing in this License Agreement shall be deemed to create any + relationship of agency, partnership, or joint venture between PSF and + Licensee. This License Agreement does not grant permission to use PSF + trademarks or trade name in a trademark sense to endorse or promote products + or services of Licensee, or any third party. + + 8. By copying, installing or otherwise using Python, Licensee agrees to be + bound by the terms and conditions of this License Agreement. BEOPEN.COM + LICENSE AGREEMENT FOR PYTHON 2.0 + +BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1 + + 1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an office + at 160 Saratoga Avenue, Santa Clara, CA 95051, and the Individual or + Organization ("Licensee") accessing and otherwise using this software in + source or binary form and its associated documentation ("the Software"). + + 2. Subject to the terms and conditions of this BeOpen Python License + Agreement, BeOpen hereby grants Licensee a non-exclusive, royalty-free, + world-wide license to reproduce, analyze, test, perform and/or display + publicly, prepare derivative works, distribute, and otherwise use the + Software alone or in any derivative version, provided, however, that the + BeOpen Python License is retained in the Software, alone or in any + derivative version prepared by Licensee. + + 3. BeOpen is making the Software available to Licensee on an "AS IS" basis. + BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR IMPLIED. BY WAY OF + EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND DISCLAIMS ANY + REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS FOR ANY PARTICULAR + PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT INFRINGE ANY THIRD PARTY + RIGHTS. + + 4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE SOFTWARE + FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS A RESULT OF + USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY DERIVATIVE THEREOF, + EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + + 5. This License Agreement will automatically terminate upon a material + breach of its terms and conditions. + + 6. This License Agreement shall be governed by and interpreted in all + respects by the law of the State of California, excluding conflict of law + provisions. Nothing in this License Agreement shall be deemed to create any + relationship of agency, partnership, or joint venture between BeOpen and + Licensee. 
This License Agreement does not grant permission to use BeOpen + trademarks or trade names in a trademark sense to endorse or promote + products or services of Licensee, or any third party. As an exception, the + "BeOpen Python" logos available at http://www.pythonlabs.com/logos.html may + be used according to the permissions granted on that web page. + + 7. By copying, installing or otherwise using the software, Licensee agrees + to be bound by the terms and conditions of this License Agreement. CNRI OPEN + SOURCE LICENSE AGREEMENT (for Python 1.6b1) IMPORTANT: PLEASE READ THE + FOLLOWING AGREEMENT CAREFULLY. + +BY CLICKING ON "ACCEPT" WHERE INDICATED BELOW, OR BY COPYING, INSTALLING OR +OTHERWISE USING PYTHON 1.6, beta 1 SOFTWARE, YOU ARE DEEMED TO HAVE AGREED TO +THE TERMS AND CONDITIONS OF THIS LICENSE AGREEMENT. + + 1. This LICENSE AGREEMENT is between the Corporation for National Research + Initiatives, having an office at 1895 Preston White Drive, Reston, VA 20191 + ("CNRI"), and the Individual or Organization ("Licensee") accessing and + otherwise using Python 1.6, beta 1 software in source or binary form and its + associated documentation, as released at the www.python.org Internet site on + August 4, 2000 ("Python 1.6b1"). + + 2. Subject to the terms and conditions of this License Agreement, CNRI + hereby grants Licensee a non-exclusive, royalty-free, world-wide license to + reproduce, analyze, test, perform and/or display publicly, prepare + derivative works, distribute, and otherwise use Python 1.6b1 alone or in any + derivative version, provided, however, that CNRIs License Agreement is + retained in Python 1.6b1, alone or in any derivative version prepared by + Licensee. + + Alternately, in lieu of CNRIs License Agreement, Licensee may substitute the + following text (omitting the quotes): "Python 1.6, beta 1, is made available + subject to the terms and conditions in CNRIs License Agreement. This + Agreement may be located on the Internet using the following unique, + persistent identifier (known as a handle): 1895.22/1011. This Agreement may + also be obtained from a proxy server on the Internet using the + URL:http://hdl.handle.net/1895.22/1011". + + 3. In the event Licensee prepares a derivative work that is based on or + incorporates Python 1.6b1 or any part thereof, and wants to make the + derivative work available to the public as provided herein, then Licensee + hereby agrees to indicate in any such work the nature of the modifications + made to Python 1.6b1. + + 4. CNRI is making Python 1.6b1 available to Licensee on an "AS IS" basis. + CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR IMPLIED. BY WAY OF + EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND DISCLAIMS ANY REPRESENTATION + OR WARRANTY OF MERCHANTABILITY OR FITNESS FOR ANY PARTICULAR PURPOSE OR THAT + THE USE OF PYTHON 1.6b1 WILL NOT INFRINGE ANY THIRD PARTY RIGHTS. + + 5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE SOFTWARE + FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS A RESULT OF + USING, MODIFYING OR DISTRIBUTING PYTHON 1.6b1, OR ANY DERIVATIVE THEREOF, + EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + + 6. This License Agreement will automatically terminate upon a material + breach of its terms and conditions. + + 7. This License Agreement shall be governed by and interpreted in all + respects by the law of the State of Virginia, excluding conflict of law + provisions. 
Nothing in this License Agreement shall be deemed to create any + relationship of agency, partnership, or joint venture between CNRI and + Licensee. This License Agreement does not grant permission to use CNRI + trademarks or trade name in a trademark sense to endorse or promote products + or services of Licensee, or any third party. + + 8. By clicking on the "ACCEPT" button where indicated, or by copying, + installing or otherwise using Python 1.6b1, Licensee agrees to be bound by + the terms and conditions of this License Agreement. ACCEPT CWI LICENSE + AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2 + +Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam, The +Netherlands. All rights reserved. + +Permission to use, copy, modify, and distribute this software and its +documentation for any purpose and without fee is hereby granted, provided that +the above copyright notice appear in all copies and that both that copyright +notice and this permission notice appear in supporting documentation, and that +the name of Stichting Mathematisch Centrum or CWI not be used in advertising or +publicity pertaining to distribution of the software without specific, written +prior permission. + +STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS +SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN +NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE FOR ANY SPECIAL, +INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF +THIS SOFTWARE. + + + +------ + +** contextlib2; version 0.6.0 -- https://contextlib2.readthedocs.io/en/stable/ +This is a Jazzband project. By contributing you agree to abide by the +Contributor Code of Conduct and follow the guidelines. + +A. HISTORY OF THE SOFTWARE +========================== + +contextlib2 is a derivative of the contextlib module distributed by the PSF +as part of the Python standard library. According, it is itself redistributed +under the PSF license (reproduced in full below). As the contextlib module +was added only in Python 2.5, the licenses for earlier Python versions are +not applicable and have not been included. + +Python was created in the early 1990s by Guido van Rossum at Stichting +Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands +as a successor of a language called ABC. Guido remains Python's +principal author, although it includes many contributions from others. + +In 1995, Guido continued his work on Python at the Corporation for +National Research Initiatives (CNRI, see http://www.cnri.reston.va.us) +in Reston, Virginia where he released several versions of the +software. + +In May 2000, Guido and the Python core development team moved to +BeOpen.com to form the BeOpen PythonLabs team. In October of the same +year, the PythonLabs team moved to Digital Creations (now Zope +Corporation, see http://www.zope.com). In 2001, the Python Software +Foundation (PSF, see http://www.python.org/psf/) was formed, a +non-profit organization created specifically to own Python-related +Intellectual Property. Zope Corporation is a sponsoring member of +the PSF. + +All Python releases are Open Source (see http://www.opensource.org for +the Open Source Definition). 
Historically, most, but not all, Python +releases have also been GPL-compatible; the table below summarizes +the various releases that included the contextlib module. + + Release Derived Year Owner GPL- + from compatible? (1) + + 2.5 2.4 2006 PSF yes + 2.5.1 2.5 2007 PSF yes + 2.5.2 2.5.1 2008 PSF yes + 2.5.3 2.5.2 2008 PSF yes + 2.6 2.5 2008 PSF yes + 2.6.1 2.6 2008 PSF yes + 2.6.2 2.6.1 2009 PSF yes + 2.6.3 2.6.2 2009 PSF yes + 2.6.4 2.6.3 2009 PSF yes + 2.6.5 2.6.4 2010 PSF yes + 3.0 2.6 2008 PSF yes + 3.0.1 3.0 2009 PSF yes + 3.1 3.0.1 2009 PSF yes + 3.1.1 3.1 2009 PSF yes + 3.1.2 3.1.1 2010 PSF yes + 3.1.3 3.1.2 2010 PSF yes + 3.1.4 3.1.3 2011 PSF yes + 3.2 3.1 2011 PSF yes + 3.2.1 3.2 2011 PSF yes + 3.2.2 3.2.1 2011 PSF yes + 3.3 3.2 2012 PSF yes + +Footnotes: + +(1) GPL-compatible doesn't mean that we're distributing Python under + the GPL. All Python licenses, unlike the GPL, let you distribute + a modified version without making your changes open source. The + GPL-compatible licenses make it possible to combine Python with + other software that is released under the GPL; the others don't. + +Thanks to the many outside volunteers who have worked under Guido's +direction to make these releases possible. + + +B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON +=============================================================== + +PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 +-------------------------------------------- + +1. This LICENSE AGREEMENT is between the Python Software Foundation +("PSF"), and the Individual or Organization ("Licensee") accessing and +otherwise using this software ("Python") in source or binary form and +its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, PSF hereby +grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, +analyze, test, perform and/or display publicly, prepare derivative works, +distribute, and otherwise use Python alone or in any derivative version, +provided, however, that PSF's License Agreement and PSF's notice of copyright, +i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, +2010, +2011 Python Software Foundation; All Rights Reserved" are retained in Python +alone or in any derivative version prepared by Licensee. + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python. + +4. PSF is making Python available to Licensee on an "AS IS" +basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. Nothing in this License Agreement shall be deemed to create any +relationship of agency, partnership, or joint venture between PSF and +Licensee. 
This License Agreement does not grant permission to use PSF
+trademarks or trade name in a trademark sense to endorse or promote
+products or services of Licensee, or any third party.
+
+8. By copying, installing or otherwise using Python, Licensee
+agrees to be bound by the terms and conditions of this License
+Agreement.
+
+
+
+------
+
+** protobuf3-to-dict; version 0.1.5 --
+https://github.com/kaporzhu/protobuf-to-dict
+In jurisdictions that recognize copyright laws, the author or authors of this
+software dedicate any and all copyright interest in the software to the public
+domain.
+
+This is free and unencumbered software released into the public domain
+by its author, Ben Hodgson .
+
+Anyone is free to copy, modify, publish, use, compile, sell, or
+distribute this software, either in source code form or as a compiled
+binary, for any purpose, commercial or non-commercial, and by any
+means.
+
+In jurisdictions that recognise copyright laws, the author or authors
+of this software dedicate any and all copyright interest in the
+software to the public domain. We make this dedication for the benefit
+of the public at large and to the detriment of our heirs and
+successors. We intend this dedication to be an overt act of
+relinquishment in perpetuity of all present and future rights to this
+software under copyright law.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
+OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
+ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE.
+
+For more information, please refer to
\ No newline at end of file
diff --git a/components/aws/sagemaker/batch_transform/README.md b/components/aws/sagemaker/batch_transform/README.md
new file mode 100644
index 00000000000..d71c03798bd
--- /dev/null
+++ b/components/aws/sagemaker/batch_transform/README.md
@@ -0,0 +1,96 @@
+# SageMaker Batch Transform Kubeflow Pipeline component
+
+## Summary
+Component to get inferences for an entire dataset in SageMaker from a Kubeflow Pipelines workflow.
+
+## Details
+With [batch transform](https://docs.aws.amazon.com/sagemaker/latest/dg/how-it-works-batch.html), you create a batch transform job using a trained model and the dataset, which must be stored in Amazon S3. Use batch transform when you:
+
+* Want to get inferences for an entire dataset and index them to serve inferences in real time
+* Don't need a persistent endpoint that applications (for example, web or mobile apps) can call to get inferences
+* Don't need the subsecond latency that Amazon SageMaker hosted endpoints provide
+
+## Intended Use
+Create a transform job in AWS SageMaker.
+
+## Runtime Arguments
+Argument | Description | Optional (in pipeline definition) | Optional (in UI) | Data type | Accepted values | Default |
+:--- | :---------- | :---------- | :---------- | :----------| :---------- | :----------|
+region | The region where the cluster launches | No | No | String | | |
+endpoint_url | The endpoint URL for the private link VPC endpoint | Yes | Yes | String | | |
+job_name | The name of the transform job.
The name must be unique within an AWS Region in an AWS account | Yes | Yes | String | | A generated name (a combination of model_name and the string 'BatchTransform') |
+model_name | The name of the model that you want to use for the transform job. The model name must be the name of an existing Amazon SageMaker model within an AWS Region in an AWS account | No | No | String | | |
+max_concurrent | The maximum number of parallel requests that can be sent to each instance in a transform job | Yes | Yes | Integer | | 0 |
+max_payload | The maximum allowed size of the payload, in MB | Yes | Yes | Integer | The value in max_payload must be greater than, or equal to, the size of a single record | 6 |
+batch_strategy | The number of records to include in a mini-batch for an HTTP inference request | Yes | Yes | String | | |
+environment | The environment variables to set in the Docker container | Yes | Yes | Dict | Maximum length of 1024. Key Pattern: `[a-zA-Z_][a-zA-Z0-9_]*`. Value Pattern: `[\S\s]*`. Up to 16 key-value entries in the map | |
+
+The following parameters construct the [`TransformInput`](https://docs.aws.amazon.com/sagemaker/latest/APIReference/API_TransformInput.html) object of the CreateTransformJob API. These describe the input source and the way the transform job consumes it.
+
+Argument | Description | Optional (in pipeline definition) | Optional (in UI) | Data type | Accepted values | Default |
+:--- | :---------- | :---------- | :---------- | :----------| :---------- | :----------|
+input_location | The S3 location of the data source that is associated with a channel. [Read more on S3Uri](https://docs.aws.amazon.com/sagemaker/latest/APIReference/API_TransformS3DataSource.html) | No | No | String | | |
+data_type | Used by SageMaker to identify the objects from the S3 bucket to be used for batch transform. [Read more on S3DataType](https://docs.aws.amazon.com/sagemaker/latest/APIReference/API_TransformS3DataSource.html) | Yes | Yes | String | `ManifestFile`, `S3Prefix`, `AugmentedManifestFile`| `S3Prefix` |
+content_type | The multipurpose internet mail extension (MIME) type of the data. Amazon SageMaker uses the MIME type with each HTTP call to transfer data to the transform job | Yes | Yes | String | | |
+split_type | The method to use to split the transform job data files into smaller batches | Yes | Yes | String | `Line`, `RecordIO`, `TFRecord`, `None` | `None` |
+compression_type | If the transform data is compressed, specify the compression type | Yes | Yes | String | `GZip`, `None` | `None` |
+
+* The `input_location` and `data_type` parameters above are used to construct the [`S3DataSource`](https://docs.aws.amazon.com/sagemaker/latest/APIReference/API_TransformS3DataSource.html) object, which is part of the [`TransformDataSource`](https://docs.aws.amazon.com/sagemaker/latest/APIReference/API_TransformDataSource.html) object in the [`TransformInput`](https://docs.aws.amazon.com/sagemaker/latest/APIReference/API_TransformInput.html) part of the CreateTransformJob API.
+```
+TransformInput={
+    'DataSource': {
+        'S3DataSource': {
+            'S3DataType': 'ManifestFile'|'S3Prefix'|'AugmentedManifestFile',
+            'S3Uri': 'string'
+        }
+    },
+    ... other input parameters ...
+  }
+```
+[Ref](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/sagemaker.html#SageMaker.Client.create_transform_job)
+
+The following parameters are used to construct the [`TransformOutput`](https://docs.aws.amazon.com/sagemaker/latest/APIReference/API_TransformOutput.html) object of the CreateTransformJob API.
These describe the results of a transform job.
+
+Argument | Description | Optional (in pipeline definition) | Optional (in UI) | Data type | Accepted values | Default |
+:--- | :---------- | :---------- | :---------- | :----------| :---------- | :----------|
+output_location | The Amazon S3 path where you want Amazon SageMaker to store the results of the transform job | No | No | String | | |
+accept | The MIME type used to specify the output data. Amazon SageMaker uses the MIME type with each HTTP call to transfer data from the transform job | Yes | Yes | String | | |
+assemble_with | Defines how to assemble the results of the transform job as a single S3 object. To concatenate the results in binary format, specify None. To add a newline character at the end of every transformed record, specify Line | Yes | Yes | String | `Line`, `None` | `None`|
+output_encryption_key | The AWS Key Management Service key to encrypt the model artifacts at rest using Amazon S3 server-side encryption | Yes | Yes | String | [KmsKeyId formats](https://docs.aws.amazon.com/sagemaker/latest/APIReference/API_TransformOutput.html) | |
+
+The following parameters are used to construct the [`TransformResources`](https://docs.aws.amazon.com/sagemaker/latest/APIReference/API_TransformResources.html) object of the CreateTransformJob API. These describe the resources, including ML instance types and ML instance count, to use for the transform job.
+
+Argument | Description | Optional (in pipeline definition) | Optional (in UI) | Data type | Accepted values | Default |
+:--- | :---------- | :---------- | :---------- | :----------| :---------- | :----------|
+instance_type | The ML compute instance type for the transform job | Yes | Yes | String | ml.m4.xlarge, ml.m4.2xlarge, ml.m4.4xlarge, ml.m4.10xlarge, ml.m4.16xlarge, ml.m5.large, ml.m5.xlarge, ml.m5.2xlarge, ml.m5.4xlarge, ml.m5.12xlarge, ml.m5.24xlarge, ml.c4.xlarge, ml.c4.2xlarge, ml.c4.4xlarge, ml.c4.8xlarge, ml.p2.xlarge, ml.p2.8xlarge, ml.p2.16xlarge, ml.p3.2xlarge, ml.p3.8xlarge, ml.p3.16xlarge, ml.c5.xlarge, ml.c5.2xlarge, ml.c5.4xlarge, ml.c5.9xlarge, ml.c5.18xlarge | ml.m4.xlarge |
+instance_count | The number of ML compute instances to use in the transform job | Yes | Yes | Integer | | 1 |
+resource_encryption_key | The AWS Key Management Service (AWS KMS) key used to encrypt model data on the storage volume attached to the ML compute instance(s) that run the batch transform job | Yes | Yes | String | [VolumeKmsKeyId formats](https://docs.aws.amazon.com/sagemaker/latest/APIReference/API_TransformResources.html) | |
+
+The following parameters are used to construct the [`DataProcessing`](https://docs.aws.amazon.com/sagemaker/latest/APIReference/API_DataProcessing.html) object of the CreateTransformJob API, the data structure used to specify the data for inference in a batch transform job and to associate the data that is relevant to the prediction results in the output.
+
+Argument | Description | Optional (in pipeline definition) | Optional (in UI) | Data type | Accepted values | Default |
+:--- | :---------- | :---------- | :---------- | :----------| :---------- | :----------|
+input_filter | A JSONPath expression used to select a portion of the input data to pass to the algorithm.
+
+The following parameters are used to construct the [`DataProcessing`](https://docs.aws.amazon.com/sagemaker/latest/APIReference/API_DataProcessing.html) object of the CreateTransformJob API. It specifies the data to be used for inference in a batch transform job and associates the data that is relevant to the prediction results in the output.
+
+Argument | Description | Optional (in pipeline definition) | Optional (in UI) | Data type | Accepted values | Default |
+:--- | :---------- | :---------- | :---------- | :----------| :---------- | :----------|
+input_filter | A JSONPath expression used to select a portion of the input data to pass to the algorithm. [Read more on InputFilter](https://docs.aws.amazon.com/sagemaker/latest/APIReference/API_DataProcessing.html) | Yes | Yes | String | | |
+output_filter | A JSONPath expression used to select a portion of the joined dataset to save in the output file for a batch transform job. [Read more on OutputFilter](https://docs.aws.amazon.com/sagemaker/latest/APIReference/API_DataProcessing.html) | Yes | Yes | String | | |
+join_source | Specifies the source of the data to join with the transformed data. [Read more on JoinSource](https://docs.aws.amazon.com/sagemaker/latest/APIReference/API_DataProcessing.html) | Yes | Yes | String | `Input`, `None` | None |
+
+Notes:
+* Please use the links in the [Resources section](#Resources) for detailed information on each input parameter and the SageMaker APIs used in this component
+
+## Outputs
+Name | Description
+:--- | :----------
+output_location | The Amazon S3 path where you want Amazon SageMaker to store the results of the transform job
+
+## Requirements
+* [Kubeflow pipelines SDK](https://www.kubeflow.org/docs/pipelines/sdk/install-sdk/)
+* [Kubeflow set-up](https://www.kubeflow.org/docs/aws/deploy/install-kubeflow/)
+
+## Samples
+### Integrated into a pipeline
+MNIST Classification pipeline: [Pipeline](https://github.com/kubeflow/pipelines/blob/master/samples/contrib/aws-samples/mnist-kmeans-sagemaker/mnist-classification-pipeline.py) | [Steps](https://github.com/kubeflow/pipelines/blob/master/samples/contrib/aws-samples/mnist-kmeans-sagemaker/README.md)
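+
+### Standalone usage (sketch)
+A minimal, illustrative sketch of calling this component directly from a pipeline definition, in the style of the sample above. The component URL points at the master branch of this repository; the region, model name, and S3 paths are placeholders to replace with your own values:
+```python
+import kfp
+from kfp import components, dsl
+
+# Load the batch transform component from its component.yaml definition.
+sagemaker_batch_transform_op = components.load_component_from_url(
+    'https://raw.githubusercontent.com/kubeflow/pipelines/master/components/aws/sagemaker/batch_transform/component.yaml')
+
+@dsl.pipeline(name='Batch transform sketch')
+def batch_transform_pipeline(
+    region='us-east-1',                             # placeholder region
+    model_name='my-existing-sagemaker-model',       # assumed to already exist in SageMaker
+    input_location='s3://my-bucket/batch/input',    # placeholder bucket
+    output_location='s3://my-bucket/batch/output',  # placeholder bucket
+):
+    sagemaker_batch_transform_op(
+        region=region,
+        model_name=model_name,
+        input_location=input_location,
+        output_location=output_location,
+        instance_type='ml.m4.xlarge',
+        instance_count=1,
+    )
+
+if __name__ == '__main__':
+    kfp.compiler.Compiler().compile(batch_transform_pipeline, __file__ + '.zip')
+```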
+
+## Resources
+* [Batch Transform on SageMaker](https://docs.aws.amazon.com/sagemaker/latest/dg/how-it-works-batch.html)
+* [Create Transform Job API documentation](https://docs.aws.amazon.com/sagemaker/latest/APIReference/API_CreateTransformJob.html)
+* [Boto3 API reference](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/sagemaker.html#SageMaker.Client.create_transform_job)
diff --git a/components/aws/sagemaker/batch_transform/component.yaml b/components/aws/sagemaker/batch_transform/component.yaml
index 780471f3078..b585ecbcf56 100644
--- a/components/aws/sagemaker/batch_transform/component.yaml
+++ b/components/aws/sagemaker/batch_transform/component.yaml
@@ -4,78 +4,102 @@ description: |
 inputs:
 - name: region
   description: 'The region where the cluster launches.'
+  type: String
 - name: job_name
   description: 'The name of the batch transform job.'
   default: ''
+  type: String
 - name: model_name
   description: 'The name of the model that you want to use for the transform job.'
+  type: String
 - name: max_concurrent
   description: 'The maximum number of parallel requests that can be sent to each instance in a transform job.'
   default: '0'
+  type: Integer
 - name: max_payload
   description: 'The maximum allowed size of the payload, in MB.'
   default: '6'
+  type: Integer
 - name: batch_strategy
   description: 'The number of records to include in a mini-batch for an HTTP inference request.'
   default: ''
+  type: String
 - name: environment
   description: 'The environment variables to set in the Docker container. Up to 16 key-value entries in the map.'
   default: '{}'
+  type: JsonObject
 - name: input_location
   description: 'The S3 location of the data source that is associated with a channel.'
+  type: String
 - name: data_type
   description: 'Data type of the input. Can be ManifestFile, S3Prefix, or AugmentedManifestFile.'
   default: 'S3Prefix'
+  type: String
 - name: content_type
   description: 'The multipurpose internet mail extension (MIME) type of the data.'
   default: ''
+  type: String
 - name: split_type
   description: 'The method to use to split the transform job data files into smaller batches.'
   default: 'None'
+  type: String
 - name: compression_type
   description: 'If the transform data is compressed, the specification of the compression type.'
   default: 'None'
+  type: String
 - name: output_location
   description: 'The Amazon S3 path where you want Amazon SageMaker to store the results of the transform job.'
+  type: String
 - name: accept
   description: 'The MIME type used to specify the output data.'
   default: ''
+  type: String
 - name: assemble_with
   description: 'Defines how to assemble the results of the transform job as a single S3 object. Either None or Line.'
   default: ''
+  type: String
 - name: output_encryption_key
   description: 'The AWS Key Management Service ID of the key used to encrypt the output data.'
   default: ''
+  type: String
 - name: input_filter
   description: 'A JSONPath expression used to select a portion of the input data to pass to the algorithm.'
   default: ''
+  type: String
 - name: output_filter
   description: 'A JSONPath expression used to select a portion of the joined dataset to save in the output file for a batch transform job.'
   default: ''
+  type: String
 - name: join_source
   description: 'Specifies the source of the data to join with the transformed data.'
   default: 'None'
+  type: String
 - name: instance_type
   description: 'The ML compute instance type.'
   default: 'ml.m4.xlarge'
+  type: String
 - name: instance_count
   description: 'The number of ML compute instances to use in the transform job.'
   default: '1'
+  type: Integer
 - name: resource_encryption_key
   description: 'The AWS KMS key that Amazon SageMaker uses to encrypt data on the storage volume attached to the ML compute instance(s).'
   default: ''
+  type: String
 - name: endpoint_url
   description: 'The endpoint URL for the private link VPC endpoint.'
   default: ''
+  type: String
 - name: tags
   description: 'Key-value pairs to categorize AWS resources.'
default: '{}' + type: JsonObject outputs: - {name: output_location, description: 'S3 URI of the transform job results.'} implementation: container: - image: redbackthomson/aws-kubeflow-sagemaker:20191003 - command: ['python'] + image: amazon/aws-sagemaker-kfp-components:0.3.1 + command: ['python3'] args: [ batch_transform.py, --region, {inputValue: region}, diff --git a/components/aws/sagemaker/batch_transform/src/batch_transform.py b/components/aws/sagemaker/batch_transform/src/batch_transform.py index 9ff2b85fcb9..bc38b0cd1e7 100644 --- a/components/aws/sagemaker/batch_transform/src/batch_transform.py +++ b/components/aws/sagemaker/batch_transform/src/batch_transform.py @@ -26,31 +26,29 @@ def create_parser(): parser = argparse.ArgumentParser(description='SageMaker Batch Transformation Job') _utils.add_default_client_arguments(parser) - parser.add_argument('--job_name', type=str.strip, required=False, help='The name of the transform job.', default='') - parser.add_argument('--model_name', type=str.strip, required=True, help='The name of the model that you want to use for the transform job.') - parser.add_argument('--max_concurrent', type=_utils.str_to_int, required=False, help='The maximum number of parallel requests that can be sent to each instance in a transform job.', default='0') - parser.add_argument('--max_payload', type=_utils.str_to_int, required=False, help='The maximum allowed size of the payload, in MB.', default='6') - parser.add_argument('--batch_strategy', choices=['MultiRecord', 'SingleRecord', ''], type=str.strip, required=False, help='The number of records to include in a mini-batch for an HTTP inference request.', default='') - parser.add_argument('--environment', type=_utils.str_to_json_dict, required=False, help='The dictionary of the environment variables to set in the Docker container. Up to 16 key-value entries in the map.', default='{}') - parser.add_argument('--input_location', type=str.strip, required=True, help='The S3 location of the data source that is associated with a channel.') - parser.add_argument('--data_type', choices=['ManifestFile', 'S3Prefix', 'AugmentedManifestFile', ''], type=str.strip, required=False, help='Data type of the input. Can be ManifestFile, S3Prefix, or AugmentedManifestFile.', default='S3Prefix') - parser.add_argument('--content_type', type=str.strip, required=False, help='The multipurpose internet mail extension (MIME) type of the data.', default='') - parser.add_argument('--split_type', choices=['None', 'Line', 'RecordIO', 'TFRecord', ''], type=str.strip, required=False, help='The method to use to split the transform job data files into smaller batches.', default='None') - parser.add_argument('--compression_type', choices=['None', 'Gzip', ''], type=str.strip, required=False, help='If the transform data is compressed, the specification of the compression type.', default='None') - parser.add_argument('--output_location', type=str.strip, required=True, help='The Amazon S3 path where you want Amazon SageMaker to store the results of the transform job.') - parser.add_argument('--accept', type=str.strip, required=False, help='The MIME type used to specify the output data.') - parser.add_argument('--assemble_with', choices=['None', 'Line', ''], type=str.strip, required=False, help='Defines how to assemble the results of the transform job as a single S3 object. 
Either None or Line.') - parser.add_argument('--output_encryption_key', type=str.strip, required=False, help='The AWS KMS key that Amazon SageMaker uses to encrypt the model artifacts.', default='') - parser.add_argument('--input_filter', type=str.strip, required=False, help='A JSONPath expression used to select a portion of the input data to pass to the algorithm.', default='') - parser.add_argument('--output_filter', type=str.strip, required=False, help='A JSONPath expression used to select a portion of the joined dataset to save in the output file for a batch transform job.', default='') - parser.add_argument('--join_source', choices=['None', 'Input', ''], type=str.strip, required=False, help='Specifies the source of the data to join with the transformed data.', default='None') - parser.add_argument('--instance_type', choices=['ml.m4.xlarge', 'ml.m4.2xlarge', 'ml.m4.4xlarge', 'ml.m4.10xlarge', 'ml.m4.16xlarge', 'ml.m5.large', 'ml.m5.xlarge', 'ml.m5.2xlarge', 'ml.m5.4xlarge', - 'ml.m5.12xlarge', 'ml.m5.24xlarge', 'ml.c4.xlarge', 'ml.c4.2xlarge', 'ml.c4.4xlarge', 'ml.c4.8xlarge', 'ml.p2.xlarge', 'ml.p2.8xlarge', 'ml.p2.16xlarge', 'ml.p3.2xlarge', 'ml.p3.8xlarge', 'ml.p3.16xlarge', - 'ml.c5.xlarge', 'ml.c5.2xlarge', 'ml.c5.4xlarge', 'ml.c5.9xlarge', 'ml.c5.18xlarge'], type=str.strip, required=True, help='The ML compute instance type for the transform job.', default='ml.m4.xlarge') - parser.add_argument('--instance_count', type=_utils.str_to_int, required=False, help='The number of ML compute instances to use in the transform job.') - parser.add_argument('--resource_encryption_key', type=str.strip, required=False, help='The AWS KMS key that Amazon SageMaker uses to encrypt data on the storage volume attached to the ML compute instance(s).', default='') - parser.add_argument('--tags', type=_utils.str_to_json_dict, required=False, help='An array of key-value pairs, to categorize AWS resources.', default='{}') - parser.add_argument('--output_location_file', type=str.strip, required=True, help='File path where the program will write the Amazon S3 URI of the transform job results.') + parser.add_argument('--job_name', type=str, required=False, help='The name of the transform job.', default='') + parser.add_argument('--model_name', type=str, required=True, help='The name of the model that you want to use for the transform job.') + parser.add_argument('--max_concurrent', type=int, required=False, help='The maximum number of parallel requests that can be sent to each instance in a transform job.', default='0') + parser.add_argument('--max_payload', type=int, required=False, help='The maximum allowed size of the payload, in MB.', default='6') + parser.add_argument('--batch_strategy', choices=['MultiRecord', 'SingleRecord', ''], type=str, required=False, help='The number of records to include in a mini-batch for an HTTP inference request.', default='') + parser.add_argument('--environment', type=_utils.yaml_or_json_str, required=False, help='The dictionary of the environment variables to set in the Docker container. Up to 16 key-value entries in the map.', default={}) + parser.add_argument('--input_location', type=str, required=True, help='The S3 location of the data source that is associated with a channel.') + parser.add_argument('--data_type', choices=['ManifestFile', 'S3Prefix', 'AugmentedManifestFile', ''], type=str, required=False, help='Data type of the input. 
Can be ManifestFile, S3Prefix, or AugmentedManifestFile.', default='S3Prefix') + parser.add_argument('--content_type', type=str, required=False, help='The multipurpose internet mail extension (MIME) type of the data.', default='') + parser.add_argument('--split_type', choices=['None', 'Line', 'RecordIO', 'TFRecord', ''], type=str, required=False, help='The method to use to split the transform job data files into smaller batches.', default='None') + parser.add_argument('--compression_type', choices=['None', 'Gzip', ''], type=str, required=False, help='If the transform data is compressed, the specification of the compression type.', default='None') + parser.add_argument('--output_location', type=str, required=True, help='The Amazon S3 path where you want Amazon SageMaker to store the results of the transform job.') + parser.add_argument('--accept', type=str, required=False, help='The MIME type used to specify the output data.') + parser.add_argument('--assemble_with', choices=['None', 'Line', ''], type=str, required=False, help='Defines how to assemble the results of the transform job as a single S3 object. Either None or Line.') + parser.add_argument('--output_encryption_key', type=str, required=False, help='The AWS KMS key that Amazon SageMaker uses to encrypt the model artifacts.', default='') + parser.add_argument('--input_filter', type=str, required=False, help='A JSONPath expression used to select a portion of the input data to pass to the algorithm.', default='') + parser.add_argument('--output_filter', type=str, required=False, help='A JSONPath expression used to select a portion of the joined dataset to save in the output file for a batch transform job.', default='') + parser.add_argument('--join_source', choices=['None', 'Input', ''], type=str, required=False, help='Specifies the source of the data to join with the transformed data.', default='None') + parser.add_argument('--instance_type', type=str, required=False, help='The ML compute instance type for the transform job.', default='ml.m4.xlarge') + parser.add_argument('--instance_count', type=int, required=False, help='The number of ML compute instances to use in the transform job.') + parser.add_argument('--resource_encryption_key', type=str, required=False, help='The AWS KMS key that Amazon SageMaker uses to encrypt data on the storage volume attached to the ML compute instance(s).', default='') + parser.add_argument('--tags', type=_utils.yaml_or_json_str, required=False, help='An array of key-value pairs, to categorize AWS resources.', default={}) + parser.add_argument('--output_location_file', type=str, required=True, help='File path where the program will write the Amazon S3 URI of the transform job results.') return parser diff --git a/components/aws/sagemaker/codebuild/deploy.buildspec.yml b/components/aws/sagemaker/codebuild/deploy.buildspec.yml new file mode 100644 index 00000000000..4c3c7534f0f --- /dev/null +++ b/components/aws/sagemaker/codebuild/deploy.buildspec.yml @@ -0,0 +1,12 @@ +version: 0.2 + +phases: + pre_build: + commands: + # Log in to Dockerhub + - docker login -u $DOCKER_CONFIG_USERNAME -p $DOCKER_CONFIG_PASSWORD + + build: + commands: + - cd components/aws/sagemaker + - ./codebuild/scripts/deploy.sh -d "${DRY_RUN}" \ No newline at end of file diff --git a/components/aws/sagemaker/codebuild/integration-test.buildspec.yml b/components/aws/sagemaker/codebuild/integration-test.buildspec.yml new file mode 100644 index 00000000000..09dafe53cd4 --- /dev/null +++ 
b/components/aws/sagemaker/codebuild/integration-test.buildspec.yml
@@ -0,0 +1,28 @@
+version: 0.2
+
+env:
+  variables:
+    CONTAINER_VARIABLES: "AWS_CONTAINER_CREDENTIALS_RELATIVE_URI EKS_PRIVATE_SUBNETS EKS_PUBLIC_SUBNETS PYTEST_ADDOPTS S3_DATA_BUCKET EKS_EXISTING_CLUSTER SAGEMAKER_EXECUTION_ROLE_ARN REGION"
+
+phases:
+  build:
+    commands:
+      - cd components/aws
+      - docker build . -f ./sagemaker/tests/integration_tests/Dockerfile -t amazon/integration-test-image --quiet
+
+      - cd sagemaker/codebuild/scripts && export CONTAINER_VARIABLE_ARGUMENTS="$(./construct_environment_array.sh)"
+
+      # Run the container and copy the results to /tmp
+      # Passes all listed host environment variables through to the container
+      - docker run --name integration-test-container $(echo $CONTAINER_VARIABLE_ARGUMENTS) amazon/integration-test-image
+
+  post_build:
+    commands:
+      - docker cp integration-test-container:/tests/integration_tests/integration_tests.log /tmp/results.xml
+      - docker rm -f integration-test-container
+
+reports:
+  IntegrationTestReport:
+    files:
+      - "results.xml"
+    base-directory: "/tmp"
\ No newline at end of file
diff --git a/components/aws/sagemaker/codebuild/scripts/construct_environment_array.sh b/components/aws/sagemaker/codebuild/scripts/construct_environment_array.sh
new file mode 100755
index 00000000000..249108d8bf5
--- /dev/null
+++ b/components/aws/sagemaker/codebuild/scripts/construct_environment_array.sh
@@ -0,0 +1,10 @@
+#!/usr/bin/env bash
+
+# This script breaks up a string of environment variable names into a list of
+# parameters that `docker run` accepts. This needs to be made into a script
+# for CodeBuild because these commands do not run in dash, the default shell
+# on the CodeBuild standard images.
+
+IFS=' ' read -a variable_array <<< $CONTAINER_VARIABLES
+printf -v CONTAINER_VARIABLE_ARGUMENTS -- "--env %s " "${variable_array[@]}"
+echo $CONTAINER_VARIABLE_ARGUMENTS
\ No newline at end of file
diff --git a/components/aws/sagemaker/codebuild/scripts/deploy.sh b/components/aws/sagemaker/codebuild/scripts/deploy.sh
new file mode 100755
index 00000000000..09bf596b1d2
--- /dev/null
+++ b/components/aws/sagemaker/codebuild/scripts/deploy.sh
@@ -0,0 +1,78 @@
+#!/usr/bin/env bash
+
+set -e
+
+REMOTE_REPOSITORY="amazon/aws-sagemaker-kfp-components"
+DRYRUN="true"
+FULL_VERSION_TAG=""
+DOCKER_CONFIG_PATH=${DOCKER_CONFIG_PATH:-"/root/.docker"}
+
+while getopts ":d:v:" opt; do
+  case ${opt} in
+    d)
+      if [[ "${OPTARG}" = "false" ]]; then
+        DRYRUN="false"
+      else
+        DRYRUN="true"
+      fi
+      ;;
+    v)
+      FULL_VERSION_TAG="${OPTARG}"
+      ;;
+  esac
+done
+
+function docker_tag_exists() {
+  curl --silent -f -lSL https://index.docker.io/v1/repositories/$1/tags/$2 > /dev/null 2> /dev/null
+}
+
+if [[ ! -z "${FULL_VERSION_TAG}" && ! "${FULL_VERSION_TAG}" =~ ^[0-9]+\.[0-9]+\.[0-9]+ ]]; then
+  >&2 echo "Version tag does not match SEMVER style (X.Y.Z)"
+  exit 1
+fi
+
+# Check version does not already exist
+VERSION_LICENSE_FILE="THIRD-PARTY-LICENSES.txt"
+if [[ -z "${FULL_VERSION_TAG}" ]]; then
+  FULL_VERSION_TAG="$(cat ${VERSION_LICENSE_FILE} | head -n1 | grep -Po '(?<=version )\d+\.\d+\.\d+')"
+fi
+
+if [ -z "$FULL_VERSION_TAG" ]; then
+  >&2 echo "Could not find version inside ${VERSION_LICENSE_FILE} file."
+  exit 1
+fi
+
+echo "Deploying version ${FULL_VERSION_TAG}"
+
+if docker_tag_exists "$REMOTE_REPOSITORY" "$FULL_VERSION_TAG"; then
+  >&2 echo "Tag ${REMOTE_REPOSITORY}:${FULL_VERSION_TAG} already exists. Cannot overwrite an existing image."
+ exit 1 +fi + +# Build the image +FULL_VERSION_IMAGE="${REMOTE_REPOSITORY}:${FULL_VERSION_TAG}" +docker build . -f Dockerfile -t "${FULL_VERSION_IMAGE}" + +# Get the minor and major versions +[[ $FULL_VERSION_TAG =~ ^[0-9]+\.[0-9]+ ]] && MINOR_VERSION_IMAGE="${REMOTE_REPOSITORY}:${BASH_REMATCH[0]}" +[[ $FULL_VERSION_TAG =~ ^[0-9]+ ]] && MAJOR_VERSION_IMAGE="${REMOTE_REPOSITORY}:${BASH_REMATCH[0]}" + +# Re-tag the image with major and minor versions +docker tag "${FULL_VERSION_IMAGE}" "${MINOR_VERSION_IMAGE}" +echo "Tagged image with ${MINOR_VERSION_IMAGE}" +docker tag "${FULL_VERSION_IMAGE}" "${MAJOR_VERSION_IMAGE}" +echo "Tagged image with ${MAJOR_VERSION_IMAGE}" + +# Push to the remote repository +if [ "${DRYRUN}" == "false" ]; then + docker --config "$DOCKER_CONFIG_PATH" push "${FULL_VERSION_IMAGE}" + echo "Successfully pushed tag ${FULL_VERSION_IMAGE} to Docker Hub" + + docker --config "$DOCKER_CONFIG_PATH" push "${MINOR_VERSION_IMAGE}" + echo "Successfully pushed tag ${MINOR_VERSION_IMAGE} to Docker Hub" + + docker --config "$DOCKER_CONFIG_PATH" push "${MAJOR_VERSION_IMAGE}" + echo "Successfully pushed tag ${MAJOR_VERSION_IMAGE} to Docker Hub" +else + echo "Dry run detected. Not pushing images." +fi \ No newline at end of file diff --git a/components/aws/sagemaker/codebuild/unit-test.buildspec.yml b/components/aws/sagemaker/codebuild/unit-test.buildspec.yml new file mode 100644 index 00000000000..4d688494470 --- /dev/null +++ b/components/aws/sagemaker/codebuild/unit-test.buildspec.yml @@ -0,0 +1,21 @@ +version: 0.2 +phases: + build: + commands: + - cd components/aws + - docker build . -f ./sagemaker/tests/unit_tests/Dockerfile -t amazon/unit-test-image --quiet + + # Run the container and copy the results to /tmp + # Passes all host environment variables through to the container + - docker run --name unit-test-container $(env | cut -f1 -d= | sed 's/^/-e /') amazon/unit-test-image + + post_build: + commands: + - docker cp unit-test-container:/app/tests/unit_tests/unit_tests.log /tmp/results.xml + - docker rm -f unit-test-container + +reports: + UnitTestReport: + files: + - "results.xml" + base-directory: "/tmp" \ No newline at end of file diff --git a/components/aws/sagemaker/common/_utils.py b/components/aws/sagemaker/common/_utils.py index 3dc2e3c7ba6..18e005d9e99 100644 --- a/components/aws/sagemaker/common/_utils.py +++ b/components/aws/sagemaker/common/_utils.py @@ -13,13 +13,16 @@ import os import argparse from time import gmtime, strftime +from distutils.util import strtobool import time import string import random import json import yaml +import re import boto3 +import botocore from botocore.exceptions import ClientError from sagemaker.amazon.amazon_estimator import get_image_uri @@ -52,15 +55,43 @@ # Get current directory to open templates __cwd__ = os.path.realpath(os.path.join(os.getcwd(), os.path.dirname(__file__))) + +def nullable_string_argument(value): + value = value.strip() + if not value: + return None + return value + + def add_default_client_arguments(parser): - parser.add_argument('--region', type=str.strip, required=True, help='The region where the training job launches.') - parser.add_argument('--endpoint_url', type=str.strip, required=False, help='The URL to use when communicating with the Sagemaker service.') + parser.add_argument('--region', type=str, required=True, help='The region where the training job launches.') + parser.add_argument('--endpoint_url', type=nullable_string_argument, required=False, help='The URL to use when communicating with the 
Sagemaker service.') + + +def get_component_version(): + """Get component version from the first line of License file""" + component_version = 'NULL' + + # Get license file using known common directory + license_file_path = os.path.abspath(os.path.join(__cwd__, '../THIRD-PARTY-LICENSES.txt')) + with open(license_file_path, 'r') as license_file: + version_match = re.search('Amazon SageMaker Components for Kubeflow Pipelines; version (([0-9]+[.])+[0-9]+)', + license_file.readline()) + if version_match is not None: + component_version = version_match.group(1) + + return component_version + def get_sagemaker_client(region, endpoint_url=None): """Builds a client to the AWS SageMaker API.""" - client = boto3.client('sagemaker', region_name=region, endpoint_url=endpoint_url) + session_config = botocore.config.Config( + user_agent='sagemaker-on-kubeflow-pipelines-v{}'.format(get_component_version()) + ) + client = boto3.client('sagemaker', region_name=region, endpoint_url=endpoint_url, config=session_config) return client + def create_training_job_request(args): ### Documentation: https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/sagemaker.html#SageMaker.Client.create_training_job with open(os.path.join(__cwd__, 'train.template.yaml'), 'r') as f: @@ -70,7 +101,7 @@ def create_training_job_request(args): request['TrainingJobName'] = job_name request['RoleArn'] = args['role'] - request['HyperParameters'] = args['hyperparameters'] + request['HyperParameters'] = create_hyperparameters(args['hyperparameters']) request['AlgorithmSpecification']['TrainingInputMode'] = args['training_input_mode'] ### Update training image (for BYOC and built-in algorithms) or algorithm resource name @@ -108,19 +139,14 @@ def create_training_job_request(args): ### Update or pop VPC configs if args['vpc_security_group_ids'] and args['vpc_subnets']: - request['VpcConfig']['SecurityGroupIds'] = [args['vpc_security_group_ids']] - request['VpcConfig']['Subnets'] = [args['vpc_subnets']] + request['VpcConfig']['SecurityGroupIds'] = args['vpc_security_group_ids'].split(',') + request['VpcConfig']['Subnets'] = args['vpc_subnets'].split(',') else: request.pop('VpcConfig') ### Update input channels, must have at least one specified if len(args['channels']) > 0: request['InputDataConfig'] = args['channels'] - # Max number of input channels/data locations is 20, but currently only 8 data location parameters are exposed separately. 
- # Source: Input data configuration description in the SageMaker create training job form - for i in range(1, len(args['channels']) + 1): - if args['data_location_' + str(i)]: - request['InputDataConfig'][i-1]['DataSource']['S3DataSource']['S3Uri'] = args['data_location_' + str(i)] else: logging.error("Must specify at least one input channel.") raise Exception('Could not create job request') @@ -167,7 +193,7 @@ def create_training_job(client, args): raise Exception(e.response['Error']['Message']) -def wait_for_training_job(client, training_job_name): +def wait_for_training_job(client, training_job_name, poll_interval=30): while(True): response = client.describe_training_job(TrainingJobName=training_job_name) status = response['TrainingJobStatus'] @@ -179,7 +205,7 @@ def wait_for_training_job(client, training_job_name): logging.info('Training failed with the following error: {}'.format(message)) raise Exception('Training job failed') logging.info("Training job is still in status: " + status) - time.sleep(30) + time.sleep(poll_interval) def get_model_artifacts_from_job(client, job_name): @@ -190,9 +216,9 @@ def get_model_artifacts_from_job(client, job_name): def get_image_from_job(client, job_name): info = client.describe_training_job(TrainingJobName=job_name) - try: + if 'TrainingImage' in info['AlgorithmSpecification']: image = info['AlgorithmSpecification']['TrainingImage'] - except: + else: algorithm_name = info['AlgorithmSpecification']['AlgorithmName'] image = client.describe_algorithm(AlgorithmName=algorithm_name)['TrainingSpecification']['TrainingImage'] @@ -200,6 +226,16 @@ def get_image_from_job(client, job_name): def create_model(client, args): + request = create_model_request(args) + try: + create_model_response = client.create_model(**request) + logging.info("Model Config Arn: " + create_model_response['ModelArn']) + return create_model_response['ModelArn'] + except ClientError as e: + raise Exception(e.response['Error']['Message']) + + +def create_model_request(args): ### Documentation: https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/sagemaker.html#SageMaker.Client.create_model with open(os.path.join(__cwd__, 'model.template.yaml'), 'r') as f: request = yaml.safe_load(f) @@ -239,8 +275,8 @@ def create_model(client, args): ### Update or pop VPC configs if args['vpc_security_group_ids'] and args['vpc_subnets']: - request['VpcConfig']['SecurityGroupIds'] = [args['vpc_security_group_ids']] - request['VpcConfig']['Subnets'] = [args['vpc_subnets']] + request['VpcConfig']['SecurityGroupIds'] = args['vpc_security_group_ids'].split(',') + request['VpcConfig']['Subnets'] = args['vpc_subnets'].split(',') else: request.pop('VpcConfig') @@ -248,19 +284,14 @@ def create_model(client, args): for key, val in args['tags'].items(): request['Tags'].append({'Key': key, 'Value': val}) - create_model_response = client.create_model(**request) - - logging.info("Model Config Arn: " + create_model_response['ModelArn']) - return create_model_response['ModelArn'] - + return request def deploy_model(client, args): endpoint_config_name = create_endpoint_config(client, args) endpoint_name = create_endpoint(client, args['region'], args['endpoint_name'], endpoint_config_name, args['endpoint_tags']) return endpoint_name - -def create_endpoint_config(client, args): +def create_endpoint_config_request(args): ### Documentation: https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/sagemaker.html#SageMaker.Client.create_endpoint_config with open(os.path.join(__cwd__, 
'endpoint_config.template.yaml'), 'r') as f: request = yaml.safe_load(f) @@ -299,12 +330,16 @@ def create_endpoint_config(client, args): for key, val in args['endpoint_config_tags'].items(): request['Tags'].append({'Key': key, 'Value': val}) + return request + +def create_endpoint_config(client, args): + request = create_endpoint_config_request(args) try: create_endpoint_config_response = client.create_endpoint_config(**request) logging.info("Endpoint configuration in SageMaker: https://{}.console.aws.amazon.com/sagemaker/home?region={}#/endpointConfig/{}" - .format(args['region'], args['region'], endpoint_config_name)) + .format(args['region'], args['region'], request['EndpointConfigName'])) logging.info("Endpoint Config Arn: " + create_endpoint_config_response['EndpointConfigArn']) - return endpoint_config_name + return request['EndpointConfigName'] except ClientError as e: raise Exception(e.response['Error']['Message']) @@ -461,7 +496,7 @@ def create_hyperparameter_tuning_job_request(args): request['HyperParameterTuningJobConfig']['ParameterRanges']['CategoricalParameterRanges'] = args['categorical_parameters'] request['HyperParameterTuningJobConfig']['TrainingJobEarlyStoppingType'] = args['early_stopping_type'] - request['TrainingJobDefinition']['StaticHyperParameters'] = args['static_parameters'] + request['TrainingJobDefinition']['StaticHyperParameters'] = create_hyperparameters(args['static_parameters']) request['TrainingJobDefinition']['AlgorithmSpecification']['TrainingInputMode'] = args['training_input_mode'] ### Update training image (for BYOC) or algorithm resource name @@ -499,19 +534,14 @@ def create_hyperparameter_tuning_job_request(args): ### Update or pop VPC configs if args['vpc_security_group_ids'] and args['vpc_subnets']: - request['TrainingJobDefinition']['VpcConfig']['SecurityGroupIds'] = [args['vpc_security_group_ids']] - request['TrainingJobDefinition']['VpcConfig']['Subnets'] = [args['vpc_subnets']] + request['TrainingJobDefinition']['VpcConfig']['SecurityGroupIds'] = args['vpc_security_group_ids'].split(',') + request['TrainingJobDefinition']['VpcConfig']['Subnets'] = args['vpc_subnets'].split(',') else: request['TrainingJobDefinition'].pop('VpcConfig') ### Update input channels, must have at least one specified if len(args['channels']) > 0: request['TrainingJobDefinition']['InputDataConfig'] = args['channels'] - # Max number of input channels/data locations is 20, but currently only 8 data location parameters are exposed separately. 
- # Source: Input data configuration description in the SageMaker create hyperparameter tuning job form - for i in range(1, len(args['channels']) + 1): - if args['data_location_' + str(i)]: - request['TrainingJobDefinition']['InputDataConfig'][i-1]['DataSource']['S3DataSource']['S3Uri'] = args['data_location_' + str(i)] else: logging.error("Must specify at least one input channel.") raise Exception('Could not make job request') @@ -562,7 +592,7 @@ def create_hyperparameter_tuning_job(client, args): """Create a Sagemaker HPO job""" request = create_hyperparameter_tuning_job_request(args) try: - job_arn = client.create_hyper_parameter_tuning_job(**request) + client.create_hyper_parameter_tuning_job(**request) hpo_job_name = request['HyperParameterTuningJobName'] logging.info("Created Hyperparameter Training Job with name: " + hpo_job_name) logging.info("HPO job in SageMaker: https://{}.console.aws.amazon.com/sagemaker/home?region={}#/hyper-tuning-jobs/{}" @@ -574,7 +604,7 @@ def create_hyperparameter_tuning_job(client, args): raise Exception(e.response['Error']['Message']) -def wait_for_hyperparameter_training_job(client, hpo_job_name): +def wait_for_hyperparameter_training_job(client, hpo_job_name, poll_interval=30): ### Wait until the job finishes while(True): response = client.describe_hyper_parameter_tuning_job(HyperParameterTuningJobName=hpo_job_name) @@ -587,7 +617,7 @@ def wait_for_hyperparameter_training_job(client, hpo_job_name): logging.error('Hyperparameter tuning failed with the following error: {}'.format(message)) raise Exception('Hyperparameter tuning job failed') logging.info("Hyperparameter tuning job is still in status: " + status) - time.sleep(30) + time.sleep(poll_interval) def get_best_training_job_and_hyperparameters(client, hpo_job_name): @@ -601,6 +631,16 @@ def get_best_training_job_and_hyperparameters(client, hpo_job_name): def create_workteam(client, args): + try: + request = create_workteam_request(args) + response = client.create_workteam(**request) + portal = client.describe_workteam(WorkteamName=args['team_name'])['Workteam']['SubDomain'] + logging.info("Labeling portal: " + portal) + return response['WorkteamArn'] + except ClientError as e: + raise Exception(e.response['Error']['Message']) + +def create_workteam_request(args): ### Documentation: https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/sagemaker.html#SageMaker.Client.create_workteam """Create a workteam""" with open(os.path.join(__cwd__, 'workteam.template.yaml'), 'r') as f: @@ -620,13 +660,7 @@ def create_workteam(client, args): for key, val in args['tags'].items(): request['Tags'].append({'Key': key, 'Value': val}) - try: - response = client.create_workteam(**request) - portal = client.describe_workteam(WorkteamName=args['team_name'])['Workteam']['SubDomain'] - logging.info("Labeling portal: " + portal) - return response['WorkteamArn'] - except ClientError as e: - raise Exception(e.response['Error']['Message']) + return request def create_labeling_job_request(args): @@ -801,9 +835,17 @@ def get_labeling_job_outputs(client, labeling_job_name, auto_labeling): if auto_labeling: active_learning_model_arn = info['LabelingJobOutput']['FinalActiveLearningModelArn'] else: - active_learning_model_arn = '' + active_learning_model_arn = ' ' return output_manifest, active_learning_model_arn +def create_hyperparameters(hyperparam_args): + # Validate all values are strings + for key, value in hyperparam_args.items(): + if not isinstance(value, str): + raise Exception(f"Could not parse 
hyperparameters. Value for {key} was not a string.")
+
+    return hyperparam_args
+
 def enable_spot_instance_support(training_job_config, args):
     if args['spot_instance']:
         training_job_config['EnableManagedSpotTraining'] = args['spot_instance']
@@ -827,35 +869,15 @@ def enable_spot_instance_support(training_job_config, args):
 def id_generator(size=4, chars=string.ascii_uppercase + string.digits):
     return ''.join(random.choice(chars) for _ in range(size))
 
-
-def str_to_bool(s):
-    if s.lower().strip() == 'true':
-        return True
-    elif s.lower().strip() == 'false':
-        return False
-    else:
-        raise argparse.ArgumentTypeError('"True" or "False" expected.')
-
-def str_to_int(s):
-    if s:
-        return int(s)
-    else:
-        return 0
-
-def str_to_float(s):
-    if s:
-        return float(s)
-    else:
-        return 0.0
-
-def str_to_json_dict(s):
-    if s != '':
-        return json.loads(s)
-    else:
-        return {}
-
-def str_to_json_list(s):
-    if s != '':
-        return json.loads(s)
-    else:
-        return []
+def yaml_or_json_str(str):
+    if str == "" or str is None:
+        return None
+    try:
+        return json.loads(str)
+    except:
+        return yaml.safe_load(str)
+
+def str_to_bool(str):
+    # This distutils function returns an integer representation of the boolean
+    # rather than a True/False value. This simply hard casts it.
+    return bool(strtobool(str))
diff --git a/components/aws/sagemaker/deploy/README.md b/components/aws/sagemaker/deploy/README.md
new file mode 100644
index 00000000000..c69525cea96
--- /dev/null
+++ b/components/aws/sagemaker/deploy/README.md
@@ -0,0 +1,62 @@
+# SageMaker Hosting Services - Create Endpoint Kubeflow Pipeline component
+
+## Summary
+Component to deploy a model in SageMaker Hosting Service from a Kubeflow Pipelines workflow.
+
+## Details
+Deploying a model using Amazon SageMaker hosting services is a three-step process:
+
+1. **Create a model in Amazon SageMaker** - Specify the S3 path where the model artifacts are stored and the Docker registry path for the image that contains the inference code.
+2. **Create an endpoint configuration for an HTTPS endpoint** - Specify the name of the model in production variants and the type of instance that you want Amazon SageMaker to launch to host the model.
+3. **Create an HTTPS endpoint** - Launch the ML compute instances and deploy the model as specified in the endpoint configuration.
+
+This component handles Steps 2 and 3. Step 1 can be done using the [create model component](https://github.com/kubeflow/pipelines/tree/master/components/aws/sagemaker/model) for AWS SageMaker.
+
+## Intended Use
+Create an endpoint in AWS SageMaker Hosting Service for model deployment.
+
+## Runtime Arguments
+Argument | Description | Optional (in pipeline definition) | Optional (in UI) | Data type | Accepted values | Default |
+:--- | :---------- | :---------- | :---------- | :----------| :---------- | :----------|
+region | The region where the endpoint is created | No | No | String | | |
+endpoint_url | The endpoint URL for the private link VPC endpoint | Yes | Yes | String | | |
+endpoint_config_name | The name of the endpoint configuration | Yes | Yes | String | | |
+endpoint_config_tags | Key-value pairs to tag endpoint configurations in AWS | Yes | Yes | Dict | | {} |
+endpoint_tags | Key-value pairs to tag the Hosting endpoint in AWS | Yes | Yes | Dict | | {} |
+endpoint_name | The name of the endpoint. The name must be unique within an AWS Region in your AWS account | Yes | Yes | String | | |
+
+In SageMaker, you can create an endpoint that can host multiple models.
The set of parameters below represents a production variant. A production variant identifies a model that you want to host and the resources (e.g., instance type, initial traffic distribution, etc.) to deploy for hosting it. You must specify at least one production variant to create an endpoint.
+
+Argument | Description | Optional (in pipeline definition) | Optional (in UI) | Data type | Accepted values | Default |
+:--- | :---------- | :---------- | :---------- | :----------| :---------- | :----------|
+model_name_[1, 3] | The name of the model that you want to host. This is the name that you specified when creating the model | No | No | String | | |
+variant_name_[1, 3] | The name of the production variant | Yes | Yes | String | | variant_name_[1, 3] |
+instance_type_[1, 3] | The ML compute instance type | Yes | Yes | String | ml.m4.xlarge, ml.m4.2xlarge, ml.m4.4xlarge, ml.m4.10xlarge, ml.m4.16xlarge, ml.m5.large, ml.m5.xlarge, ml.m5.2xlarge, ml.m5.4xlarge, ml.m5.12xlarge, ml.m5.24xlarge, ml.c4.xlarge, ml.c4.2xlarge, ml.c4.4xlarge, ml.c4.8xlarge, ml.p2.xlarge, ml.p2.8xlarge, ml.p2.16xlarge, ml.p3.2xlarge, ml.p3.8xlarge, ml.p3.16xlarge, ml.c5.xlarge, ml.c5.2xlarge, ml.c5.4xlarge, ml.c5.9xlarge, ml.c5.18xlarge [and many more](https://aws.amazon.com/sagemaker/pricing/instance-types/)| ml.m4.xlarge |
+initial_instance_count_[1, 3] | Number of instances to launch initially | Yes | Yes | Integer | ≥ 1 | 1 |
+initial_variant_weight_[1, 3] | Determines the initial traffic distribution among all of the models that you specify in the endpoint configuration. The traffic to a production variant is determined by the ratio of the VariantWeight to the sum of all VariantWeight values across all ProductionVariants. | Yes | Yes | Float | Minimum value of 0 | |
+accelerator_type_[1, 3] | The size of the Elastic Inference (EI) instance to use for the production variant | Yes | Yes | String| ml.eia1.medium, ml.eia1.large, ml.eia1.xlarge | |
+
+Notes:
+* Please use the links in the [Resources section](#Resources) for detailed information on each input parameter and the SageMaker APIs used in this component
+* The parameters `model_name_1` through `model_name_3` are intended to be the outputs of the [create model component](https://github.com/kubeflow/pipelines/tree/master/components/aws/sagemaker/model) from previous steps in the pipeline. `model_name_[1, 3]` and the other parameters for a production variant can also be specified directly if the component is used on its own, as in the sketch below.
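+
+### Standalone usage (sketch)
+A minimal, illustrative sketch of calling this component from a pipeline definition. The component URL points at the master branch of this repository; the region and model name are placeholders, and `model_name_1` would typically be wired to the output of the create model component rather than a literal string:
+```python
+import kfp
+from kfp import components, dsl
+
+# Load the deploy component from its component.yaml definition.
+sagemaker_deploy_op = components.load_component_from_url(
+    'https://raw.githubusercontent.com/kubeflow/pipelines/master/components/aws/sagemaker/deploy/component.yaml')
+
+@dsl.pipeline(name='SageMaker deploy sketch')
+def deploy_pipeline(
+    region='us-east-1',                        # placeholder region
+    model_name='my-existing-sagemaker-model',  # placeholder; usually the create model component output
+):
+    # Creates the endpoint configuration and the endpoint (steps 2 and 3),
+    # hosting the given model as production variant 1.
+    sagemaker_deploy_op(
+        region=region,
+        model_name_1=model_name,
+    )
+
+if __name__ == '__main__':
+    kfp.compiler.Compiler().compile(deploy_pipeline, __file__ + '.zip')
+```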
+
+## Outputs
+Name | Description
+:--- | :----------
+endpoint_name | The name of the HTTPS endpoint where client applications can send requests using the [InvokeEndpoint](https://docs.aws.amazon.com/sagemaker/latest/APIReference/API_InvokeEndpoint.html) API
+
+## Requirements
+* [Kubeflow pipelines SDK](https://www.kubeflow.org/docs/pipelines/sdk/install-sdk/)
+* [Kubeflow set-up](https://www.kubeflow.org/docs/aws/deploy/install-kubeflow/)
+
+## Samples
+### Integrated into a pipeline
+MNIST Classification pipeline: [Pipeline](https://github.com/kubeflow/pipelines/blob/master/samples/contrib/aws-samples/mnist-kmeans-sagemaker/mnist-classification-pipeline.py) | [Steps](https://github.com/kubeflow/pipelines/blob/master/samples/contrib/aws-samples/mnist-kmeans-sagemaker/README.md)
+
+## Resources
+* Create Endpoint Configuration
+  * [Create Endpoint Configuration API documentation](https://docs.aws.amazon.com/sagemaker/latest/APIReference/API_CreateEndpointConfig.html)
+  * [Boto3 API reference](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/sagemaker.html#SageMaker.Client.create_endpoint_config)
+* Create Endpoint
+  * [Create Endpoint API documentation](https://docs.aws.amazon.com/sagemaker/latest/APIReference/API_CreateEndpoint.html)
+  * [Boto3 API reference](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/sagemaker.html#SageMaker.Client.create_endpoint)
diff --git a/components/aws/sagemaker/deploy/component.yaml b/components/aws/sagemaker/deploy/component.yaml
index fce7a4f3bd9..349bfb62a8e 100644
--- a/components/aws/sagemaker/deploy/component.yaml
+++ b/components/aws/sagemaker/deploy/component.yaml
@@ -4,83 +4,108 @@ description: |
 inputs:
 - name: region
   description: 'The region to deploy your model endpoints.'
default: 'variant-name-3' + type: String - name: model_name_3 description: 'The model name used for endpoint deployment' default: '' + type: String - name: initial_instance_count_3 description: 'Number of instances to launch initially.' default: '1' + type: Integer - name: instance_type_3 description: 'The ML compute instance type.' default: 'ml.m4.xlarge' + type: String - name: initial_variant_weight_3 description: 'Determines initial traffic distribution among all of the models that you specify in the endpoint configuration.' default: '1.0' + type: Float - name: accelerator_type_3 description: 'The size of the Elastic Inference (EI) instance to use for the production variant.' default: '' + type: String - name: resource_encryption_key description: 'The AWS KMS key that Amazon SageMaker uses to encrypt data on the storage volume attached to the ML compute instance that hosts the endpoint.' default: '' + type: String - name: endpoint_url description: 'The endpoint URL for the private link VPC endpoint.' default: '' + type: String - name: endpoint_config_tags description: 'Key-value pairs to categorize AWS resources.' default: '{}' + type: JsonObject - name: endpoint_name description: 'The name of the endpoint.' default: '' + type: String - name: endpoint_tags description: 'Key-value pairs to categorize AWS resources.' default: '{}' + type: JsonObject outputs: - {name: endpoint_name, description: 'Endpoint name'} implementation: container: - image: redbackthomson/aws-kubeflow-sagemaker:20191003 - command: ['python'] + image: amazon/aws-sagemaker-kfp-components:0.3.1 + command: ['python3'] args: [ deploy.py, --region, {inputValue: region}, diff --git a/components/aws/sagemaker/deploy/src/deploy.py b/components/aws/sagemaker/deploy/src/deploy.py index 519b3f5d0b2..1f253227565 100644 --- a/components/aws/sagemaker/deploy/src/deploy.py +++ b/components/aws/sagemaker/deploy/src/deploy.py @@ -19,36 +19,29 @@ def create_parser(): parser = argparse.ArgumentParser(description='SageMaker Training Job') _utils.add_default_client_arguments(parser) - parser.add_argument('--endpoint_config_name', type=str.strip, required=False, help='The name of the endpoint configuration.', default='') - parser.add_argument('--variant_name_1', type=str.strip, required=False, help='The name of the production variant.', default='variant-name-1') - parser.add_argument('--model_name_1', type=str.strip, required=True, help='The model name used for endpoint deployment.') - parser.add_argument('--initial_instance_count_1', type=_utils.str_to_int, required=False, help='Number of instances to launch initially.', default=1) - parser.add_argument('--instance_type_1', choices=['ml.m4.xlarge', 'ml.m4.2xlarge', 'ml.m4.4xlarge', 'ml.m4.10xlarge', 'ml.m4.16xlarge', 'ml.m5.large', 'ml.m5.xlarge', 'ml.m5.2xlarge', 'ml.m5.4xlarge', - 'ml.m5.12xlarge', 'ml.m5.24xlarge', 'ml.c4.xlarge', 'ml.c4.2xlarge', 'ml.c4.4xlarge', 'ml.c4.8xlarge', 'ml.p2.xlarge', 'ml.p2.8xlarge', 'ml.p2.16xlarge', 'ml.p3.2xlarge', 'ml.p3.8xlarge', 'ml.p3.16xlarge', - 'ml.c5.xlarge', 'ml.c5.2xlarge', 'ml.c5.4xlarge', 'ml.c5.9xlarge', 'ml.c5.18xlarge', ''], type=str.strip, required=False, help='The ML compute instance type.', default='ml.m4.xlarge') - parser.add_argument('--initial_variant_weight_1', type=_utils.str_to_float, required=False, help='Determines initial traffic distribution among all of the models that you specify in the endpoint configuration.', default=1.0) - parser.add_argument('--accelerator_type_1', choices=['ml.eia1.medium', 'ml.eia1.large', 
'ml.eia1.xlarge', ''], type=str.strip, required=False, help='The size of the Elastic Inference (EI) instance to use for the production variant.', default='') - parser.add_argument('--variant_name_2', type=str.strip, required=False, help='The name of the production variant.', default='variant-name-2') - parser.add_argument('--model_name_2', type=str.strip, required=False, help='The model name used for endpoint deployment.', default='') - parser.add_argument('--initial_instance_count_2', type=_utils.str_to_int, required=False, help='Number of instances to launch initially.', default=1) - parser.add_argument('--instance_type_2', choices=['ml.m4.xlarge', 'ml.m4.2xlarge', 'ml.m4.4xlarge', 'ml.m4.10xlarge', 'ml.m4.16xlarge', 'ml.m5.large', 'ml.m5.xlarge', 'ml.m5.2xlarge', 'ml.m5.4xlarge', - 'ml.m5.12xlarge', 'ml.m5.24xlarge', 'ml.c4.xlarge', 'ml.c4.2xlarge', 'ml.c4.4xlarge', 'ml.c4.8xlarge', 'ml.p2.xlarge', 'ml.p2.8xlarge', 'ml.p2.16xlarge', 'ml.p3.2xlarge', 'ml.p3.8xlarge', 'ml.p3.16xlarge', - 'ml.c5.xlarge', 'ml.c5.2xlarge', 'ml.c5.4xlarge', 'ml.c5.9xlarge', 'ml.c5.18xlarge', ''], type=str.strip, required=False, help='The ML compute instance type.', default='ml.m4.xlarge') - parser.add_argument('--initial_variant_weight_2', type=_utils.str_to_float, required=False, help='Determines initial traffic distribution among all of the models that you specify in the endpoint configuration.', default=1.0) - parser.add_argument('--accelerator_type_2', choices=['ml.eia1.medium', 'ml.eia1.large', 'ml.eia1.xlarge', ''], type=str.strip, required=False, help='The size of the Elastic Inference (EI) instance to use for the production variant.', default='') - parser.add_argument('--variant_name_3', type=str.strip, required=False, help='The name of the production variant.', default='variant-name-3') - parser.add_argument('--model_name_3', type=str.strip, required=False, help='The model name used for endpoint deployment.', default='') - parser.add_argument('--initial_instance_count_3', type=_utils.str_to_int, required=False, help='Number of instances to launch initially.', default=1) - parser.add_argument('--instance_type_3', choices=['ml.m4.xlarge', 'ml.m4.2xlarge', 'ml.m4.4xlarge', 'ml.m4.10xlarge', 'ml.m4.16xlarge', 'ml.m5.large', 'ml.m5.xlarge', 'ml.m5.2xlarge', 'ml.m5.4xlarge', - 'ml.m5.12xlarge', 'ml.m5.24xlarge', 'ml.c4.xlarge', 'ml.c4.2xlarge', 'ml.c4.4xlarge', 'ml.c4.8xlarge', 'ml.p2.xlarge', 'ml.p2.8xlarge', 'ml.p2.16xlarge', 'ml.p3.2xlarge', 'ml.p3.8xlarge', 'ml.p3.16xlarge', - 'ml.c5.xlarge', 'ml.c5.2xlarge', 'ml.c5.4xlarge', 'ml.c5.9xlarge', 'ml.c5.18xlarge', ''], type=str.strip, required=False, help='The ML compute instance type.', default='ml.m4.xlarge') - parser.add_argument('--initial_variant_weight_3', type=_utils.str_to_float, required=False, help='Determines initial traffic distribution among all of the models that you specify in the endpoint configuration.', default=1.0) - parser.add_argument('--accelerator_type_3', choices=['ml.eia1.medium', 'ml.eia1.large', 'ml.eia1.xlarge', ''], type=str.strip, required=False, help='The size of the Elastic Inference (EI) instance to use for the production variant.', default='') - parser.add_argument('--resource_encryption_key', type=str.strip, required=False, help='The AWS KMS key that Amazon SageMaker uses to encrypt data on the storage volume attached to the ML compute instance(s).', default='') - parser.add_argument('--endpoint_config_tags', type=_utils.str_to_json_dict, required=False, help='An array of key-value pairs, to categorize AWS resources.', 
default='{}') - - parser.add_argument('--endpoint_name', type=str.strip, required=False, help='The name of the endpoint.', default='') - parser.add_argument('--endpoint_tags', type=_utils.str_to_json_dict, required=False, help='An array of key-value pairs, to categorize AWS resources.', default='{}') + parser.add_argument('--endpoint_config_name', type=str, required=False, help='The name of the endpoint configuration.', default='') + parser.add_argument('--variant_name_1', type=str, required=False, help='The name of the production variant.', default='variant-name-1') + parser.add_argument('--model_name_1', type=str, required=True, help='The model name used for endpoint deployment.') + parser.add_argument('--initial_instance_count_1', type=int, required=False, help='Number of instances to launch initially.', default=1) + parser.add_argument('--instance_type_1', type=str, required=False, help='The ML compute instance type.', default='ml.m4.xlarge') + parser.add_argument('--initial_variant_weight_1', type=float, required=False, help='Determines initial traffic distribution among all of the models that you specify in the endpoint configuration.', default=1.0) + parser.add_argument('--accelerator_type_1', choices=['ml.eia1.medium', 'ml.eia1.large', 'ml.eia1.xlarge', ''], type=str, required=False, help='The size of the Elastic Inference (EI) instance to use for the production variant.', default='') + parser.add_argument('--variant_name_2', type=str, required=False, help='The name of the production variant.', default='variant-name-2') + parser.add_argument('--model_name_2', type=str, required=False, help='The model name used for endpoint deployment.', default='') + parser.add_argument('--initial_instance_count_2', type=int, required=False, help='Number of instances to launch initially.', default=1) + parser.add_argument('--instance_type_2', type=str, required=False, help='The ML compute instance type.', default='ml.m4.xlarge') + parser.add_argument('--initial_variant_weight_2', type=float, required=False, help='Determines initial traffic distribution among all of the models that you specify in the endpoint configuration.', default=1.0) + parser.add_argument('--accelerator_type_2', choices=['ml.eia1.medium', 'ml.eia1.large', 'ml.eia1.xlarge', ''], type=str, required=False, help='The size of the Elastic Inference (EI) instance to use for the production variant.', default='') + parser.add_argument('--variant_name_3', type=str, required=False, help='The name of the production variant.', default='variant-name-3') + parser.add_argument('--model_name_3', type=str, required=False, help='The model name used for endpoint deployment.', default='') + parser.add_argument('--initial_instance_count_3', type=int, required=False, help='Number of instances to launch initially.', default=1) + parser.add_argument('--instance_type_3', type=str, required=False, help='The ML compute instance type.', default='ml.m4.xlarge') + parser.add_argument('--initial_variant_weight_3', type=float, required=False, help='Determines initial traffic distribution among all of the models that you specify in the endpoint configuration.', default=1.0) + parser.add_argument('--accelerator_type_3', choices=['ml.eia1.medium', 'ml.eia1.large', 'ml.eia1.xlarge', ''], type=str, required=False, help='The size of the Elastic Inference (EI) instance to use for the production variant.', default='') + parser.add_argument('--resource_encryption_key', type=str, required=False, help='The AWS KMS key that Amazon SageMaker uses to encrypt data on the 
storage volume attached to the ML compute instance(s).', default='') + parser.add_argument('--endpoint_config_tags', type=_utils.yaml_or_json_str, required=False, help='An array of key-value pairs, to categorize AWS resources.', default={}) + parser.add_argument('--endpoint_name', type=str, required=False, help='The name of the endpoint.', default='') + parser.add_argument('--endpoint_tags', type=_utils.yaml_or_json_str, required=False, help='An array of key-value pairs, to categorize AWS resources.', default={}) return parser diff --git a/components/aws/sagemaker/ground_truth/component.yaml b/components/aws/sagemaker/ground_truth/component.yaml index 47502f4494b..bb25b14a98f 100644 --- a/components/aws/sagemaker/ground_truth/component.yaml +++ b/components/aws/sagemaker/ground_truth/component.yaml @@ -4,92 +4,123 @@ description: | inputs: - name: region description: 'The region where the cluster launches.' + type: String - name: role description: 'The Amazon Resource Name (ARN) that Amazon SageMaker assumes to perform tasks on your behalf.' + type: String - name: job_name description: 'The name of the labeling job.' + type: String - name: label_attribute_name description: 'The attribute name to use for the label in the output manifest file. Default is the job name.' default: '' + type: String - name: manifest_location description: 'The Amazon S3 location of the manifest file that describes the input data objects.' + type: String - name: output_location description: 'The Amazon S3 location to write output data.' + type: String - name: output_encryption_key description: 'The AWS Key Management Service ID of the key used to encrypt the output data.' default: '' + type: String - name: task_type description: 'Built in image classification, bounding box, text classification, or semantic segmentation, or custom. If custom, please provide pre- and post-labeling task lambda functions.' + type: String - name: worker_type description: 'The workteam for data labeling, either public, private, or vendor.' + type: String - name: workteam_arn description: 'The ARN of the work team assigned to complete the tasks.' default: '' + type: String - name: no_adult_content description: 'If true, your data is free of adult content.' default: 'False' + type: Bool - name: no_ppi description: 'If true, your data is free of personally identifiable information.' default: 'False' + type: Bool - name: label_category_config description: 'The S3 URL of the JSON structured file that defines the categories used to label the data objects.' default: '' + type: String - name: max_human_labeled_objects description: 'The maximum number of objects that can be labeled by human workers.' - default: '' + default: '0' + type: Integer - name: max_percent_objects description: 'The maximum number of input data objects that should be labeled.' - default: '' + default: '0' + type: Integer - name: enable_auto_labeling description: 'Enables auto-labeling, only for bounding box, text classification, and image classification.' default: 'False' + type: Bool - name: initial_model_arn description: 'The ARN of the final model used for a previous auto-labeling job.' default: '' + type: String - name: resource_encryption_key description: 'The AWS KMS key that Amazon SageMaker uses to encrypt data on the storage volume attached to the ML compute instance(s).' default: '' + type: String - name: ui_template description: 'The Amazon S3 bucket location of the UI template.' 
+ type: String - name: pre_human_task_function description: 'The ARN of a Lambda function that is run before a data object is sent to a human worker.' default: '' + type: String - name: post_human_task_function description: 'The ARN of a Lambda function implements the logic for annotation consolidation.' default: '' + type: String - name: task_keywords description: 'Keywords used to describe the task so that workers on Amazon Mechanical Turk can discover the task.' default: '' + type: String - name: title description: 'A title for the task for your human workers.' + type: String - name: description description: 'A description of the task for your human workers.' + type: String - name: num_workers_per_object description: 'The number of human workers that will label an object.' + type: Integer - name: time_limit description: 'The amount of time that a worker has to complete a task in seconds' + type: Integer - name: task_availibility description: 'The length of time that a task remains available for labeling by human workers.' default: '' + type: Integer - name: max_concurrent_tasks description: 'The maximum number of data objects that can be labeled by human workers at the same time.' default: '' + type: Integer - name: workforce_task_price description: 'The price that you pay for each task performed by a public worker in USD. Specify to the tenth fractions of a cent. Format as "0.000".' default: '0.000' + type: Float - name: endpoint_url description: 'The endpoint URL for the private link VPC endpoint.' default: '' + type: String - name: tags description: 'Key-value pairs to categorize AWS resources.' default: '{}' + type: JsonObject outputs: - {name: output_manifest_location, description: 'The Amazon S3 bucket location of the manifest file for labeled data.'} - {name: active_learning_model_arn, description: 'The ARN for the most recent Amazon SageMaker model trained as part of automated data labeling.'} implementation: container: - image: redbackthomson/aws-kubeflow-sagemaker:20191003 - command: ['python'] + image: amazon/aws-sagemaker-kfp-components:0.3.1 + command: ['python3'] args: [ ground_truth.py, --region, {inputValue: region}, diff --git a/components/aws/sagemaker/ground_truth/src/ground_truth.py b/components/aws/sagemaker/ground_truth/src/ground_truth.py index 8f6db7d544a..68f7a557dd2 100644 --- a/components/aws/sagemaker/ground_truth/src/ground_truth.py +++ b/components/aws/sagemaker/ground_truth/src/ground_truth.py @@ -19,35 +19,35 @@ def create_parser(): parser = argparse.ArgumentParser(description='SageMaker Ground Truth Job') _utils.add_default_client_arguments(parser) - parser.add_argument('--role', type=str.strip, required=True, help='The Amazon Resource Name (ARN) that Amazon SageMaker assumes to perform tasks on your behalf.') - parser.add_argument('--job_name', type=str.strip, required=True, help='The name of the labeling job.') - parser.add_argument('--label_attribute_name', type=str.strip, required=False, help='The attribute name to use for the label in the output manifest file. 
Default is the job name.', default='') - parser.add_argument('--manifest_location', type=str.strip, required=True, help='The Amazon S3 location of the manifest file that describes the input data objects.') - parser.add_argument('--output_location', type=str.strip, required=True, help='The Amazon S3 location to write output data.') - parser.add_argument('--output_encryption_key', type=str.strip, required=False, help='The AWS KMS key that Amazon SageMaker uses to encrypt the model artifacts.', default='') - parser.add_argument('--task_type', type=str.strip, required=True, help='Built in image classification, bounding box, text classification, or semantic segmentation, or custom. If custom, please provide pre- and post-labeling task lambda functions.') - parser.add_argument('--worker_type', type=str.strip, required=True, help='The workteam for data labeling, either public, private, or vendor.') - parser.add_argument('--workteam_arn', type=str.strip, required=False, help='The ARN of the work team assigned to complete the tasks.') + parser.add_argument('--role', type=str, required=True, help='The Amazon Resource Name (ARN) that Amazon SageMaker assumes to perform tasks on your behalf.') + parser.add_argument('--job_name', type=str, required=True, help='The name of the labeling job.') + parser.add_argument('--label_attribute_name', type=str, required=False, help='The attribute name to use for the label in the output manifest file. Default is the job name.', default='') + parser.add_argument('--manifest_location', type=str, required=True, help='The Amazon S3 location of the manifest file that describes the input data objects.') + parser.add_argument('--output_location', type=str, required=True, help='The Amazon S3 location to write output data.') + parser.add_argument('--output_encryption_key', type=str, required=False, help='The AWS KMS key that Amazon SageMaker uses to encrypt the model artifacts.', default='') + parser.add_argument('--task_type', type=str, required=True, help='Built in image classification, bounding box, text classification, or semantic segmentation, or custom. 
If custom, please provide pre- and post-labeling task lambda functions.')
+  parser.add_argument('--worker_type', type=str, required=True, help='The workteam for data labeling, either public, private, or vendor.')
+  parser.add_argument('--workteam_arn', type=str, required=False, help='The ARN of the work team assigned to complete the tasks.')
   parser.add_argument('--no_adult_content', type=_utils.str_to_bool, required=False, help='If true, your data is free of adult content.', default='False')
   parser.add_argument('--no_ppi', type=_utils.str_to_bool, required=False, help='If true, your data is free of personally identifiable information.', default='False')
-  parser.add_argument('--label_category_config', type=str.strip, required=False, help='The S3 URL of the JSON structured file that defines the categories used to label the data objects.', default='')
-  parser.add_argument('--max_human_labeled_objects', type=_utils.str_to_int, required=False, help='The maximum number of objects that can be labeled by human workers.', default=0)
-  parser.add_argument('--max_percent_objects', type=_utils.str_to_int, required=False, help='The maximum percentatge of input data objects that should be labeled.', default=0)
+  parser.add_argument('--label_category_config', type=str, required=False, help='The S3 URL of the JSON structured file that defines the categories used to label the data objects.', default='')
+  parser.add_argument('--max_human_labeled_objects', type=int, required=False, help='The maximum number of objects that can be labeled by human workers.', default=0)
+  parser.add_argument('--max_percent_objects', type=int, required=False, help='The maximum percentage of input data objects that should be labeled.', default=0)
   parser.add_argument('--enable_auto_labeling', type=_utils.str_to_bool, required=False, help='Enables auto-labeling, only for bounding box, text classification, and image classification.', default=False)
-  parser.add_argument('--initial_model_arn', type=str.strip, required=False, help='The ARN of the final model used for a previous auto-labeling job.', default='')
-  parser.add_argument('--resource_encryption_key', type=str.strip, required=False, help='The AWS KMS key that Amazon SageMaker uses to encrypt data on the storage volume attached to the ML compute instance(s).', default='')
-  parser.add_argument('--ui_template', type=str.strip, required=True, help='The Amazon S3 bucket location of the UI template.')
-  parser.add_argument('--pre_human_task_function', type=str.strip, required=False, help='The ARN of a Lambda function that is run before a data object is sent to a human worker.', default='')
-  parser.add_argument('--post_human_task_function', type=str.strip, required=False, help='The ARN of a Lambda function implements the logic for annotation consolidation.', default='')
-  parser.add_argument('--task_keywords', type=str.strip, required=False, help='Keywords used to describe the task so that workers on Amazon Mechanical Turk can discover the task.', default='')
-  parser.add_argument('--title', type=str.strip, required=True, help='A title for the task for your human workers.')
-  parser.add_argument('--description', type=str.strip, required=True, help='A description of the task for your human workers.')
-  parser.add_argument('--num_workers_per_object', type=_utils.str_to_int, required=True, help='The number of human workers that will label an object.')
-  parser.add_argument('--time_limit', type=_utils.str_to_int, required=True, help='The amount of time that a worker has to complete a task in seconds')
-  parser.add_argument('--task_availibility', type=_utils.str_to_int, required=False, help='The length of time that a task remains available for labelling by human workers.', default=0)
-  parser.add_argument('--max_concurrent_tasks', type=_utils.str_to_int, required=False, help='The maximum number of data objects that can be labeled by human workers at the same time.', default=0)
-  parser.add_argument('--workforce_task_price', type=_utils.str_to_float, required=False, help='The price that you pay for each task performed by a public worker in USD. Specify to the tenth fractions of a cent. Format as "0.000".', default=0.000)
-  parser.add_argument('--tags', type=_utils.str_to_json_dict, required=False, help='An array of key-value pairs, to categorize AWS resources.', default='{}')
+  parser.add_argument('--initial_model_arn', type=str, required=False, help='The ARN of the final model used for a previous auto-labeling job.', default='')
+  parser.add_argument('--resource_encryption_key', type=str, required=False, help='The AWS KMS key that Amazon SageMaker uses to encrypt data on the storage volume attached to the ML compute instance(s).', default='')
+  parser.add_argument('--ui_template', type=str, required=True, help='The Amazon S3 bucket location of the UI template.')
+  parser.add_argument('--pre_human_task_function', type=str, required=False, help='The ARN of a Lambda function that is run before a data object is sent to a human worker.', default='')
+  parser.add_argument('--post_human_task_function', type=str, required=False, help='The ARN of a Lambda function that implements the logic for annotation consolidation.', default='')
+  parser.add_argument('--task_keywords', type=str, required=False, help='Keywords used to describe the task so that workers on Amazon Mechanical Turk can discover the task.', default='')
+  parser.add_argument('--title', type=str, required=True, help='A title for the task for your human workers.')
+  parser.add_argument('--description', type=str, required=True, help='A description of the task for your human workers.')
+  parser.add_argument('--num_workers_per_object', type=int, required=True, help='The number of human workers that will label an object.')
+  parser.add_argument('--time_limit', type=int, required=True, help='The amount of time that a worker has to complete a task in seconds.')
+  parser.add_argument('--task_availibility', type=int, required=False, help='The length of time that a task remains available for labeling by human workers.', default=0)
+  parser.add_argument('--max_concurrent_tasks', type=int, required=False, help='The maximum number of data objects that can be labeled by human workers at the same time.', default=0)
+  parser.add_argument('--workforce_task_price', type=float, required=False, help='The price that you pay for each task performed by a public worker in USD. Specify to the tenth fractions of a cent.
Format as "0.000".', default=0.000) + parser.add_argument('--tags', type=_utils.yaml_or_json_str, required=False, help='An array of key-value pairs, to categorize AWS resources.', default={}) return parser diff --git a/components/aws/sagemaker/hyperparameter_tuning/README.md b/components/aws/sagemaker/hyperparameter_tuning/README.md index 843b908aac7..8718e5fae8a 100644 --- a/components/aws/sagemaker/hyperparameter_tuning/README.md +++ b/components/aws/sagemaker/hyperparameter_tuning/README.md @@ -26,18 +26,17 @@ integer_parameters | The array of IntegerParameterRange objects that specify ran continuous_parameters | The array of ContinuousParameterRange objects that specify ranges of continuous hyperparameters that you want to search | Yes | Yes | List of Dicts | | [] | categorical_parameters | The array of CategoricalParameterRange objects that specify ranges of categorical hyperparameters that you want to search | Yes | Yes | List of Dicts | | [] | channels | A list of dicts specifying the input channels (at least one); refer to [documentation](https://github.com/awsdocs/amazon-sagemaker-developer-guide/blob/master/doc_source/API_Channel.md) for parameters | No | No | List of Dicts | | | -data_location_[1, 8] | The S3 URI of the input data source for channel [1, 8] | Yes | Yes | | | output_location | The Amazon S3 path where you want Amazon SageMaker to store the results of the transform job | No | No | String | | | output_encryption_key | The AWS KMS key that Amazon SageMaker uses to encrypt the model artifacts | Yes | Yes | String | | | -instance_type | The ML compute instance type | Yes | No | String | ml.m4.xlarge, ml.m4.2xlarge, ml.m4.4xlarge, ml.m4.10xlarge, ml.m4.16xlarge, ml.m5.large, ml.m5.xlarge, ml.m5.2xlarge, ml.m5.4xlarge, ml.m5.12xlarge, ml.m5.24xlarge, ml.c4.xlarge, ml.c4.2xlarge, ml.c4.4xlarge, ml.c4.8xlarge, ml.p2.xlarge, ml.p2.8xlarge, ml.p2.16xlarge, ml.p3.2xlarge, ml.p3.8xlarge, ml.p3.16xlarge, ml.c5.xlarge, ml.c5.2xlarge, ml.c5.4xlarge, ml.c5.9xlarge, ml.c5.18xlarge | ml.m4.xlarge | +instance_type | The ML compute instance type | Yes | No | String | ml.m4.xlarge, ml.m4.2xlarge, ml.m4.4xlarge, ml.m4.10xlarge, ml.m4.16xlarge, ml.m5.large, ml.m5.xlarge, ml.m5.2xlarge, ml.m5.4xlarge, ml.m5.12xlarge, ml.m5.24xlarge, ml.c4.xlarge, ml.c4.2xlarge, ml.c4.4xlarge, ml.c4.8xlarge, ml.p2.xlarge, ml.p2.8xlarge, ml.p2.16xlarge, ml.p3.2xlarge, ml.p3.8xlarge, ml.p3.16xlarge, ml.c5.xlarge, ml.c5.2xlarge, ml.c5.4xlarge, ml.c5.9xlarge, ml.c5.18xlarge [and many more](https://aws.amazon.com/sagemaker/pricing/instance-types/)| ml.m4.xlarge | instance_count | The number of ML compute instances to use in each training job | Yes | Yes | Int | ≥ 1 | 1 | -volume_size | The size of the ML storage volume that you want to provision in GB | Yes | Yes | Int | ≥ 1 | 1 | +volume_size | The size of the ML storage volume that you want to provision in GB | Yes | Yes | Int | ≥ 1 | 30 | max_num_jobs | The maximum number of training jobs that a hyperparameter tuning job can launch | No | No | Int | [1, 500] | | max_parallel_jobs | The maximum number of concurrent training jobs that a hyperparameter tuning job can launch | No | No | Int | [1, 10] | | max_run_time | The maximum run time in seconds per training job | Yes | Yes | Int | ≤ 432000 (5 days) | 86400 (1 day) | resource_encryption_key | The AWS KMS key that Amazon SageMaker uses to encrypt data on the storage volume attached to the ML compute instance(s) | Yes | Yes | String | | | -vpc_security_group_ids | The VPC security group IDs, in the form 
sg-xxxxxxxx | Yes | Yes | String | | | -vpc_subnets | The ID of the subnets in the VPC to which you want to connect your hpo job | Yes | Yes | String | | | +vpc_security_group_ids | A comma-delimited list of security group IDs, in the form sg-xxxxxxxx | Yes | Yes | String | | | +vpc_subnets | A comma-delimited list of subnet IDs in the VPC to which you want to connect your hpo job | Yes | Yes | String | | | network_isolation | Isolates the training container if true | Yes | No | Boolean | False, True | True | traffic_encryption | Encrypts all communications between ML compute instances in distributed training if true | Yes | No | Boolean | False, True | False | spot_instance | Use managed spot training if true | Yes | No | Boolean | False, True | False | @@ -52,7 +51,6 @@ Notes: * Specify training image OR algorithm name. Use the image parameter for Bring Your Own Container (BYOC) algorithms, and algorithm name for Amazon built-in algorithms, custom algorithm resources in SageMaker, and algorithms subscribed to from the AWS Marketplace. * Specify VPC security group IDs AND VPC subnets to specify the VPC that you want the training jobs to connect to. * Specify warm start type AND 1 to 5 parent HPO jobs to launch the hyperparameter tuning job with previous jobs as a starting point. -* The parameters, data_location_1 through 8, is intended to be used for inputting the S3 URI outputs from previous steps in the pipeline, for example, from a Ground Truth labeling job. Otherwise, the S3 data location can be specified directly in the channels parameter. ## Outputs Name | Description diff --git a/components/aws/sagemaker/hyperparameter_tuning/component.yaml b/components/aws/sagemaker/hyperparameter_tuning/component.yaml index d5ab8e421a0..396495f0934 100644 --- a/components/aws/sagemaker/hyperparameter_tuning/component.yaml +++ b/components/aws/sagemaker/hyperparameter_tuning/component.yaml @@ -7,125 +7,136 @@ inputs: - name: job_name description: 'The name of the tuning job. Must be unique within the same AWS account and AWS region.' default: '' + type: String - name: role description: 'The Amazon Resource Name (ARN) that Amazon SageMaker assumes to perform tasks on your behalf.' + type: String - name: image description: 'The registry path of the Docker image that contains the training algorithm.' default: '' + type: String - name: algorithm_name description: 'The name of the algorithm resource to use for the hyperparameter tuning job. Do not specify a value for this if using training image.' default: '' + type: String - name: training_input_mode description: 'The input mode that the algorithm supports. File or Pipe.' default: 'File' + type: String - name: metric_definitions description: 'The dictionary of name-regex pairs specify the metrics that the algorithm emits.' default: '{}' + type: JsonObject - name: strategy description: 'How hyperparameter tuning chooses the combinations of hyperparameter values to use for the training job it launches.' default: 'Bayesian' + type: String - name: metric_name description: 'The name of the metric to use for the objective metric.' + type: String - name: metric_type description: 'Whether to minimize or maximize the objective metric.' + type: String - name: early_stopping_type description: 'Whether to use early stopping for training jobs launched by the tuning job.' default: 'Off' + type: String - name: static_parameters description: 'The values of hyperparameters that do not change for the tuning job.' 
    default: '{}'
+   type: JsonObject
  - name: integer_parameters
    description: 'The array of IntegerParameterRange objects that specify ranges of integer hyperparameters that you want to search.'
    default: '[]'
+   type: JsonArray
  - name: continuous_parameters
    description: 'The array of ContinuousParameterRange objects that specify ranges of continuous hyperparameters that you want to search.'
    default: '[]'
+   type: JsonArray
  - name: categorical_parameters
    description: 'The array of CategoricalParameterRange objects that specify ranges of categorical hyperparameters that you want to search.'
    default: '[]'
+   type: JsonArray
  - name: channels
    description: 'A list of dicts specifying the input channels. Must have at least one.'
-  - name: data_location_1
-    description: 'The S3 URI of the input data source for channel 1.'
-    default: ''
-  - name: data_location_2
-    description: 'The S3 URI of the input data source for channel 2.'
-    default: ''
-  - name: data_location_3
-    description: 'The S3 URI of the input data source for channel 3.'
-    default: ''
-  - name: data_location_4
-    description: 'The S3 URI of the input data source for channel 4.'
-    default: ''
-  - name: data_location_5
-    description: 'The S3 URI of the input data source for channel 5.'
-    default: ''
-  - name: data_location_6
-    description: 'The S3 URI of the input data source for channel 6.'
-    default: ''
-  - name: data_location_7
-    description: 'The S3 URI of the input data source for channel 7.'
-    default: ''
-  - name: data_location_8
-    description: 'The S3 URI of the input data source for channel 8.'
-    default: ''
+   type: JsonArray
  - name: output_location
    description: 'The Amazon S3 path where you want Amazon SageMaker to store the model artifacts from the best training job.'
+   type: String
  - name: output_encryption_key
    description: 'The AWS KMS key that Amazon SageMaker uses to encrypt the model artifacts.'
    default: ''
+   type: String
  - name: instance_type
    description: 'The ML compute instance type.'
    default: 'ml.m4.xlarge'
+   type: String
  - name: instance_count
    description: 'The number of ML compute instances to use in each training job.'
    default: '1'
+   type: Integer
  - name: volume_size
    description: 'The size of the ML storage volume that you want to provision.'
-   default: '1'
+   default: '30'
+   type: Integer
  - name: max_num_jobs
    description: 'The maximum number of training jobs that a hyperparameter tuning job can launch.'
+   type: Integer
  - name: max_parallel_jobs
    description: 'The maximum number of concurrent training jobs that a hyperparameter tuning job can launch.'
+   type: Integer
  - name: max_run_time
    description: 'The maximum run time in seconds per training job.'
    default: '86400'
+   type: Integer
  - name: resource_encryption_key
    description: 'The AWS KMS key that Amazon SageMaker uses to encrypt data on the storage volume attached to the ML compute instance(s).'
    default: ''
+   type: String
  - name: vpc_security_group_ids
    description: 'The VPC security group IDs, in the form sg-xxxxxxxx.'
    default: ''
+   type: String
  - name: vpc_subnets
    description: 'The ID of the subnets in the VPC to which you want to connect your hpo job.'
    default: ''
+   type: String
  - name: network_isolation
    description: 'Isolates the training container.'
    default: 'True'
+   type: Bool
  - name: traffic_encryption
    description: 'Encrypts all communications between ML compute instances in distributed training.'
    default: 'False'
+   type: Bool
  - name: spot_instance
    description: 'Use managed spot training.'
default: 'False' + type: Bool - name: max_wait_time description: 'The maximum time in seconds you are willing to wait for a managed spot training job to complete.' default: '86400' + type: Integer - name: checkpoint_config description: 'Dictionary of information about the output location for managed spot training checkpoint data.' default: '{}' + type: JsonObject - name: warm_start_type description: 'Specifies either "IdenticalDataAndAlgorithm" or "TransferLearning"' default: '' + type: String - name: parent_hpo_jobs description: 'List of previously completed or stopped hyperparameter tuning jobs to be used as a starting point.' default: '' + type: String - name: endpoint_url description: 'The endpoint URL for the private link VPC endpoint.' default: '' + type: String - name: tags description: 'Key-value pairs, to categorize AWS resources.' default: '{}' + type: JsonObject outputs: - name: hpo_job_name description: 'The name of the hyper parameter tuning job' @@ -139,8 +150,8 @@ outputs: description: 'The registry path of the Docker image that contains the training algorithm' implementation: container: - image: redbackthomson/aws-kubeflow-sagemaker:20191003 - command: ['python'] + image: amazon/aws-sagemaker-kfp-components:0.3.1 + command: ['python3'] args: [ hyperparameter_tuning.py, --region, {inputValue: region}, @@ -160,14 +171,6 @@ implementation: --continuous_parameters, {inputValue: continuous_parameters}, --categorical_parameters, {inputValue: categorical_parameters}, --channels, {inputValue: channels}, - --data_location_1, {inputValue: data_location_1}, - --data_location_2, {inputValue: data_location_2}, - --data_location_3, {inputValue: data_location_3}, - --data_location_4, {inputValue: data_location_4}, - --data_location_5, {inputValue: data_location_5}, - --data_location_6, {inputValue: data_location_6}, - --data_location_7, {inputValue: data_location_7}, - --data_location_8, {inputValue: data_location_8}, --output_location, {inputValue: output_location}, --output_encryption_key, {inputValue: output_encryption_key}, --instance_type, {inputValue: instance_type}, diff --git a/components/aws/sagemaker/hyperparameter_tuning/src/hyperparameter_tuning.py b/components/aws/sagemaker/hyperparameter_tuning/src/hyperparameter_tuning.py index 132b8a2b9af..068e52f5bde 100644 --- a/components/aws/sagemaker/hyperparameter_tuning/src/hyperparameter_tuning.py +++ b/components/aws/sagemaker/hyperparameter_tuning/src/hyperparameter_tuning.py @@ -19,61 +19,51 @@ def create_parser(): parser = argparse.ArgumentParser(description='SageMaker Hyperparameter Tuning Job') _utils.add_default_client_arguments(parser) - - parser.add_argument('--job_name', type=str.strip, required=False, help='The name of the tuning job. Must be unique within the same AWS account and AWS region.') - parser.add_argument('--role', type=str.strip, required=True, help='The Amazon Resource Name (ARN) that Amazon SageMaker assumes to perform tasks on your behalf.') - parser.add_argument('--image', type=str.strip, required=True, help='The registry path of the Docker image that contains the training algorithm.', default='') - parser.add_argument('--algorithm_name', type=str.strip, required=False, help='The name of the resource algorithm to use for the hyperparameter tuning job.', default='') - parser.add_argument('--training_input_mode', choices=['File', 'Pipe'], type=str.strip, required=False, help='The input mode that the algorithm supports. 
File or Pipe.', default='File') - parser.add_argument('--metric_definitions', type=_utils.str_to_json_dict, required=False, help='The dictionary of name-regex pairs specify the metrics that the algorithm emits.', default='{}') - parser.add_argument('--strategy', choices=['Bayesian', 'Random'], type=str.strip, required=False, help='How hyperparameter tuning chooses the combinations of hyperparameter values to use for the training job it launches.', default='Bayesian') - parser.add_argument('--metric_name', type=str.strip, required=True, help='The name of the metric to use for the objective metric.') - parser.add_argument('--metric_type', choices=['Maximize', 'Minimize'], type=str.strip, required=True, help='Whether to minimize or maximize the objective metric.') - parser.add_argument('--early_stopping_type', choices=['Off', 'Auto'], type=str.strip, required=False, help='Whether to minimize or maximize the objective metric.', default='Off') - parser.add_argument('--static_parameters', type=_utils.str_to_json_dict, required=False, help='The values of hyperparameters that do not change for the tuning job.', default='{}') - parser.add_argument('--integer_parameters', type=_utils.str_to_json_list, required=False, help='The array of IntegerParameterRange objects that specify ranges of integer hyperparameters that you want to search.', default='[]') - parser.add_argument('--continuous_parameters', type=_utils.str_to_json_list, required=False, help='The array of ContinuousParameterRange objects that specify ranges of continuous hyperparameters that you want to search.', default='[]') - parser.add_argument('--categorical_parameters', type=_utils.str_to_json_list, required=False, help='The array of CategoricalParameterRange objects that specify ranges of categorical hyperparameters that you want to search.', default='[]') - parser.add_argument('--channels', type=_utils.str_to_json_list, required=True, help='A list of dicts specifying the input channels. 
Must have at least one.') - parser.add_argument('--data_location_1', type=str.strip, required=False, help='The S3 URI of the input data source for channel 1.', default='') - parser.add_argument('--data_location_2', type=str.strip, required=False, help='The S3 URI of the input data source for channel 2.', default='') - parser.add_argument('--data_location_3', type=str.strip, required=False, help='The S3 URI of the input data source for channel 3.', default='') - parser.add_argument('--data_location_4', type=str.strip, required=False, help='The S3 URI of the input data source for channel 4.', default='') - parser.add_argument('--data_location_5', type=str.strip, required=False, help='The S3 URI of the input data source for channel 5.', default='') - parser.add_argument('--data_location_6', type=str.strip, required=False, help='The S3 URI of the input data source for channel 6.', default='') - parser.add_argument('--data_location_7', type=str.strip, required=False, help='The S3 URI of the input data source for channel 7.', default='') - parser.add_argument('--data_location_8', type=str.strip, required=False, help='The S3 URI of the input data source for channel 8.', default='') - parser.add_argument('--output_location', type=str.strip, required=True, help='The Amazon S3 path where you want Amazon SageMaker to store the results of the transform job.') - parser.add_argument('--output_encryption_key', type=str.strip, required=False, help='The AWS KMS key that Amazon SageMaker uses to encrypt the model artifacts.', default='') - parser.add_argument('--instance_type', choices=['ml.m4.xlarge', 'ml.m4.2xlarge', 'ml.m4.4xlarge', 'ml.m4.10xlarge', 'ml.m4.16xlarge', 'ml.m5.large', 'ml.m5.xlarge', 'ml.m5.2xlarge', 'ml.m5.4xlarge', - 'ml.m5.12xlarge', 'ml.m5.24xlarge', 'ml.c4.xlarge', 'ml.c4.2xlarge', 'ml.c4.4xlarge', 'ml.c4.8xlarge', 'ml.p2.xlarge', 'ml.p2.8xlarge', 'ml.p2.16xlarge', 'ml.p3.2xlarge', 'ml.p3.8xlarge', 'ml.p3.16xlarge', - 'ml.c5.xlarge', 'ml.c5.2xlarge', 'ml.c5.4xlarge', 'ml.c5.9xlarge', 'ml.c5.18xlarge'], type=str.strip, required=False, help='The ML compute instance type.', default='ml.m4.xlarge') - parser.add_argument('--instance_count', type=_utils.str_to_int, required=False, help='The number of ML compute instances to use in each training job.', default=1) - parser.add_argument('--volume_size', type=_utils.str_to_int, required=False, help='The size of the ML storage volume that you want to provision.', default=1) - parser.add_argument('--max_num_jobs', type=_utils.str_to_int, required=True, help='The maximum number of training jobs that a hyperparameter tuning job can launch.') - parser.add_argument('--max_parallel_jobs', type=_utils.str_to_int, required=True, help='The maximum number of concurrent training jobs that a hyperparameter tuning job can launch.') - parser.add_argument('--max_run_time', type=_utils.str_to_int, required=False, help='The maximum run time in seconds per training job.', default=86400) - parser.add_argument('--resource_encryption_key', type=str.strip, required=False, help='The AWS KMS key that Amazon SageMaker uses to encrypt data on the storage volume attached to the ML compute instance(s).', default='') - parser.add_argument('--vpc_security_group_ids', type=str.strip, required=False, help='The VPC security group IDs, in the form sg-xxxxxxxx.') - parser.add_argument('--vpc_subnets', type=str.strip, required=False, help='The ID of the subnets in the VPC to which you want to connect your hpo job.') + + parser.add_argument('--job_name', type=str, required=False, 
help='The name of the tuning job. Must be unique within the same AWS account and AWS region.')
+  parser.add_argument('--role', type=str, required=True, help='The Amazon Resource Name (ARN) that Amazon SageMaker assumes to perform tasks on your behalf.')
+  parser.add_argument('--image', type=str, required=False, help='The registry path of the Docker image that contains the training algorithm.', default='')
+  parser.add_argument('--algorithm_name', type=str, required=False, help='The name of the resource algorithm to use for the hyperparameter tuning job.', default='')
+  parser.add_argument('--training_input_mode', choices=['File', 'Pipe'], type=str, required=False, help='The input mode that the algorithm supports. File or Pipe.', default='File')
+  parser.add_argument('--metric_definitions', type=_utils.yaml_or_json_str, required=False, help='The dictionary of name-regex pairs that specify the metrics that the algorithm emits.', default={})
+  parser.add_argument('--strategy', choices=['Bayesian', 'Random'], type=str, required=False, help='How hyperparameter tuning chooses the combinations of hyperparameter values to use for the training job it launches.', default='Bayesian')
+  parser.add_argument('--metric_name', type=str, required=True, help='The name of the metric to use for the objective metric.')
+  parser.add_argument('--metric_type', choices=['Maximize', 'Minimize'], type=str, required=True, help='Whether to minimize or maximize the objective metric.')
+  parser.add_argument('--early_stopping_type', choices=['Off', 'Auto'], type=str, required=False, help='Whether to use early stopping for training jobs launched by the tuning job.', default='Off')
+  parser.add_argument('--static_parameters', type=_utils.yaml_or_json_str, required=False, help='The values of hyperparameters that do not change for the tuning job.', default={})
+  parser.add_argument('--integer_parameters', type=_utils.yaml_or_json_str, required=False, help='The array of IntegerParameterRange objects that specify ranges of integer hyperparameters that you want to search.', default=[])
+  parser.add_argument('--continuous_parameters', type=_utils.yaml_or_json_str, required=False, help='The array of ContinuousParameterRange objects that specify ranges of continuous hyperparameters that you want to search.', default=[])
+  parser.add_argument('--categorical_parameters', type=_utils.yaml_or_json_str, required=False, help='The array of CategoricalParameterRange objects that specify ranges of categorical hyperparameters that you want to search.', default=[])
+  parser.add_argument('--channels', type=_utils.yaml_or_json_str, required=True, help='A list of dicts specifying the input channels.
Must have at least one.') + parser.add_argument('--output_location', type=str, required=True, help='The Amazon S3 path where you want Amazon SageMaker to store the results of the transform job.') + parser.add_argument('--output_encryption_key', type=str, required=False, help='The AWS KMS key that Amazon SageMaker uses to encrypt the model artifacts.', default='') + parser.add_argument('--instance_type', type=str, required=False, help='The ML compute instance type.', default='ml.m4.xlarge') + parser.add_argument('--instance_count', type=int, required=False, help='The number of ML compute instances to use in each training job.', default=1) + parser.add_argument('--volume_size', type=int, required=False, help='The size of the ML storage volume that you want to provision.', default=30) + parser.add_argument('--max_num_jobs', type=int, required=True, help='The maximum number of training jobs that a hyperparameter tuning job can launch.') + parser.add_argument('--max_parallel_jobs', type=int, required=True, help='The maximum number of concurrent training jobs that a hyperparameter tuning job can launch.') + parser.add_argument('--max_run_time', type=int, required=False, help='The maximum run time in seconds per training job.', default=86400) + parser.add_argument('--resource_encryption_key', type=str, required=False, help='The AWS KMS key that Amazon SageMaker uses to encrypt data on the storage volume attached to the ML compute instance(s).', default='') + parser.add_argument('--vpc_security_group_ids', type=str, required=False, help='The VPC security group IDs, in the form sg-xxxxxxxx.') + parser.add_argument('--vpc_subnets', type=str, required=False, help='The ID of the subnets in the VPC to which you want to connect your hpo job.') parser.add_argument('--network_isolation', type=_utils.str_to_bool, required=False, help='Isolates the training container.', default=True) parser.add_argument('--traffic_encryption', type=_utils.str_to_bool, required=False, help='Encrypts all communications between ML compute instances in distributed training.', default=False) - parser.add_argument('--warm_start_type', choices=['IdenticalDataAndAlgorithm', 'TransferLearning', ''], type=str.strip, required=False, help='Specifies either "IdenticalDataAndAlgorithm" or "TransferLearning"') - parser.add_argument('--parent_hpo_jobs', type=str.strip, required=False, help='List of previously completed or stopped hyperparameter tuning jobs to be used as a starting point.', default='') + parser.add_argument('--warm_start_type', choices=['IdenticalDataAndAlgorithm', 'TransferLearning', ''], type=str, required=False, help='Specifies either "IdenticalDataAndAlgorithm" or "TransferLearning"') + parser.add_argument('--parent_hpo_jobs', type=str, required=False, help='List of previously completed or stopped hyperparameter tuning jobs to be used as a starting point.', default='') ### Start spot instance support parser.add_argument('--spot_instance', type=_utils.str_to_bool, required=False, help='Use managed spot training.', default=False) - parser.add_argument('--max_wait_time', type=_utils.str_to_int, required=False, help='The maximum time in seconds you are willing to wait for a managed spot training job to complete.', default=86400) - parser.add_argument('--checkpoint_config', type=_utils.str_to_json_dict, required=False, help='Dictionary of information about the output location for managed spot training checkpoint data.', default='{}') + parser.add_argument('--max_wait_time', type=int, required=False, help='The maximum time in 
seconds you are willing to wait for a managed spot training job to complete.', default=86400)
+  parser.add_argument('--checkpoint_config', type=_utils.yaml_or_json_str, required=False, help='Dictionary of information about the output location for managed spot training checkpoint data.', default={})
   ### End spot instance support
-  parser.add_argument('--tags', type=_utils.str_to_json_dict, required=False, help='An array of key-value pairs, to categorize AWS resources.', default='{}')
+  parser.add_argument('--tags', type=_utils.yaml_or_json_str, required=False, help='An array of key-value pairs, to categorize AWS resources.', default={})

   return parser

 def main(argv=None):
   parser = create_parser()
-  args = parser.parse_args()
+  args = parser.parse_args(argv)

   logging.getLogger().setLevel(logging.INFO)
   client = _utils.get_sagemaker_client(args.region)
@@ -100,4 +90,4 @@ def main(argv=None):

 if __name__== "__main__":
-  main()
+  main(sys.argv[1:])
diff --git a/components/aws/sagemaker/model/README.md b/components/aws/sagemaker/model/README.md
new file mode 100644
index 00000000000..079cdb4abbb
--- /dev/null
+++ b/components/aws/sagemaker/model/README.md
@@ -0,0 +1,77 @@
+# SageMaker Hosting Services - Create Model Kubeflow Pipeline component
+
+## Summary
+Component to create a model in SageMaker from a Kubeflow Pipelines workflow.
+
+## Details
+Deploying a model using Amazon SageMaker hosting services is a three-step process:
+
+1. **Create a model in Amazon SageMaker** - Specify the S3 path where model artifacts are stored and the Docker registry path for the image that contains the inference code.
+2. **Create an endpoint configuration for an HTTPS endpoint** - Specify the name of the model in the production variants and the type of instance that you want Amazon SageMaker to launch to host the model.
+3. **Create an HTTPS endpoint** - Launch the ML compute instances and deploy the model as specified in the endpoint configuration.
+
+This component handles Step 1. Steps 2 and 3 can be done using the [deploy component](https://github.com/kubeflow/pipelines/tree/master/components/aws/sagemaker/deploy) for AWS SageMaker.
+
+## Intended Use
+Create a model in Amazon SageMaker to be used for [creating an endpoint](https://github.com/kubeflow/pipelines/tree/master/components/aws/sagemaker/deploy) in hosting services or [running a batch transform job](https://github.com/kubeflow/pipelines/tree/master/components/aws/sagemaker/batch_transform).
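+
+As a rough sketch of how this component might be wired into a pipeline with the KFP SDK (the component URL, region, ARNs, and paths below are illustrative placeholders, not pinned values):
+
+```python
+import kfp
+from kfp import components, dsl
+
+# Hypothetical URL; pin it to the commit or release you actually use.
+sagemaker_model_op = components.load_component_from_url(
+    "https://raw.githubusercontent.com/kubeflow/pipelines/master/components/aws/sagemaker/model/component.yaml"
+)
+
+@dsl.pipeline(name="create-model-example")
+def create_model_pipeline():
+    # Inputs mirror the Runtime Arguments documented below.
+    sagemaker_model_op(
+        region="us-east-1",
+        model_name="example-model",
+        role="arn:aws:iam::123456789012:role/example-sagemaker-role",
+        image="123456789012.dkr.ecr.us-east-1.amazonaws.com/example:latest",
+        model_artifact_url="s3://example-bucket/model.tar.gz",
+    )
+
+if __name__ == "__main__":
+    kfp.compiler.Compiler().compile(create_model_pipeline, "create_model_pipeline.yaml")
+```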
+## Runtime Arguments
+Argument | Description | Optional (in pipeline definition) | Optional (in UI) | Data type | Accepted values | Default |
+:--- | :---------- | :---------- | :---------- | :----------| :---------- | :----------|
+region | The region where the model is created | No | No | String | | |
+endpoint_url | The endpoint URL for the private link VPC endpoint | Yes | Yes | String | | |
+tags | Key-value pairs to tag the model created in AWS | Yes | Yes | Dict | | {} |
+role | The ARN of the IAM role that Amazon SageMaker can assume to access model artifacts and docker image for deployment on ML compute instances or for batch transform jobs | No | No | String | | |
+network_isolation | Isolates the model container. No inbound or outbound network calls can be made to or from the model container | Yes | Yes | Boolean | | True |
+model_name | The name of the new model | No | No | String | | |
+vpc_subnets | The ID of the subnets in the VPC to which you want to connect your training job or model | No if `vpc_security_group_ids` is specified | No if `vpc_security_group_ids` is specified | Array of Strings | | |
+vpc_security_group_ids | The security groups for the VPC that is specified in the vpc_subnets field | No if `vpc_subnets` is specified | No if `vpc_subnets` is specified | Array of Strings | | |
+
+The Docker image containing the inference code, the associated artifacts, and the environment map that the inference code uses when the model is deployed for predictions make up the [`ContainerDefinition`](https://docs.aws.amazon.com/sagemaker/latest/APIReference/API_ContainerDefinition.html) object in the CreateModel API. The following parameters (except `secondary_containers`) describe the container as part of the model definition:
+
+Argument | Description | Optional (in pipeline definition) | Optional (in UI) | Data type | Accepted values | Default |
+:--- | :---------- | :---------- | :---------- | :----------| :---------- | :----------|
+container_host_name | When a ContainerDefinition is part of an [inference pipeline](https://docs.aws.amazon.com/sagemaker/latest/dg/inference-pipelines.html), the value of the parameter uniquely identifies the container for the purposes of [logging and metrics](https://docs.aws.amazon.com/sagemaker/latest/dg/inference-pipeline-logs-metrics.html) | Yes | Yes | String | Length Constraints: Maximum length of 63. Pattern: `^[a-zA-Z0-9](-*[a-zA-Z0-9])*` | |
+environment | The environment variables to set in the Docker container | Yes | Yes | Dict | Maximum length of 1024. Key Pattern: `[a-zA-Z_][a-zA-Z0-9_]*`. Value Pattern: `[\S\s]*` | {} |
+image | The Amazon EC2 Container Registry (Amazon ECR) path where inference code is stored | Yes | Yes | String | | |
+model_artifact_url | The S3 path where the model artifacts are stored. This path must point to a single gzip compressed tar archive (.tar.gz suffix) | Yes | Yes | String | | |
+model_package | The name or Amazon Resource Name (ARN) of the model package to use to create the model | Yes | Yes | String | | |
+secondary_containers | List of ContainerDefinition dictionaries in the form of a string (see Notes below) | Yes | Yes | String | Maximum number of 5 items | |
+
+Notes:
+* Please use the links in the [Resources section](#Resources) for detailed information on each input parameter and SageMaker APIs used in this component.
+* If you don't specify a value for the `container_host_name` parameter for a `ContainerDefinition` that is part of an [inference pipeline](https://docs.aws.amazon.com/sagemaker/latest/dg/inference-pipelines.html), a unique name is automatically assigned based on the position of the `ContainerDefinition` in the pipeline. If you specify a value for the `ContainerHostName` for any `ContainerDefinition` that is part of an inference pipeline, you must specify a value for the `ContainerHostName` parameter of every `ContainerDefinition` in that pipeline.
+* Each key and value in the `Environment` parameter's string-to-string map can have a length of up to 1024 characters. SageMaker supports up to 16 entries in the map.
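+* The map- and list-valued inputs above (`environment`, `tags`, `secondary_containers`) reach the component as strings and, in this change, are parsed with `_utils.yaml_or_json_str`. The helper itself is not shown in this diff; the following is only a minimal sketch of a parser with that behavior, assuming it accepts either YAML or JSON text:
+```python
+import json
+import yaml  # PyYAML
+
+def yaml_or_json_str(value):
+    # A sketch only: the actual helper in _utils may differ.
+    # An empty CLI value means "not provided".
+    if value is None or value == "":
+        return None
+    # Try strict JSON first, then fall back to YAML.
+    try:
+        return json.loads(value)
+    except ValueError:
+        return yaml.safe_load(value)
+```
+* Taken together, these inputs roughly mirror a [CreateModel](https://docs.aws.amazon.com/sagemaker/latest/APIReference/API_CreateModel.html) request. As an illustrative sketch only (every name, ARN, and path below is a placeholder, not a value used by this component), the single-container case maps onto Boto3 like this:
+```python
+import boto3
+
+# Illustrative values only.
+client = boto3.client("sagemaker", region_name="us-east-1")
+client.create_model(
+    ModelName="example-model",                                  # model_name
+    ExecutionRoleArn="arn:aws:iam::123456789012:role/example",  # role
+    PrimaryContainer={
+        "ContainerHostname": "example-container",               # container_host_name
+        "Image": "123456789012.dkr.ecr.us-east-1.amazonaws.com/example:latest",  # image
+        "ModelDataUrl": "s3://example-bucket/model.tar.gz",     # model_artifact_url
+        "Environment": {"EXAMPLE_KEY": "example-value"},        # environment
+    },
+    EnableNetworkIsolation=True,                                # network_isolation
+)
+```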
+* Input format to specify `secondary_containers` is: +``` +[ + { + "ContainerHostname": "string", + "Environment": { + "string" : "string" + }, + "Image": "string", + "ModelDataUrl": "string", + "ModelPackageName": "string" + } + ] +``` +* Specify either an `image` and `model_artifact_url`, OR a `model_package` in the `ContainerDefinition`. +* If you have a single container to define the model, use the parameters `container_host_name`, `environment`, `image`, `model_artifact_url`, `model_package` directly to define the primary container. + +## Outputs +Name | Description +:--- | :---------- +model_name | The name of the model created in Amazon SageMaker + +## Requirements +* [Kubeflow pipelines SDK](https://www.kubeflow.org/docs/pipelines/sdk/install-sdk/) +* [Kubeflow set-up](https://www.kubeflow.org/docs/aws/deploy/install-kubeflow/) + +## Samples +### Integrated into a pipeline +MNIST Classification pipeline: [Pipeline](https://github.com/kubeflow/pipelines/blob/master/samples/contrib/aws-samples/mnist-kmeans-sagemaker/mnist-classification-pipeline.py) | [Steps](https://github.com/kubeflow/pipelines/blob/master/samples/contrib/aws-samples/mnist-kmeans-sagemaker/README.md) + +## Resources +* [Create Model API documentation](https://docs.aws.amazon.com/sagemaker/latest/APIReference/API_CreateModel.html) +* [Boto3 API reference](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/sagemaker.html#SageMaker.Client.create_model) diff --git a/components/aws/sagemaker/model/component.yaml b/components/aws/sagemaker/model/component.yaml index 5bc7a4d0770..31d66ef4cc9 100644 --- a/components/aws/sagemaker/model/component.yaml +++ b/components/aws/sagemaker/model/component.yaml @@ -4,49 +4,63 @@ description: | inputs: - name: region description: 'The region where the training job launches.' + type: String - name: model_name description: 'The name of the new model.' + type: String - name: role description: 'The Amazon Resource Name (ARN) that Amazon SageMaker assumes to perform tasks on your behalf.' + type: String - name: container_host_name description: 'When a ContainerDefinition is part of an inference pipeline, this value uniquely identifies the container for the purposes of logging and metrics.' default: '' + type: String - name: image description: 'The Amazon EC2 Container Registry (Amazon ECR) path where inference code is stored.' default: '' + type: String - name: model_artifact_url description: 'S3 path where Amazon SageMaker to store the model artifacts.' default: '' + type: String - name: environment description: 'The dictionary of the environment variables to set in the Docker container. Up to 16 key-value entries in the map.' default: '{}' + type: JsonObject - name: model_package description: 'The name or Amazon Resource Name (ARN) of the model package to use to create the model.' default: '' + type: String - name: secondary_containers description: 'A list of dicts that specifies the additional containers in the inference pipeline.' default: '[]' + type: JsonArray - name: vpc_security_group_ids description: 'The VPC security group IDs, in the form sg-xxxxxxxx.' default: '' + type: String - name: vpc_subnets description: 'The ID of the subnets in the VPC to which you want to connect your hpo job.' default: '' + type: String - name: network_isolation description: 'Isolates the training container.' default: 'True' + type: Bool - name: endpoint_url description: 'The endpoint URL for the private link VPC endpoint.' 
default: '' + type: String - name: tags description: 'Key-value pairs to categorize AWS resources.' default: '{}' + type: JsonObject outputs: - {name: model_name, description: 'The model name Sagemaker created'} implementation: container: - image: redbackthomson/aws-kubeflow-sagemaker:20191003 - command: ['python'] + image: amazon/aws-sagemaker-kfp-components:0.3.1 + command: ['python3'] args: [ create_model.py, --region, {inputValue: region}, diff --git a/components/aws/sagemaker/model/src/create_model.py b/components/aws/sagemaker/model/src/create_model.py index 7fbe211d542..c6fcebd246c 100644 --- a/components/aws/sagemaker/model/src/create_model.py +++ b/components/aws/sagemaker/model/src/create_model.py @@ -19,18 +19,18 @@ def create_parser(): parser = argparse.ArgumentParser(description='SageMaker Training Job') _utils.add_default_client_arguments(parser) - parser.add_argument('--model_name', type=str.strip, required=True, help='The name of the new model.') - parser.add_argument('--role', type=str.strip, required=True, help='The Amazon Resource Name (ARN) that Amazon SageMaker assumes to perform tasks on your behalf.') - parser.add_argument('--container_host_name', type=str.strip, required=False, help='When a ContainerDefinition is part of an inference pipeline, this value uniquely identifies the container for the purposes of logging and metrics.', default='') - parser.add_argument('--image', type=str.strip, required=False, help='The Amazon EC2 Container Registry (Amazon ECR) path where inference code is stored.', default='') - parser.add_argument('--model_artifact_url', type=str.strip, required=False, help='S3 path where Amazon SageMaker to store the model artifacts.', default='') - parser.add_argument('--environment', type=_utils.str_to_json_dict, required=False, help='The dictionary of the environment variables to set in the Docker container. 
Up to 16 key-value entries in the map.', default='{}')
-  parser.add_argument('--model_package', type=str.strip, required=False, help='The name or Amazon Resource Name (ARN) of the model package to use to create the model.', default='')
-  parser.add_argument('--secondary_containers', type=_utils.str_to_json_list, required=False, help='A list of dicts that specifies the additional containers in the inference pipeline.', default='{}')
-  parser.add_argument('--vpc_security_group_ids', type=str.strip, required=False, help='The VPC security group IDs, in the form sg-xxxxxxxx.', default='')
-  parser.add_argument('--vpc_subnets', type=str.strip, required=False, help='The ID of the subnets in the VPC to which you want to connect your hpo job.', default='')
+  parser.add_argument('--model_name', type=str, required=True, help='The name of the new model.')
+  parser.add_argument('--role', type=str, required=True, help='The Amazon Resource Name (ARN) that Amazon SageMaker assumes to perform tasks on your behalf.')
+  parser.add_argument('--container_host_name', type=str, required=False, help='When a ContainerDefinition is part of an inference pipeline, this value uniquely identifies the container for the purposes of logging and metrics.', default='')
+  parser.add_argument('--image', type=str, required=False, help='The Amazon EC2 Container Registry (Amazon ECR) path where inference code is stored.', default='')
+  parser.add_argument('--model_artifact_url', type=str, required=False, help='The S3 path where Amazon SageMaker stores the model artifacts.', default='')
+  parser.add_argument('--environment', type=_utils.yaml_or_json_str, required=False, help='The dictionary of the environment variables to set in the Docker container. Up to 16 key-value entries in the map.', default={})
+  parser.add_argument('--model_package', type=str, required=False, help='The name or Amazon Resource Name (ARN) of the model package to use to create the model.', default='')
+  parser.add_argument('--secondary_containers', type=_utils.yaml_or_json_str, required=False, help='A list of dicts that specifies the additional containers in the inference pipeline.', default=[])
+  parser.add_argument('--vpc_security_group_ids', type=str, required=False, help='The VPC security group IDs, in the form sg-xxxxxxxx.', default='')
+  parser.add_argument('--vpc_subnets', type=str, required=False, help='The ID of the subnets in the VPC to which you want to connect your hpo job.', default='')
   parser.add_argument('--network_isolation', type=_utils.str_to_bool, required=False, help='Isolates the training container.', default=True)
-  parser.add_argument('--tags', type=_utils.str_to_json_dict, required=False, help='An array of key-value pairs, to categorize AWS resources.', default='{}')
+  parser.add_argument('--tags', type=_utils.yaml_or_json_str, required=False, help='An array of key-value pairs, to categorize AWS resources.', default={})

   return parser
diff --git a/components/aws/sagemaker/run_tests.py b/components/aws/sagemaker/run_tests.py
deleted file mode 100644
index c2ecf027b41..00000000000
--- a/components/aws/sagemaker/run_tests.py
+++ /dev/null
@@ -1,20 +0,0 @@
-# Configures and runs the unit tests for all the components
-
-import os
-import sys
-import unittest
-
-
-# Taken from http://stackoverflow.com/a/17004263/2931197
-def load_and_run_tests():
-    setup_file = sys.modules['__main__'].__file__
-    setup_dir = os.path.abspath(os.path.dirname(setup_file))
-
-    test_loader = unittest.defaultTestLoader
-    test_runner = unittest.TextTestRunner()
test_suite = test_loader.discover(setup_dir, pattern="test_*.py") - - test_runner.run(test_suite) - -if __name__ == '__main__': - load_and_run_tests() \ No newline at end of file diff --git a/components/aws/sagemaker/test/test_hpo.py b/components/aws/sagemaker/test/test_hpo.py deleted file mode 100644 index 07c634e1107..00000000000 --- a/components/aws/sagemaker/test/test_hpo.py +++ /dev/null @@ -1,54 +0,0 @@ -import json -import unittest - -from unittest.mock import patch, Mock, MagicMock -from botocore.exceptions import ClientError -from datetime import datetime - -from hyperparameter_tuning.src import hyperparameter_tuning as hpo -from common import _utils - -required_args = [ - '--region', 'us-west-2', - '--role', 'arn:aws:iam::123456789012:user/Development/product_1234/*', - '--image', 'test-image', - '--metric_name', 'test-metric', - '--metric_type', 'Maximize', - '--channels', '[{"ChannelName": "train", "DataSource": {"S3DataSource":{"S3Uri": "s3://fake-bucket/data","S3DataType":"S3Prefix","S3DataDistributionType": "FullyReplicated"}},"ContentType":"","CompressionType": "None","RecordWrapperType":"None","InputMode": "File"}]', - '--output_location', 'test-output-location', - '--max_num_jobs', '5', - '--max_parallel_jobs', '2' -] - -class HyperparameterTestCase(unittest.TestCase): - @classmethod - def setUpClass(cls): - parser = hpo.create_parser() - cls.parser = parser - - def test_spot_bad_args(self): - no_max_wait_args = self.parser.parse_args(required_args + ['--spot_instance', 'True']) - no_checkpoint_args = self.parser.parse_args(required_args + ['--spot_instance', 'True', '--max_wait_time', '3600']) - no_s3_uri_args = self.parser.parse_args(required_args + ['--spot_instance', 'True', '--max_wait_time', '3600', '--checkpoint_config', '{}']) - - for arg in [no_max_wait_args, no_checkpoint_args, no_s3_uri_args]: - with self.assertRaises(Exception): - _utils.create_hyperparameter_tuning_job_request(vars(arg)) - - def test_spot_lesser_wait_time(self): - args = self.parser.parse_args(required_args + ['--spot_instance', 'True', '--max_wait_time', '86399', '--checkpoint_config', '{"S3Uri": "s3://fake-uri/", "LocalPath": "local-path"}']) - with self.assertRaises(Exception): - _utils.create_hyperparameter_tuning_job_request(vars(args)) - - def test_spot_good_args(self): - good_args = self.parser.parse_args(required_args + ['--spot_instance', 'True', '--max_wait_time', '86400', '--checkpoint_config', '{"S3Uri": "s3://fake-uri/"}']) - response = _utils.create_hyperparameter_tuning_job_request(vars(good_args)) - self.assertTrue(response['TrainingJobDefinition']['EnableManagedSpotTraining']) - self.assertEqual(response['TrainingJobDefinition']['StoppingCondition']['MaxWaitTimeInSeconds'], 86400) - self.assertEqual(response['TrainingJobDefinition']['CheckpointConfig']['S3Uri'], 's3://fake-uri/') - - def test_spot_local_path(self): - args = self.parser.parse_args(required_args + ['--spot_instance', 'True', '--max_wait_time', '86400', '--checkpoint_config', '{"S3Uri": "s3://fake-uri/", "LocalPath": "local-path"}']) - response = _utils.create_hyperparameter_tuning_job_request(vars(args)) - self.assertEqual(response['TrainingJobDefinition']['CheckpointConfig']['S3Uri'], 's3://fake-uri/') - self.assertEqual(response['TrainingJobDefinition']['CheckpointConfig']['LocalPath'], 'local-path') \ No newline at end of file diff --git a/components/aws/sagemaker/test/test_train.py b/components/aws/sagemaker/test/test_train.py deleted file mode 100644 index 0b3440e3bb1..00000000000 --- 
a/components/aws/sagemaker/test/test_train.py +++ /dev/null @@ -1,54 +0,0 @@ -import json -import unittest - -from unittest.mock import patch, Mock, MagicMock -from botocore.exceptions import ClientError -from datetime import datetime - -from train.src import train -from common import _utils - -required_args = [ - '--region', 'us-west-2', - '--role', 'arn:aws:iam::123456789012:user/Development/product_1234/*', - '--image', 'test-image', - '--channels', '[{"ChannelName": "train", "DataSource": {"S3DataSource":{"S3Uri": "s3://fake-bucket/data","S3DataType":"S3Prefix","S3DataDistributionType": "FullyReplicated"}},"ContentType":"","CompressionType": "None","RecordWrapperType":"None","InputMode": "File"}]', - '--instance_type', 'ml.m4.xlarge', - '--instance_count', '1', - '--volume_size', '50', - '--max_run_time', '3600', - '--model_artifact_path', 'test-path' -] - -class TrainTestCase(unittest.TestCase): - @classmethod - def setUpClass(cls): - parser = train.create_parser() - cls.parser = parser - - def test_spot_bad_args(self): - no_max_wait_args = self.parser.parse_args(required_args + ['--spot_instance', 'True']) - no_checkpoint_args = self.parser.parse_args(required_args + ['--spot_instance', 'True', '--max_wait_time', '3600']) - no_s3_uri_args = self.parser.parse_args(required_args + ['--spot_instance', 'True', '--max_wait_time', '3600', '--checkpoint_config', '{}']) - - for arg in [no_max_wait_args, no_checkpoint_args, no_s3_uri_args]: - with self.assertRaises(Exception): - _utils.create_training_job_request(vars(arg)) - - def test_spot_lesser_wait_time(self): - args = self.parser.parse_args(required_args + ['--spot_instance', 'True', '--max_wait_time', '3599', '--checkpoint_config', '{"S3Uri": "s3://fake-uri/", "LocalPath": "local-path"}']) - with self.assertRaises(Exception): - _utils.create_training_job_request(vars(args)) - - def test_spot_good_args(self): - good_args = self.parser.parse_args(required_args + ['--spot_instance', 'True', '--max_wait_time', '3600', '--checkpoint_config', '{"S3Uri": "s3://fake-uri/"}']) - response = _utils.create_training_job_request(vars(good_args)) - self.assertTrue(response['EnableManagedSpotTraining']) - self.assertEqual(response['StoppingCondition']['MaxWaitTimeInSeconds'], 3600) - self.assertEqual(response['CheckpointConfig']['S3Uri'], 's3://fake-uri/') - - def test_spot_local_path(self): - args = self.parser.parse_args(required_args + ['--spot_instance', 'True', '--max_wait_time', '3600', '--checkpoint_config', '{"S3Uri": "s3://fake-uri/", "LocalPath": "local-path"}']) - response = _utils.create_training_job_request(vars(args)) - self.assertEqual(response['CheckpointConfig']['S3Uri'], 's3://fake-uri/') - self.assertEqual(response['CheckpointConfig']['LocalPath'], 'local-path') \ No newline at end of file diff --git a/components/aws/sagemaker/tests/integration_tests/.env.example b/components/aws/sagemaker/tests/integration_tests/.env.example new file mode 100644 index 00000000000..33c04cd60f8 --- /dev/null +++ b/components/aws/sagemaker/tests/integration_tests/.env.example @@ -0,0 +1,12 @@ +# If you would like to override the credentials for the container +# AWS_ACCESS_KEY_ID= +# AWS_SECRET_ACCESS_KEY= +# AWS_SESSION_TOKEN= + +REGION=us-east-1 + +SAGEMAKER_EXECUTION_ROLE_ARN=arn:aws:iam::123456789012:role/service-role/AmazonSageMaker-ExecutionRole-Example +S3_DATA_BUCKET=my-data-bucket + +# If you hope to use an existing EKS cluster, rather than creating a new one. 
+# EKS_EXISTING_CLUSTER=my-eks-cluster \ No newline at end of file diff --git a/components/aws/sagemaker/tests/integration_tests/.flake8 b/components/aws/sagemaker/tests/integration_tests/.flake8 new file mode 100644 index 00000000000..e9bfde932c0 --- /dev/null +++ b/components/aws/sagemaker/tests/integration_tests/.flake8 @@ -0,0 +1,5 @@ +[flake8] +max-line-length = 120 +extend-ignore = + # See https://github.com/PyCQA/pycodestyle/issues/373 + E203, \ No newline at end of file diff --git a/components/aws/sagemaker/tests/integration_tests/Dockerfile b/components/aws/sagemaker/tests/integration_tests/Dockerfile new file mode 100644 index 00000000000..75c66f8c1bd --- /dev/null +++ b/components/aws/sagemaker/tests/integration_tests/Dockerfile @@ -0,0 +1,43 @@ +FROM continuumio/miniconda:4.7.12 + +RUN apt-get update && apt-get install -y --no-install-recommends \ + curl \ + wget \ + git \ + jq + +# Install eksctl +RUN curl --location "https://github.com/weaveworks/eksctl/releases/download/0.19.0/eksctl_$(uname -s)_amd64.tar.gz" | tar xz -C /tmp \ + && mv /tmp/eksctl /usr/local/bin + +# Install aws-iam-authenticator +RUN curl -S -o /usr/local/bin/aws-iam-authenticator https://amazon-eks.s3.us-west-2.amazonaws.com/1.16.8/2020-04-16/bin/linux/amd64/aws-iam-authenticator \ + && chmod +x /usr/local/bin/aws-iam-authenticator + +# Install Kubectl +RUN curl -LO https://storage.googleapis.com/kubernetes-release/release/v1.18.0/bin/linux/amd64/kubectl \ + && chmod +x ./kubectl \ + && mv ./kubectl /usr/local/bin/kubectl + +# Install Argo CLI +RUN curl -sSL -o /usr/local/bin/argo https://github.com/argoproj/argo/releases/download/v2.8.0/argo-linux-amd64 \ + && chmod +x /usr/local/bin/argo + +# Copy conda environment early to avoid cache busting +COPY ./sagemaker/tests/integration_tests/environment.yml environment.yml + +# Create conda environment for running tests and set as start-up environment +RUN conda env create -f environment.yml +RUN echo "source activate kfp_test_env" > ~/.bashrc +ENV PATH "/opt/conda/envs/kfp_test_env/bin":$PATH + +# Environment variables to be used by tests +ENV REGION="us-west-2" +ENV SAGEMAKER_EXECUTION_ROLE_ARN="arn:aws:iam::1234567890:role/sagemaker-role" +ENV S3_DATA_BUCKET="kfp-test-data" +ENV MINIO_LOCAL_PORT=9000 +ENV KFP_NAMESPACE="kubeflow" + +COPY ./sagemaker/ . + +ENTRYPOINT [ "/bin/bash", "./tests/integration_tests/scripts/run_integration_tests" ] \ No newline at end of file diff --git a/components/aws/sagemaker/tests/integration_tests/README.md b/components/aws/sagemaker/tests/integration_tests/README.md new file mode 100644 index 00000000000..6076956ef52 --- /dev/null +++ b/components/aws/sagemaker/tests/integration_tests/README.md @@ -0,0 +1,25 @@ +## Requirements +1. [Docker](https://www.docker.com/) +1. [IAM Role](https://docs.aws.amazon.com/sagemaker/latest/dg/sagemaker-roles.html) with the SageMakerFullAccess and AmazonS3FullAccess policies attached +1. IAM User credentials with SageMakerFullAccess, AWSCloudFormationFullAccess, IAMFullAccess, AmazonEC2FullAccess, AmazonS3FullAccess permissions +1. The SageMaker WorkTeam and GroundTruth Component tests expect that at least one private workteam already exists in the region where you are running these tests. + + +## Creating S3 buckets with datasets + +1.
Change the bucket name in the [`s3_sample_data_creator.py`](https://github.com/kubeflow/pipelines/tree/master/samples/contrib/aws-samples/mnist-kmeans-sagemaker#the-sample-dataset) script, then run it to create an S3 bucket with the sample MNIST dataset in the region where you want to run the tests. +1. To prepare the dataset for the SageMaker GroundTruth Component test, follow the steps in the [GroundTruth Sample README](https://github.com/kubeflow/pipelines/tree/master/samples/contrib/aws-samples/ground_truth_pipeline_demo#prep-the-dataset-label-categories-and-ui-template). + + +## Steps to run integration tests +1. Copy the `.env.example` file to `.env` and modify its fields as described in the following steps: + 1. Configure the AWS credentials fields with those of your IAM User. + 1. Update `SAGEMAKER_EXECUTION_ROLE_ARN` with the ARN of the role you created earlier. + 1. Update the `S3_DATA_BUCKET` parameter with the name of the bucket created earlier. + 1. (Optional) If you have already created an EKS cluster for testing, set the `EKS_EXISTING_CLUSTER` field to its name. +1. Build the image by doing the following: + 1. Navigate to the `components/aws` directory. + 1. Run `docker build . -f sagemaker/tests/integration_tests/Dockerfile -t amazon/integration_test` +1. Run the image, injecting your environment variable file: + 1. Navigate to the `components/aws` directory. + 1. Run `docker run --env-file sagemaker/tests/integration_tests/.env amazon/integration_test` \ No newline at end of file diff --git a/components/aws/sagemaker/tests/integration_tests/component_tests/test_batch_transform_component.py b/components/aws/sagemaker/tests/integration_tests/component_tests/test_batch_transform_component.py new file mode 100644 index 00000000000..77f35fada97 --- /dev/null +++ b/components/aws/sagemaker/tests/integration_tests/component_tests/test_batch_transform_component.py @@ -0,0 +1,85 @@ +import utils +import os +import pytest + +from utils import kfp_client_utils +from utils import minio_utils +from utils import sagemaker_utils +from utils import s3_utils + + +@pytest.mark.parametrize( + "test_file_dir", + [ + pytest.param( + "resources/config/kmeans-mnist-batch-transform", + marks=pytest.mark.canary_test, + ) + ], +) +def test_transform_job( + kfp_client, + experiment_id, + s3_client, + sagemaker_client, + s3_data_bucket, + test_file_dir, +): + + download_dir = utils.mkdir(os.path.join(test_file_dir + "/generated")) + test_params = utils.load_params( + utils.replace_placeholders( + os.path.join(test_file_dir, "config.yaml"), + os.path.join(download_dir, "config.yaml"), + ) + ) + + # Generate a random prefix for the model and job names to avoid errors if resources with the same name already exist + test_params["Arguments"]["model_name"] = test_params["Arguments"][ + "job_name" + ] = input_job_name = ( + utils.generate_random_string(5) + "-" + test_params["Arguments"]["model_name"] + ) + print(f"running test with model/job name: {input_job_name}") + + # Generate a unique output location since the output filename is derived from the content_type + test_params["Arguments"]["output_location"] = os.path.join( + test_params["Arguments"]["output_location"], input_job_name + ) + + _, _, workflow_json = kfp_client_utils.compile_run_monitor_pipeline( + kfp_client, + experiment_id, + test_params["PipelineDefinition"], + test_params["Arguments"], + download_dir, + test_params["TestName"], + test_params["Timeout"], + ) + + outputs = {"sagemaker-batch-transformation": 
["output_location"]} + + output_files = minio_utils.artifact_download_iterator( + workflow_json, outputs, download_dir + ) + + # Verify Job was successful on SageMaker + response = sagemaker_utils.describe_transform_job(sagemaker_client, input_job_name) + assert response["TransformJobStatus"] == "Completed" + assert response["TransformJobName"] == input_job_name + + # Verify output location from pipeline matches job output and that the transformed file exists + output_location = utils.read_from_file_in_tar( + output_files["sagemaker-batch-transformation"]["output_location"], "data", + ) + print(f"output location: {output_location}") + assert output_location == response["TransformOutput"]["S3OutputPath"] + # Get relative path of file in S3 bucket + # URI is following format s3:///relative/path/to/file + # split below is to extract the part after bucket name + file_key = os.path.join( + "/".join(output_location.split("/")[3:]), test_params["ExpectedOutputFile"] + ) + assert s3_utils.check_object_exists(s3_client, s3_data_bucket, file_key) + + utils.remove_dir(download_dir) diff --git a/components/aws/sagemaker/tests/integration_tests/component_tests/test_deploy_component.py b/components/aws/sagemaker/tests/integration_tests/component_tests/test_deploy_component.py new file mode 100644 index 00000000000..9223f1d1c02 --- /dev/null +++ b/components/aws/sagemaker/tests/integration_tests/component_tests/test_deploy_component.py @@ -0,0 +1,114 @@ +import pytest +import os +import utils +import io +import numpy +import json +import pickle +import gzip + +from utils import kfp_client_utils +from utils import minio_utils +from utils import sagemaker_utils + + +def run_predict_mnist(boto3_session, endpoint_name, download_dir): + """ https://github.com/awslabs/amazon-sagemaker-examples/blob/a8c20eeb72dc7d3e94aaaf28be5bf7d7cd5695cb + /sagemaker-python-sdk/1P_kmeans_lowlevel/kmeans_mnist_lowlevel.ipynb """ + # Download and load dataset + region = boto3_session.region_name + download_path = os.path.join(download_dir, "mnist.pkl.gz") + boto3_session.resource("s3", region_name=region).Bucket( + "sagemaker-sample-data-{}".format(region) + ).download_file("algorithms/kmeans/mnist/mnist.pkl.gz", download_path) + with gzip.open(download_path, "rb") as f: + train_set, valid_set, test_set = pickle.load(f, encoding="latin1") + + # Function to create a csv from numpy array + def np2csv(arr): + csv = io.BytesIO() + numpy.savetxt(csv, arr, delimiter=",", fmt="%g") + return csv.getvalue().decode().rstrip() + + # Run prediction on an image + runtime = boto3_session.client("sagemaker-runtime") + payload = np2csv(train_set[0][30:31]) + + response = runtime.invoke_endpoint( + EndpointName=endpoint_name, ContentType="text/csv", Body=payload, + ) + return json.loads(response["Body"].read().decode()) + + +@pytest.mark.parametrize( + "test_file_dir", + [ + pytest.param( + "resources/config/kmeans-mnist-endpoint", marks=pytest.mark.canary_test + ) + ], +) +def test_create_endpoint( + kfp_client, experiment_id, boto3_session, sagemaker_client, test_file_dir +): + + download_dir = utils.mkdir(os.path.join(test_file_dir + "/generated")) + test_params = utils.load_params( + utils.replace_placeholders( + os.path.join(test_file_dir, "config.yaml"), + os.path.join(download_dir, "config.yaml"), + ) + ) + + # Generate random prefix for model, endpoint config and endpoint name + # to avoid errors if resources with same name exists + test_params["Arguments"]["model_name"] = test_params["Arguments"][ + "endpoint_config_name" + ] = 
test_params["Arguments"]["endpoint_name"] = input_endpoint_name = ( + utils.generate_random_string(5) + "-" + test_params["Arguments"]["model_name"] + ) + print(f"running test with model/endpoint name: {input_endpoint_name}") + + _, _, workflow_json = kfp_client_utils.compile_run_monitor_pipeline( + kfp_client, + experiment_id, + test_params["PipelineDefinition"], + test_params["Arguments"], + download_dir, + test_params["TestName"], + test_params["Timeout"], + ) + + try: + outputs = {"sagemaker-deploy-model": ["endpoint_name"]} + + output_files = minio_utils.artifact_download_iterator( + workflow_json, outputs, download_dir + ) + + output_endpoint_name = utils.read_from_file_in_tar( + output_files["sagemaker-deploy-model"]["endpoint_name"], "endpoint_name.txt" + ) + print(f"endpoint name: {output_endpoint_name}") + + # Verify output from pipeline is endpoint name + assert output_endpoint_name == input_endpoint_name + + # Verify endpoint is running + assert ( + sagemaker_utils.describe_endpoint(sagemaker_client, input_endpoint_name)[ + "EndpointStatus" + ] + == "InService" + ) + + # Validate the model for use by running a prediction + result = run_predict_mnist(boto3_session, input_endpoint_name, download_dir) + print(f"prediction result: {result}") + assert json.dumps(result, sort_keys=True) == json.dumps( + test_params["ExpectedPrediction"], sort_keys=True + ) + utils.remove_dir(download_dir) + finally: + # delete endpoint + sagemaker_utils.delete_endpoint(sagemaker_client, input_endpoint_name) diff --git a/components/aws/sagemaker/tests/integration_tests/component_tests/test_groundtruth_component.py b/components/aws/sagemaker/tests/integration_tests/component_tests/test_groundtruth_component.py new file mode 100644 index 00000000000..18c0ad485d0 --- /dev/null +++ b/components/aws/sagemaker/tests/integration_tests/component_tests/test_groundtruth_component.py @@ -0,0 +1,87 @@ +import pytest +import os +import json +import utils +from utils import kfp_client_utils +from utils import sagemaker_utils +from test_workteam_component import create_workteamjob +import time + + +@pytest.mark.parametrize( + "test_file_dir", + [ + pytest.param( + "resources/config/image-classification-groundtruth", + marks=pytest.mark.canary_test, + ) + ], +) +def test_groundtruth_labeling_job( + kfp_client, experiment_id, region, sagemaker_client, test_file_dir +): + + download_dir = utils.mkdir(os.path.join(test_file_dir + "/generated")) + test_params = utils.load_params( + utils.replace_placeholders( + os.path.join(test_file_dir, "config.yaml"), + os.path.join(download_dir, "config.yaml"), + ) + ) + + # First create a workteam using a separate pipeline and get the name, arn of the workteam created. + workteam_name, _ = create_workteamjob( + kfp_client, + experiment_id, + region, + sagemaker_client, + "resources/config/create-workteam", + download_dir, + ) + + test_params["Arguments"][ + "workteam_arn" + ] = workteam_arn = sagemaker_utils.get_workteam_arn(sagemaker_client, workteam_name) + + # Generate the ground_truth_train_job_name based on the workteam which will be used for labeling. 
+ test_params["Arguments"][ + "ground_truth_train_job_name" + ] = ground_truth_train_job_name = ( + test_params["Arguments"]["ground_truth_train_job_name"] + "-by-" + workteam_name + ) + + _ = kfp_client_utils.compile_run_monitor_pipeline( + kfp_client, + experiment_id, + test_params["PipelineDefinition"], + test_params["Arguments"], + download_dir, + test_params["TestName"], + test_params["Timeout"], + test_params["StatusToCheck"], + ) + + # Verify the GroundTruthJob was created in SageMaker and is InProgress. + # TODO: Add a bot to complete the labeling job and check for completion instead. + try: + response = sagemaker_utils.describe_labeling_job( + sagemaker_client, ground_truth_train_job_name + ) + assert response["LabelingJobStatus"] == "InProgress" + + # Verify that the workteam has the specified labeling job + labeling_jobs = sagemaker_utils.list_labeling_jobs_for_workteam( + sagemaker_client, workteam_arn + ) + assert len(labeling_jobs["LabelingJobSummaryList"]) == 1 + assert ( + labeling_jobs["LabelingJobSummaryList"][0]["LabelingJobName"] + == ground_truth_train_job_name + ) + finally: + # Cleanup the SageMaker Resources + sagemaker_utils.stop_labeling_job(sagemaker_client, ground_truth_train_job_name) + sagemaker_utils.delete_workteam(sagemaker_client, workteam_name) + + # Delete generated files + utils.remove_dir(download_dir) diff --git a/components/aws/sagemaker/tests/integration_tests/component_tests/test_hpo_component.py b/components/aws/sagemaker/tests/integration_tests/component_tests/test_hpo_component.py new file mode 100644 index 00000000000..4b264ba1ba8 --- /dev/null +++ b/components/aws/sagemaker/tests/integration_tests/component_tests/test_hpo_component.py @@ -0,0 +1,118 @@ +import pytest +import os +import json +import utils + +from utils import kfp_client_utils +from utils import minio_utils +from utils import sagemaker_utils + + +@pytest.mark.parametrize( + "test_file_dir", + [pytest.param("resources/config/kmeans-mnist-hpo", marks=pytest.mark.canary_test)], +) +def test_hyperparameter_tuning( + kfp_client, experiment_id, region, sagemaker_client, test_file_dir +): + + download_dir = utils.mkdir(os.path.join(test_file_dir + "/generated")) + test_params = utils.load_params( + utils.replace_placeholders( + os.path.join(test_file_dir, "config.yaml"), + os.path.join(download_dir, "config.yaml"), + ) + ) + + test_params["Arguments"]["channels"] = json.dumps( + test_params["Arguments"]["channels"] + ) + test_params["Arguments"]["static_parameters"] = json.dumps( + test_params["Arguments"]["static_parameters"] + ) + test_params["Arguments"]["integer_parameters"] = json.dumps( + test_params["Arguments"]["integer_parameters"] + ) + test_params["Arguments"]["categorical_parameters"] = json.dumps( + test_params["Arguments"]["categorical_parameters"] + ) + + _, _, workflow_json = kfp_client_utils.compile_run_monitor_pipeline( + kfp_client, + experiment_id, + test_params["PipelineDefinition"], + test_params["Arguments"], + download_dir, + test_params["TestName"], + test_params["Timeout"], + ) + + outputs = { + "sagemaker-hyperparameter-tuning": [ + "best_hyperparameters", + "best_job_name", + "hpo_job_name", + "model_artifact_url", + "training_image", + ] + } + output_files = minio_utils.artifact_download_iterator( + workflow_json, outputs, download_dir + ) + + # Verify HPO job was successful on SageMaker + hpo_job_name = utils.read_from_file_in_tar( + output_files["sagemaker-hyperparameter-tuning"]["hpo_job_name"], + "hpo_job_name.txt", + ) + print(f"HPO job name: 
{hpo_job_name}") + hpo_response = sagemaker_utils.describe_hpo_job(sagemaker_client, hpo_job_name) + assert hpo_response["HyperParameterTuningJobStatus"] == "Completed" + + # Verify training image output is an ECR image + training_image = utils.read_from_file_in_tar( + output_files["sagemaker-hyperparameter-tuning"]["training_image"], + "training_image.txt", + ) + print(f"Training image used: {training_image}") + if "ExpectedTrainingImage" in test_params.keys(): + assert test_params["ExpectedTrainingImage"] == training_image + else: + assert f"dkr.ecr.{region}.amazonaws.com" in training_image + + # Verify Training job was part of HPO job, returned as best and was successful + best_training_job_name = utils.read_from_file_in_tar( + output_files["sagemaker-hyperparameter-tuning"]["best_job_name"], + "best_job_name.txt", + ) + print(f"best training job name: {best_training_job_name}") + train_response = sagemaker_utils.describe_training_job( + sagemaker_client, best_training_job_name + ) + assert train_response["TuningJobArn"] == hpo_response["HyperParameterTuningJobArn"] + assert ( + train_response["TrainingJobName"] + == hpo_response["BestTrainingJob"]["TrainingJobName"] + ) + assert train_response["TrainingJobStatus"] == "Completed" + + # Verify model artifacts output was generated from this run + model_artifact_url = utils.read_from_file_in_tar( + output_files["sagemaker-hyperparameter-tuning"]["model_artifact_url"], + "model_artifact_url.txt", + ) + print(f"model_artifact_url: {model_artifact_url}") + assert model_artifact_url == train_response["ModelArtifacts"]["S3ModelArtifacts"] + assert best_training_job_name in model_artifact_url + + # Verify hyper_parameters output is not empty + hyper_parameters = json.loads( + utils.read_from_file_in_tar( + output_files["sagemaker-hyperparameter-tuning"]["best_hyperparameters"], + "best_hyperparameters.txt", + ) + ) + print(f"HPO best hyperparameters: {json.dumps(hyper_parameters, indent = 2)}") + assert hyper_parameters is not None + + utils.remove_dir(download_dir) diff --git a/components/aws/sagemaker/tests/integration_tests/component_tests/test_model_component.py b/components/aws/sagemaker/tests/integration_tests/component_tests/test_model_component.py new file mode 100644 index 00000000000..f5a21008667 --- /dev/null +++ b/components/aws/sagemaker/tests/integration_tests/component_tests/test_model_component.py @@ -0,0 +1,59 @@ +import pytest +import os +import utils + +from utils import kfp_client_utils +from utils import minio_utils +from utils import sagemaker_utils + + +@pytest.mark.parametrize( + "test_file_dir", + [ + pytest.param( + "resources/config/kmeans-mnist-model", marks=pytest.mark.canary_test + ) + ], +) +def test_createmodel(kfp_client, experiment_id, sagemaker_client, test_file_dir): + + download_dir = utils.mkdir(os.path.join(test_file_dir + "/generated")) + test_params = utils.load_params( + utils.replace_placeholders( + os.path.join(test_file_dir, "config.yaml"), + os.path.join(download_dir, "config.yaml"), + ) + ) + + # Generate random prefix for model name to avoid errors if model with same name exists + test_params["Arguments"]["model_name"] = input_model_name = ( + utils.generate_random_string(5) + "-" + test_params["Arguments"]["model_name"] + ) + print(f"running test with model_name: {input_model_name}") + + _, _, workflow_json = kfp_client_utils.compile_run_monitor_pipeline( + kfp_client, + experiment_id, + test_params["PipelineDefinition"], + test_params["Arguments"], + download_dir, + test_params["TestName"], + 
test_params["Timeout"], + ) + + outputs = {"sagemaker-create-model": ["model_name"]} + + output_files = minio_utils.artifact_download_iterator( + workflow_json, outputs, download_dir + ) + + output_model_name = utils.read_from_file_in_tar( + output_files["sagemaker-create-model"]["model_name"], "model_name.txt" + ) + print(f"model_name: {output_model_name}") + assert output_model_name == input_model_name + assert ( + sagemaker_utils.describe_model(sagemaker_client, input_model_name) is not None + ) + + utils.remove_dir(download_dir) diff --git a/components/aws/sagemaker/tests/integration_tests/component_tests/test_train_component.py b/components/aws/sagemaker/tests/integration_tests/component_tests/test_train_component.py new file mode 100644 index 00000000000..037350f0b3f --- /dev/null +++ b/components/aws/sagemaker/tests/integration_tests/component_tests/test_train_component.py @@ -0,0 +1,82 @@ +import pytest +import os +import json +import utils +from utils import kfp_client_utils +from utils import minio_utils +from utils import sagemaker_utils + + +@pytest.mark.parametrize( + "test_file_dir", + [ + pytest.param( + "resources/config/simple-mnist-training", marks=pytest.mark.canary_test + ) + ], +) +def test_trainingjob( + kfp_client, experiment_id, region, sagemaker_client, test_file_dir +): + + download_dir = utils.mkdir(os.path.join(test_file_dir + "/generated")) + test_params = utils.load_params( + utils.replace_placeholders( + os.path.join(test_file_dir, "config.yaml"), + os.path.join(download_dir, "config.yaml"), + ) + ) + + test_params["Arguments"]["hyperparameters"] = json.dumps( + test_params["Arguments"]["hyperparameters"] + ) + test_params["Arguments"]["channels"] = json.dumps( + test_params["Arguments"]["channels"] + ) + _, _, workflow_json = kfp_client_utils.compile_run_monitor_pipeline( + kfp_client, + experiment_id, + test_params["PipelineDefinition"], + test_params["Arguments"], + download_dir, + test_params["TestName"], + test_params["Timeout"], + ) + + outputs = { + "sagemaker-training-job": ["job_name", "model_artifact_url", "training_image"] + } + output_files = minio_utils.artifact_download_iterator( + workflow_json, outputs, download_dir + ) + + # Verify Training job was successful on SageMaker + training_job_name = utils.read_from_file_in_tar( + output_files["sagemaker-training-job"]["job_name"], "job_name.txt" + ) + print(f"training job name: {training_job_name}") + train_response = sagemaker_utils.describe_training_job( + sagemaker_client, training_job_name + ) + assert train_response["TrainingJobStatus"] == "Completed" + + # Verify model artifacts output was generated from this run + model_artifact_url = utils.read_from_file_in_tar( + output_files["sagemaker-training-job"]["model_artifact_url"], + "model_artifact_url.txt", + ) + print(f"model_artifact_url: {model_artifact_url}") + assert model_artifact_url == train_response["ModelArtifacts"]["S3ModelArtifacts"] + assert training_job_name in model_artifact_url + + # Verify training image output is an ECR image + training_image = utils.read_from_file_in_tar( + output_files["sagemaker-training-job"]["training_image"], "training_image.txt", + ) + print(f"Training image used: {training_image}") + if "ExpectedTrainingImage" in test_params.keys(): + assert test_params["ExpectedTrainingImage"] == training_image + else: + assert f"dkr.ecr.{region}.amazonaws.com" in training_image + + utils.remove_dir(download_dir) diff --git a/components/aws/sagemaker/tests/integration_tests/component_tests/test_workteam_component.py 
b/components/aws/sagemaker/tests/integration_tests/component_tests/test_workteam_component.py new file mode 100644 index 00000000000..e2243de8874 --- /dev/null +++ b/components/aws/sagemaker/tests/integration_tests/component_tests/test_workteam_component.py @@ -0,0 +1,83 @@ +import pytest +import os +import json +import utils +from utils import kfp_client_utils +from utils import sagemaker_utils +from utils import minio_utils + + +def create_workteamjob( + kfp_client, experiment_id, region, sagemaker_client, test_file_dir, download_dir +): + + test_params = utils.load_params( + utils.replace_placeholders( + os.path.join(test_file_dir, "config.yaml"), + os.path.join(download_dir, "config.yaml"), + ) + ) + + # Get the account, region specific user_pool and client_id for the Sagemaker Workforce. + ( + test_params["Arguments"]["user_pool"], + test_params["Arguments"]["client_id"], + test_params["Arguments"]["user_groups"], + ) = sagemaker_utils.get_cognito_member_definitions(sagemaker_client) + + # Generate random prefix for workteam_name to avoid errors if resources with same name exists + test_params["Arguments"]["team_name"] = workteam_name = ( + utils.generate_random_string(5) + "-" + test_params["Arguments"]["team_name"] + ) + + _, _, workflow_json = kfp_client_utils.compile_run_monitor_pipeline( + kfp_client, + experiment_id, + test_params["PipelineDefinition"], + test_params["Arguments"], + download_dir, + test_params["TestName"], + test_params["Timeout"], + ) + + return workteam_name, workflow_json + + +@pytest.mark.parametrize( + "test_file_dir", + [pytest.param("resources/config/create-workteam", marks=pytest.mark.canary_test)], +) +def test_workteamjob( + kfp_client, experiment_id, region, sagemaker_client, test_file_dir +): + + download_dir = utils.mkdir(os.path.join(test_file_dir + "/generated")) + workteam_name, workflow_json = create_workteamjob( + kfp_client, experiment_id, region, sagemaker_client, test_file_dir, download_dir + ) + + outputs = {"sagemaker-private-workforce": ["workteam_arn"]} + output_files = minio_utils.artifact_download_iterator( + workflow_json, outputs, download_dir + ) + + try: + response = sagemaker_utils.describe_workteam(sagemaker_client, workteam_name) + + # Verify WorkTeam was created in SageMaker + assert response["Workteam"]["CreateDate"] is not None + assert response["Workteam"]["WorkteamName"] == workteam_name + + # Verify WorkTeam arn artifact was created in Minio and matches the one in SageMaker + workteam_arn = utils.read_from_file_in_tar( + output_files["sagemaker-private-workforce"]["workteam_arn"], + "workteam_arn.txt", + ) + assert response["Workteam"]["WorkteamArn"] == workteam_arn + + finally: + # Cleanup the SageMaker Resources + sagemaker_utils.delete_workteam(sagemaker_client, workteam_name) + + # Delete generated files only if the test is successful + utils.remove_dir(download_dir) diff --git a/components/aws/sagemaker/tests/integration_tests/conftest.py b/components/aws/sagemaker/tests/integration_tests/conftest.py new file mode 100644 index 00000000000..c022cadd2c0 --- /dev/null +++ b/components/aws/sagemaker/tests/integration_tests/conftest.py @@ -0,0 +1,117 @@ +import pytest +import boto3 +import kfp +import os +import utils + +from datetime import datetime +from filelock import FileLock + + +def pytest_addoption(parser): + parser.addoption( + "--region", + default="us-west-2", + required=False, + help="AWS region where test will run", + ) + parser.addoption( + "--role-arn", required=True, help="SageMaker execution IAM role ARN", 
+ ) + parser.addoption( + "--s3-data-bucket", + required=True, + help="Regional S3 bucket name in which test data is hosted", + ) + parser.addoption( + "--minio-service-port", + default="9000", + required=False, + help="Localhost port to which minio service is mapped to", + ) + parser.addoption( + "--kfp-namespace", + default="kubeflow", + required=False, + help="Cluster namespace where kubeflow pipelines is installed", + ) + + +@pytest.fixture(scope="session", autouse=True) +def region(request): + os.environ["AWS_REGION"] = request.config.getoption("--region") + return request.config.getoption("--region") + + +@pytest.fixture(scope="session", autouse=True) +def role_arn(request): + os.environ["ROLE_ARN"] = request.config.getoption("--role-arn") + return request.config.getoption("--role-arn") + + +@pytest.fixture(scope="session", autouse=True) +def s3_data_bucket(request): + os.environ["S3_DATA_BUCKET"] = request.config.getoption("--s3-data-bucket") + return request.config.getoption("--s3-data-bucket") + + +@pytest.fixture(scope="session", autouse=True) +def minio_service_port(request): + os.environ["MINIO_SERVICE_PORT"] = request.config.getoption("--minio-service-port") + return request.config.getoption("--minio-service-port") + + +@pytest.fixture(scope="session", autouse=True) +def kfp_namespace(request): + os.environ["NAMESPACE"] = request.config.getoption("--kfp-namespace") + return request.config.getoption("--kfp-namespace") + + +@pytest.fixture(scope="session") +def boto3_session(region): + return boto3.Session(region_name=region) + + +@pytest.fixture(scope="session") +def sagemaker_client(boto3_session): + return boto3_session.client(service_name="sagemaker") + + +@pytest.fixture(scope="session") +def s3_client(boto3_session): + return boto3_session.client(service_name="s3") + + +@pytest.fixture(scope="session") +def kfp_client(): + kfp_installed_namespace = utils.get_kfp_namespace() + return kfp.Client(namespace=kfp_installed_namespace) + + +def get_experiment_id(kfp_client): + exp_name = datetime.now().strftime("%Y-%m-%d-%H-%M") + try: + experiment = kfp_client.get_experiment(experiment_name=exp_name) + except ValueError: + experiment = kfp_client.create_experiment(name=exp_name) + return experiment.id + + +@pytest.fixture(scope="session") +def experiment_id(kfp_client, tmp_path_factory, worker_id): + if not worker_id: + return get_experiment_id(kfp_client) + + # Locking taking as an example from + # https://github.com/pytest-dev/pytest-xdist#making-session-scoped-fixtures-execute-only-once + # get the temp directory shared by all workers + root_tmp_dir = tmp_path_factory.getbasetemp().parent + + fn = root_tmp_dir / "experiment_id" + with FileLock(str(fn) + ".lock"): + if fn.is_file(): + data = fn.read_text() + else: + data = get_experiment_id(kfp_client) + fn.write_text(data) + return data diff --git a/components/aws/sagemaker/tests/integration_tests/environment.yml b/components/aws/sagemaker/tests/integration_tests/environment.yml new file mode 100644 index 00000000000..90c7645bc61 --- /dev/null +++ b/components/aws/sagemaker/tests/integration_tests/environment.yml @@ -0,0 +1,22 @@ +name: kfp_test_env +channels: + - conda-forge + - defaults +dependencies: + - python=3.7.* + - pip=20.0.* + - awscli=1.18.* + - boto3=1.12.* + - pytest=5.* + - pytest-xdist=1.31.* + - pyyaml=5.3.* + - flake8=3.7.* + - flake8-black=0.1.* + - filelock=3.0.* + - pip: + - kubernetes==11.0.* + - kfp==0.5.* + - minio==5.0.10 + - sagemaker==1.56.* + + diff --git 
a/components/aws/sagemaker/tests/integration_tests/pytest.ini b/components/aws/sagemaker/tests/integration_tests/pytest.ini new file mode 100644 index 00000000000..b8b25ae727d --- /dev/null +++ b/components/aws/sagemaker/tests/integration_tests/pytest.ini @@ -0,0 +1,4 @@ +[pytest] +addopts = -rA +markers = + canary_test: test to be run as part of canaries. \ No newline at end of file diff --git a/components/aws/sagemaker/tests/integration_tests/resources/config/create-workteam/config.yaml b/components/aws/sagemaker/tests/integration_tests/resources/config/create-workteam/config.yaml new file mode 100644 index 00000000000..244148e6abb --- /dev/null +++ b/components/aws/sagemaker/tests/integration_tests/resources/config/create-workteam/config.yaml @@ -0,0 +1,10 @@ +PipelineDefinition: resources/definition/workteam_pipeline.py +TestName: create-workteam +Timeout: 3600 +Arguments: + region: ((REGION)) + team_name: 'test-workteam' + description: 'Team for GroundTruth Integ Test' + user_pool: 'user-pool' + user_groups: 'user-group' + client_id: 'client-id' diff --git a/components/aws/sagemaker/tests/integration_tests/resources/config/image-classification-groundtruth/config.yaml b/components/aws/sagemaker/tests/integration_tests/resources/config/image-classification-groundtruth/config.yaml new file mode 100644 index 00000000000..bd583c9e692 --- /dev/null +++ b/components/aws/sagemaker/tests/integration_tests/resources/config/image-classification-groundtruth/config.yaml @@ -0,0 +1,22 @@ +PipelineDefinition: resources/definition/groundtruth_pipeline.py +TestName: image-classification-groundtruth +Timeout: 10 +StatusToCheck: 'running' +Arguments: + region: ((REGION)) + role: ((ROLE_ARN)) + ground_truth_train_job_name: 'image-labeling' + ground_truth_label_attribute_name: 'category' + ground_truth_train_manifest_location: 's3://((DATA_BUCKET))/mini-image-classification/ground-truth-demo/train.manifest' + ground_truth_output_location: 's3://((DATA_BUCKET))/mini-image-classification/ground-truth-demo/output' + ground_truth_task_type: 'image classification' + ground_truth_worker_type: 'private' + ground_truth_label_category_config: 's3://((DATA_BUCKET))/mini-image-classification/ground-truth-demo/class_labels.json' + ground_truth_ui_template: 's3://((DATA_BUCKET))/mini-image-classification/ground-truth-demo/instructions.template' + ground_truth_title: 'Mini image classification' + ground_truth_description: 'Test for Ground Truth KFP component' + ground_truth_num_workers_per_object: '1' + ground_truth_time_limit: '30' + ground_truth_task_availibility: '3600' + ground_truth_max_concurrent_tasks: '20' + workteam_arn: 'workteam-arn' diff --git a/components/aws/sagemaker/tests/integration_tests/resources/config/kmeans-mnist-batch-transform/config.yaml b/components/aws/sagemaker/tests/integration_tests/resources/config/kmeans-mnist-batch-transform/config.yaml new file mode 100644 index 00000000000..5026d766697 --- /dev/null +++ b/components/aws/sagemaker/tests/integration_tests/resources/config/kmeans-mnist-batch-transform/config.yaml @@ -0,0 +1,24 @@ +PipelineDefinition: resources/definition/transform_job_pipeline.py +TestName: kmeans-batch-transform-test +Timeout: 1200 +ExpectedOutputFile: valid_data.csv.out +Arguments: + region: ((REGION)) + model_name: kmeans-mnist-model + job_name: kmeans-mnist-model + image: ((KMEANS_REGISTRY)).dkr.ecr.((REGION)).amazonaws.com/kmeans:1 + model_artifact_url: s3://((DATA_BUCKET))/mnist_kmeans_example/model/kmeans-mnist-model/model.tar.gz + instance_type: ml.m4.xlarge + 
instance_count: 1 + network_isolation: "True" + role: ((ROLE_ARN)) + data_input: s3://((DATA_BUCKET))/mnist_kmeans_example/input + data_type: S3Prefix + content_type: text/csv + compression_type: None + output_location: s3://((DATA_BUCKET))/mnist_kmeans_example/output + max_concurrent: 4 + max_payload: 6 + batch_strategy: MultiRecord + split_type: Line + \ No newline at end of file diff --git a/components/aws/sagemaker/tests/integration_tests/resources/config/kmeans-mnist-endpoint/config.yaml b/components/aws/sagemaker/tests/integration_tests/resources/config/kmeans-mnist-endpoint/config.yaml new file mode 100644 index 00000000000..d75320eb6bc --- /dev/null +++ b/components/aws/sagemaker/tests/integration_tests/resources/config/kmeans-mnist-endpoint/config.yaml @@ -0,0 +1,21 @@ +PipelineDefinition: resources/definition/create_endpoint_pipeline.py +TestName: kmeans-create-endpoint-test +Timeout: 1800 +ExpectedPrediction: + predictions: + - distance_to_cluster: 7.448746204376221 + closest_cluster: 2.0 +Arguments: + region: ((REGION)) + model_name: kmeans-mnist-model + endpoint_config_name: kmeans-mnist-model + endpoint_name: kmeans-mnist-model + image: ((KMEANS_REGISTRY)).dkr.ecr.((REGION)).amazonaws.com/kmeans:1 + model_artifact_url: s3://((DATA_BUCKET))/mnist_kmeans_example/model/kmeans-mnist-model/model.tar.gz + variant_name_1: variant-1 + initial_variant_weight_1: 1.0 + instance_type_1: ml.m4.xlarge + initial_instance_count_1: 1 + network_isolation: "True" + role: ((ROLE_ARN)) + \ No newline at end of file diff --git a/components/aws/sagemaker/tests/integration_tests/resources/config/kmeans-mnist-hpo/config.yaml b/components/aws/sagemaker/tests/integration_tests/resources/config/kmeans-mnist-hpo/config.yaml new file mode 100644 index 00000000000..52db25f384f --- /dev/null +++ b/components/aws/sagemaker/tests/integration_tests/resources/config/kmeans-mnist-hpo/config.yaml @@ -0,0 +1,56 @@ +PipelineDefinition: resources/definition/hpo_pipeline.py +TestName: kmeans-mnist-hpo-training +Timeout: 3600 +ExpectedTrainingImage: ((KMEANS_REGISTRY)).dkr.ecr.((REGION)).amazonaws.com/kmeans:1 +Arguments: + region: ((REGION)) + algorithm_name: K-Means + training_input_mode: File + static_parameters: + k: "10" + feature_dim: "784" + integer_parameters: + - Name: "mini_batch_size" + MinValue: "450" + MaxValue: "550" + - Name: "extra_center_factor" + MinValue: "10" + MaxValue: "20" + channels: + - ChannelName: train + DataSource: + S3DataSource: + S3Uri: s3://((DATA_BUCKET))/mnist_kmeans_example/train_data + S3DataType: S3Prefix + S3DataDistributionType: FullyReplicated + CompressionType: None + RecordWrapperType: None + InputMode: File + - ChannelName: test + DataSource: + S3DataSource: + S3Uri: s3://((DATA_BUCKET))/mnist_kmeans_example/test_data + S3DataType: S3Prefix + S3DataDistributionType: FullyReplicated + CompressionType: None + RecordWrapperType: None + InputMode: File + categorical_parameters: + - Name: init_method + Values: + - random + - kmeans++ + early_stopping_type: "Off" + max_parallel_jobs: 1 + max_num_jobs: 1 + metric_name: test:msd + metric_type: Minimize + hpo_strategy: Bayesian + instance_type: ml.m5.xlarge + instance_count: 1 + volume_size: 50 + max_run_time: 3600 + output_location: s3://((DATA_BUCKET))/mnist_kmeans_example/output + network_isolation: "True" + max_wait_time: 3600 + role: ((ROLE_ARN)) diff --git a/components/aws/sagemaker/tests/integration_tests/resources/config/kmeans-mnist-model/config.yaml 
b/components/aws/sagemaker/tests/integration_tests/resources/config/kmeans-mnist-model/config.yaml new file mode 100644 index 00000000000..ab854fa936b --- /dev/null +++ b/components/aws/sagemaker/tests/integration_tests/resources/config/kmeans-mnist-model/config.yaml @@ -0,0 +1,11 @@ +PipelineDefinition: resources/definition/create_model_pipeline.py +TestName: kmeans-create-model-test +Timeout: 300 +Arguments: + region: ((REGION)) + model_name: kmeans-mnist-model + image: ((KMEANS_REGISTRY)).dkr.ecr.((REGION)).amazonaws.com/kmeans:1 + model_artifact_url: s3://((DATA_BUCKET))/mnist_kmeans_example/model/kmeans-mnist-model/model.tar.gz + network_isolation: "True" + role: ((ROLE_ARN)) + \ No newline at end of file diff --git a/components/aws/sagemaker/tests/integration_tests/resources/config/simple-mnist-training/config.yaml b/components/aws/sagemaker/tests/integration_tests/resources/config/simple-mnist-training/config.yaml new file mode 100644 index 00000000000..27155386eae --- /dev/null +++ b/components/aws/sagemaker/tests/integration_tests/resources/config/simple-mnist-training/config.yaml @@ -0,0 +1,32 @@ +PipelineDefinition: resources/definition/training_pipeline.py +TestName: simple-mnist-training +Timeout: 3600 +ExpectedTrainingImage: ((KMEANS_REGISTRY)).dkr.ecr.((REGION)).amazonaws.com/kmeans:1 +Arguments: + region: ((REGION)) + image: ((KMEANS_REGISTRY)).dkr.ecr.((REGION)).amazonaws.com/kmeans:1 + training_input_mode: File + hyperparameters: + k: "10" + feature_dim: "784" + channels: + - ChannelName: train + DataSource: + S3DataSource: + S3Uri: s3://((DATA_BUCKET))/mnist_kmeans_example/data + S3DataType: S3Prefix + S3DataDistributionType: FullyReplicated + CompressionType: None + RecordWrapperType: None + InputMode: File + instance_type: ml.m5.xlarge + instance_count: 1 + volume_size: 50 + max_run_time: 3600 + model_artifact_path: s3://((DATA_BUCKET))/mnist_kmeans_example/output + network_isolation: "True" + traffic_encryption: "False" + spot_instance: "False" + max_wait_time: 3600 + checkpoint_config: "{}" + role: ((ROLE_ARN)) diff --git a/components/aws/sagemaker/tests/integration_tests/resources/definition/create_endpoint_pipeline.py b/components/aws/sagemaker/tests/integration_tests/resources/definition/create_endpoint_pipeline.py new file mode 100644 index 00000000000..8b28e52eac8 --- /dev/null +++ b/components/aws/sagemaker/tests/integration_tests/resources/definition/create_endpoint_pipeline.py @@ -0,0 +1,55 @@ +import kfp +from kfp import components +from kfp import dsl +from kfp.aws import use_aws_secret + +sagemaker_model_op = components.load_component_from_file("../../model/component.yaml") +sagemaker_deploy_op = components.load_component_from_file("../../deploy/component.yaml") + + +@dsl.pipeline( + name="Create Hosting Endpoint in SageMaker", + description="SageMaker deploy component test", +) +def create_endpoint_pipeline( + region="", + endpoint_url="", + image="", + model_name="", + endpoint_config_name="", + endpoint_name="", + model_artifact_url="", + variant_name_1="", + instance_type_1="", + initial_instance_count_1="", + initial_variant_weight_1="", + network_isolation="", + role="", +): + create_model = sagemaker_model_op( + region=region, + endpoint_url=endpoint_url, + model_name=model_name, + image=image, + model_artifact_url=model_artifact_url, + network_isolation=network_isolation, + role=role, + ) + + sagemaker_deploy_op( + region=region, + endpoint_url=endpoint_url, + endpoint_config_name=endpoint_config_name, + endpoint_name=endpoint_name, + 
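+        # create_model.output carries the model name produced by the model op;
+        # consuming it here also makes the deploy step wait for model creation.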
model_name_1=create_model.output, + variant_name_1=variant_name_1, + instance_type_1=instance_type_1, + initial_instance_count_1=initial_instance_count_1, + initial_variant_weight_1=initial_variant_weight_1, + ) + + +if __name__ == "__main__": + kfp.compiler.Compiler().compile( + create_endpoint_pipeline, "SageMaker_hosting_pipeline" + ".yaml" + ) diff --git a/components/aws/sagemaker/tests/integration_tests/resources/definition/create_model_pipeline.py b/components/aws/sagemaker/tests/integration_tests/resources/definition/create_model_pipeline.py new file mode 100644 index 00000000000..75f4f6a26e3 --- /dev/null +++ b/components/aws/sagemaker/tests/integration_tests/resources/definition/create_model_pipeline.py @@ -0,0 +1,35 @@ +import kfp +from kfp import components +from kfp import dsl +from kfp.aws import use_aws_secret + +sagemaker_model_op = components.load_component_from_file("../../model/component.yaml") + + +@dsl.pipeline( + name="Create Model in SageMaker", description="SageMaker model component test" +) +def create_model_pipeline( + region="", + endpoint_url="", + image="", + model_name="", + model_artifact_url="", + network_isolation="", + role="", +): + sagemaker_model_op( + region=region, + endpoint_url=endpoint_url, + model_name=model_name, + image=image, + model_artifact_url=model_artifact_url, + network_isolation=network_isolation, + role=role, + ) + + +if __name__ == "__main__": + kfp.compiler.Compiler().compile( + create_model_pipeline, "SageMaker_create_model_pipeline" + ".yaml" + ) diff --git a/components/aws/sagemaker/tests/integration_tests/resources/definition/groundtruth_pipeline.py b/components/aws/sagemaker/tests/integration_tests/resources/definition/groundtruth_pipeline.py new file mode 100644 index 00000000000..23456abf543 --- /dev/null +++ b/components/aws/sagemaker/tests/integration_tests/resources/definition/groundtruth_pipeline.py @@ -0,0 +1,59 @@ +import kfp +import json +import copy +from kfp import components +from kfp import dsl +from kfp.aws import use_aws_secret + +sagemaker_gt_op = components.load_component_from_file( + "../../ground_truth/component.yaml" +) + + +@dsl.pipeline( + name="SageMaker GroundTruth image classification test pipeline", + description="SageMaker GroundTruth image classification test pipeline", +) +def ground_truth_test( + region="", + ground_truth_train_job_name="", + ground_truth_label_attribute_name="", + ground_truth_train_manifest_location="", + ground_truth_output_location="", + ground_truth_task_type="", + ground_truth_worker_type="", + ground_truth_label_category_config="", + ground_truth_ui_template="", + ground_truth_title="", + ground_truth_description="", + ground_truth_num_workers_per_object="", + ground_truth_time_limit="", + ground_truth_task_availibility="", + ground_truth_max_concurrent_tasks="", + role="", + workteam_arn="", +): + + ground_truth_train = sagemaker_gt_op( + region=region, + role=role, + job_name=ground_truth_train_job_name, + label_attribute_name=ground_truth_label_attribute_name, + manifest_location=ground_truth_train_manifest_location, + output_location=ground_truth_output_location, + task_type=ground_truth_task_type, + worker_type=ground_truth_worker_type, + workteam_arn=workteam_arn, + label_category_config=ground_truth_label_category_config, + ui_template=ground_truth_ui_template, + title=ground_truth_title, + description=ground_truth_description, + num_workers_per_object=ground_truth_num_workers_per_object, + time_limit=ground_truth_time_limit, + 
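+        # Note: "task_availibility" mirrors the spelling of the component's input parameter.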
task_availibility=ground_truth_task_availibility, + max_concurrent_tasks=ground_truth_max_concurrent_tasks, + ) + + +if __name__ == "__main__": + kfp.compiler.Compiler().compile(ground_truth_test, __file__ + ".yaml") diff --git a/components/aws/sagemaker/tests/integration_tests/resources/definition/hpo_pipeline.py b/components/aws/sagemaker/tests/integration_tests/resources/definition/hpo_pipeline.py new file mode 100644 index 00000000000..cd1a50fb57a --- /dev/null +++ b/components/aws/sagemaker/tests/integration_tests/resources/definition/hpo_pipeline.py @@ -0,0 +1,65 @@ +import kfp +from kfp import components +from kfp import dsl +from kfp.aws import use_aws_secret + +sagemaker_hpo_op = components.load_component_from_file( + "../../hyperparameter_tuning/component.yaml" +) + + +@dsl.pipeline( + name="SageMaker HyperParameter Tuning", description="SageMaker HPO job test" +) +def hpo_pipeline( + region="", + algorithm_name="", + training_input_mode="", + static_parameters="", + integer_parameters="", + channels="", + categorical_parameters="", + early_stopping_type="", + max_parallel_jobs="", + max_num_jobs="", + metric_name="", + metric_type="", + hpo_strategy="", + instance_type="", + instance_count="", + volume_size="", + max_run_time="", + output_location="", + network_isolation="", + max_wait_time="", + role="", +): + sagemaker_hpo_op( + region=region, + algorithm_name=algorithm_name, + training_input_mode=training_input_mode, + static_parameters=static_parameters, + integer_parameters=integer_parameters, + channels=channels, + categorical_parameters=categorical_parameters, + early_stopping_type=early_stopping_type, + max_parallel_jobs=max_parallel_jobs, + max_num_jobs=max_num_jobs, + metric_name=metric_name, + metric_type=metric_type, + strategy=hpo_strategy, + instance_type=instance_type, + instance_count=instance_count, + volume_size=volume_size, + max_run_time=max_run_time, + output_location=output_location, + network_isolation=network_isolation, + max_wait_time=max_wait_time, + role=role, + ) + + +if __name__ == "__main__": + kfp.compiler.Compiler().compile( + hpo_pipeline, "SageMaker_hyperparameter_tuning_pipeline" + ".yaml" + ) diff --git a/components/aws/sagemaker/tests/integration_tests/resources/definition/training_pipeline.py b/components/aws/sagemaker/tests/integration_tests/resources/definition/training_pipeline.py new file mode 100644 index 00000000000..ad8eab23bff --- /dev/null +++ b/components/aws/sagemaker/tests/integration_tests/resources/definition/training_pipeline.py @@ -0,0 +1,55 @@ +import kfp +from kfp import components +from kfp import dsl +from kfp.aws import use_aws_secret + +sagemaker_train_op = components.load_component_from_file("../../train/component.yaml") + + +@dsl.pipeline(name="SageMaker Training", description="SageMaker training job test") +def training_pipeline( + region="", + endpoint_url="", + image="", + training_input_mode="", + hyperparameters="", + channels="", + instance_type="", + instance_count="", + volume_size="", + max_run_time="", + model_artifact_path="", + output_encryption_key="", + network_isolation="", + traffic_encryption="", + spot_instance="", + max_wait_time="", + checkpoint_config="{}", + role="", +): + sagemaker_train_op( + region=region, + endpoint_url=endpoint_url, + image=image, + training_input_mode=training_input_mode, + hyperparameters=hyperparameters, + channels=channels, + instance_type=instance_type, + instance_count=instance_count, + volume_size=volume_size, + max_run_time=max_run_time, + 
model_artifact_path=model_artifact_path, + output_encryption_key=output_encryption_key, + network_isolation=network_isolation, + traffic_encryption=traffic_encryption, + spot_instance=spot_instance, + max_wait_time=max_wait_time, + checkpoint_config=checkpoint_config, + role=role, + ) + + +if __name__ == "__main__": + kfp.compiler.Compiler().compile( + training_pipeline, "SageMaker_training_pipeline" + ".yaml" + ) diff --git a/components/aws/sagemaker/tests/integration_tests/resources/definition/transform_job_pipeline.py b/components/aws/sagemaker/tests/integration_tests/resources/definition/transform_job_pipeline.py new file mode 100644 index 00000000000..e8b38697f3a --- /dev/null +++ b/components/aws/sagemaker/tests/integration_tests/resources/definition/transform_job_pipeline.py @@ -0,0 +1,66 @@ +import kfp +from kfp import components +from kfp import dsl +from kfp.aws import use_aws_secret + +sagemaker_model_op = components.load_component_from_file("../../model/component.yaml") +sagemaker_batch_transform_op = components.load_component_from_file( + "../../batch_transform/component.yaml" +) + + +@dsl.pipeline( + name="Batch Transform Job in SageMaker", + description="SageMaker batch transform component test", +) +def batch_transform_pipeline( + region="", + image="", + model_name="", + job_name="", + model_artifact_url="", + instance_type="", + instance_count="", + data_input="", + data_type="", + content_type="", + compression_type="", + output_location="", + max_concurrent="", + max_payload="", + batch_strategy="", + split_type="", + network_isolation="", + role="", +): + create_model = sagemaker_model_op( + region=region, + model_name=model_name, + image=image, + model_artifact_url=model_artifact_url, + network_isolation=network_isolation, + role=role, + ) + + sagemaker_batch_transform_op( + region=region, + model_name=create_model.output, + job_name=job_name, + instance_type=instance_type, + instance_count=instance_count, + max_concurrent=max_concurrent, + max_payload=max_payload, + batch_strategy=batch_strategy, + input_location=data_input, + data_type=data_type, + content_type=content_type, + split_type=split_type, + compression_type=compression_type, + output_location=output_location, + ) + + +if __name__ == "__main__": + kfp.compiler.Compiler().compile( + batch_transform_pipeline, "SageMaker_batch_transform" + ".yaml" + ) diff --git a/components/aws/sagemaker/tests/integration_tests/resources/definition/workteam_pipeline.py b/components/aws/sagemaker/tests/integration_tests/resources/definition/workteam_pipeline.py new file mode 100644 index 00000000000..3e227330128 --- /dev/null +++ b/components/aws/sagemaker/tests/integration_tests/resources/definition/workteam_pipeline.py @@ -0,0 +1,36 @@ +#!/usr/bin/env python3 + +import kfp +import json +import copy +from kfp import components +from kfp import dsl +from kfp.aws import use_aws_secret + +sagemaker_workteam_op = components.load_component_from_file( + "../../workteam/component.yaml" +) + + +@dsl.pipeline( + name="SageMaker WorkTeam test pipeline", + description="SageMaker WorkTeam test pipeline", +) +def workteam_test( + region="", team_name="", description="", user_pool="", user_groups="", client_id="" +): + + workteam = sagemaker_workteam_op( + region=region, + team_name=team_name, + description=description, + user_pool=user_pool, + user_groups=user_groups, + client_id=client_id, + ) + + +if __name__ == "__main__": + kfp.compiler.Compiler().compile( + workteam_test, "SageMaker_WorkTeam_Pipelines" + ".yaml" + ) diff --git 
a/components/aws/sagemaker/tests/integration_tests/scripts/generate_iam_role b/components/aws/sagemaker/tests/integration_tests/scripts/generate_iam_role new file mode 100755 index 00000000000..7e4d1e9b143 --- /dev/null +++ b/components/aws/sagemaker/tests/integration_tests/scripts/generate_iam_role @@ -0,0 +1,68 @@ +#!/usr/bin/env bash + +# Helper script to generate an IAM Role needed to install role-based authentication to a KFP service account. +# +# Run as: +# $ ./generate_iam_role ${cluster_arn/cluster_name} ${role_name} ${cluster_region} [optional: ${service_namespace} ${service_account}] +# + +CLUSTER_ARN="${1}" +ROLE_NAME="${2}" +CLUSTER_REGION="${3:-us-east-1}" +SERVICE_NAMESPACE="${4:-kubeflow}" +SERVICE_ACCOUNT="${5:-pipeline-runner}" +aws_account=$(aws sts get-caller-identity --query Account --output text) +trustfile="trust.json" + +cwd=$(dirname $(realpath $0)) + +# if using an existing cluster, use the cluster arn to get the region and cluster name +# example, cluster_arn=arn:aws:eks:us-east-1:12345678910:cluster/test +cluster_name=$(echo ${CLUSTER_ARN} | cut -d'/' -f2) + +# A function to get the OIDC_ID associated with an EKS cluster +function get_oidc_id { + # TODO: Ideally this should be based on version compatibility instead of command failure + eksctl utils associate-iam-oidc-provider --cluster ${cluster_name} --region ${CLUSTER_REGION} --approve + if [[ $? -ge 1 ]]; then + eksctl utils associate-iam-oidc-provider --name ${cluster_name} --region ${CLUSTER_REGION} --approve + fi + + local oidc=$(aws eks describe-cluster --name ${cluster_name} --region ${CLUSTER_REGION} --query cluster.identity.oidc.issuer --output text) + oidc_id=$(echo ${oidc} | rev | cut -d'/' -f1 | rev) +} + +# A function that generates an IAM role for the given account, cluster, namespace, region +# Parameter: +# $1: Name of the trust file to generate. +function create_namespaced_iam_role { + local trustfile="${1}" + # Check if role already exists + aws iam get-role --role-name ${ROLE_NAME} + if [[ $? -eq 0 ]]; then + echo "A role for this cluster and namespace already exists in this account, assuming sagemaker access and proceeding." + else + echo "IAM Role does not exist, creating a new Role for the cluster" + aws iam create-role --role-name ${ROLE_NAME} --assume-role-policy-document file://${trustfile} --output=text --query "Role.Arn" + aws iam attach-role-policy --role-name ${ROLE_NAME} --policy-arn arn:aws:iam::aws:policy/AmazonSageMakerFullAccess + fi +} + +# Remove the generated trust file +# Parameter: +# $1: Name of the trust file to delete. 
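+# (The trust file is regenerated on every run, so deleting any existing copy is safe.)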
+function delete_generated_file { + rm "${1}" +} + +echo "Get the OIDC ID for the cluster" +get_oidc_id +echo "Delete the trust json file if it already exists" +delete_generated_file "${trustfile}" +echo "Generate a trust json" +"$cwd"/generate_trust_policy ${CLUSTER_REGION} ${aws_account} ${oidc_id} ${SERVICE_NAMESPACE} ${SERVICE_ACCOUNT} > "${trustfile}" +echo "Create the IAM Role using these values" +create_namespaced_iam_role "${trustfile}" +echo "Cleanup for the next run" +delete_generated_file "${trustfile}" + diff --git a/components/aws/sagemaker/tests/integration_tests/scripts/generate_trust_policy b/components/aws/sagemaker/tests/integration_tests/scripts/generate_trust_policy new file mode 100755 index 00000000000..1c10fa10fe8 --- /dev/null +++ b/components/aws/sagemaker/tests/integration_tests/scripts/generate_trust_policy @@ -0,0 +1,39 @@ +#!/usr/bin/env bash + +# Helper script to generate the trust policy needed to assign role-based authentication to a KFP service account. +# +# Run as: +# $ ./generate_trust_policy ${EKS_CLUSTER_REGION} ${AWS_ACCOUNT_ID} ${OIDC_ID} ${SERVICE_NAMESPACE} ${SERVICE_ACCOUNT} > trust.json +# +# For example: +# $ ./generate_trust_policy us-west-2 123456789012 D48675832CA65BD10A532F597OIDCID > trust.json +# This will create a file `trust.json` containing a role policy that enables the KFP service runner in an EKS cluster to assume AWS roles. +# +# The SERVICE_NAMESPACE parameter is for when you want to run Kubeflow in a custom namespace other than "kubeflow". +# The SERVICE_ACCOUNT parameter is for when you want to give permissions to a service account other than the default "pipeline-runner". + +cluster_region="$1" +account_number="$2" +oidc_id="$3" +service_namespace="${4}" +service_account="${5}" + +printf '{ + "Version": "2012-10-17", + "Statement": [ + { + "Effect": "Allow", + "Principal": { + "Federated": "arn:aws:iam::'"${account_number}"':oidc-provider/oidc.eks.'"${cluster_region}"'.amazonaws.com/id/'"${oidc_id}"'" + }, + "Action": "sts:AssumeRoleWithWebIdentity", + "Condition": { + "StringEquals": { + "oidc.eks.'"${cluster_region}"'.amazonaws.com/id/'"${oidc_id}"':aud": "sts.amazonaws.com", + "oidc.eks.'"${cluster_region}"'.amazonaws.com/id/'"${oidc_id}"':sub": "system:serviceaccount:'"${service_namespace}"':'"${service_account}"'" + } + } + } + ] +} +' \ No newline at end of file diff --git a/components/aws/sagemaker/tests/integration_tests/scripts/run_integration_tests b/components/aws/sagemaker/tests/integration_tests/scripts/run_integration_tests new file mode 100755 index 00000000000..6ad3fb9db28 --- /dev/null +++ b/components/aws/sagemaker/tests/integration_tests/scripts/run_integration_tests @@ -0,0 +1,168 @@ +#!/usr/bin/env bash + +set -u +set -o pipefail + +usage(){ + echo "Usage: $0 -n <name> [-r <region>]" + exit 1 +} + +cwd=$(dirname $(realpath $0)) + +### Input parameters +DEPLOY_NAME="sagemaker-kfp-"$(date '+%Y-%m-%d-%H-%M-%S')"" # The name given to the entire deployment (tagging all resources) +REGION=${REGION:-"$(aws configure get region)"} # Deployment region + +### Configuration parameters +EKS_EXISTING_CLUSTER=${EKS_EXISTING_CLUSTER:-""} # Use an existing EKS cluster +EKS_CLUSTER_VERSION=${EKS_CLUSTER_VERSION:-"1.15"} # EKS cluster K8s version +EKS_NODE_COUNT=${EKS_NODE_COUNT:-"1"} # The initial node count of the EKS cluster +EKS_PUBLIC_SUBNETS=${EKS_PUBLIC_SUBNETS:-""} +EKS_PRIVATE_SUBNETS=${EKS_PRIVATE_SUBNETS:-""} + +### Testing parameters +MINIO_LOCAL_PORT=${MINIO_LOCAL_PORT:-9000} +KFP_NAMESPACE=${KFP_NAMESPACE:-"kubeflow"}
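+# Example invocation (values are illustrative):
+#   S3_DATA_BUCKET=my-data-bucket SAGEMAKER_EXECUTION_ROLE_ARN=arn:aws:iam::123456789012:role/my-role ./run_integration_tests -r us-west-2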
+KFP_SERVICE_ACCOUNT=${KFP_SERVICE_ACCOUNT:-"pipeline-runner"} + +PYTEST_MARKER=${PYTEST_MARKER:-""} +S3_DATA_BUCKET=${S3_DATA_BUCKET:-""} +SAGEMAKER_EXECUTION_ROLE_ARN=${SAGEMAKER_EXECUTION_ROLE_ARN:-""} + +while getopts ":n:r:s:" opt; do + case $opt in + n) + DEPLOY_NAME="$OPTARG" + ;; + s) + S3_DATA_BUCKET="$OPTARG" + ;; + r) + REGION="$OPTARG" + ;; + \?) + echo "Invalid option: -$OPTARG" >&2 + exit 1 + ;; + :) + echo "Option -$OPTARG requires an argument." >&2 + exit 1 + ;; + esac +done + +# Ensure a deployment name was specified +if [ "$DEPLOY_NAME" == "" ]; then + echo "Missing deployment name" + usage + exit 1 +fi + +if [ "$S3_DATA_BUCKET" == "" ]; then + echo "Missing S3 data bucket name" + usage + exit 1 +fi + +function cleanup() { + set +e + + cleanup_kfp + delete_generated_role + + if [[ -z "${EKS_EXISTING_CLUSTER}" ]]; then + delete_eks + fi +} + +# Set the trap to clean up resources in the case of an error +trap cleanup EXIT +set -e + +function launch_eks() { + EKS_CLUSTER_NAME="${DEPLOY_NAME}-eks-cluster" + + echo "[Creating EKS] Launching EKS cluster $EKS_CLUSTER_NAME" + + eksctl_args=( --managed --nodes "${EKS_NODE_COUNT}" --node-type=c5.xlarge --timeout=30m --region "${REGION}" --auto-kubeconfig --version "${EKS_CLUSTER_VERSION}" ) + [ ! -z "${EKS_PUBLIC_SUBNETS}" ] && eksctl_args+=( --vpc-public-subnets="${EKS_PUBLIC_SUBNETS}" ) + [ ! -z "${EKS_PRIVATE_SUBNETS}" ] && eksctl_args+=( --vpc-private-subnets="${EKS_PRIVATE_SUBNETS}" ) + + eksctl create cluster "${EKS_CLUSTER_NAME}" "${eksctl_args[@]}" + + aws eks update-kubeconfig --name "$EKS_CLUSTER_NAME" --region "$REGION" + + echo "[Creating EKS] $EKS_CLUSTER_NAME launched" +} + +function delete_eks() { + eksctl delete cluster --name "${EKS_CLUSTER_NAME}" --region "${REGION}" +} + +function install_kfp() { + echo "[Installing KFP] Applying KFP manifests" + + PIPELINE_VERSION=0.5.1 + kubectl apply -k github.com/kubeflow/pipelines/manifests/kustomize/cluster-scoped-resources?ref=$PIPELINE_VERSION + kubectl wait --for condition=established --timeout=60s crd/applications.app.k8s.io + kubectl apply -k github.com/kubeflow/pipelines/manifests/kustomize/env/dev?ref=$PIPELINE_VERSION + + echo "[Installing KFP] Port-forwarding Minio" + + kubectl wait --for=condition=ready -n "${KFP_NAMESPACE}" pod -l app=minio --timeout=5m + kubectl port-forward -n kubeflow svc/minio-service $MINIO_LOCAL_PORT:9000 & + MINIO_PID=$! 
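+ # The port-forward above runs in the background; its PID is recorded so that cleanup_kfp can kill it when the script exits (via the EXIT trap set earlier).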
+ + echo "[Installing KFP] Minio port-forwarded to ${MINIO_LOCAL_PORT}" + + echo "[Installing KFP] Waiting for pods to stand up" + + kubectl wait --for=condition=ready -n "${KFP_NAMESPACE}" pod -l app=ml-pipeline --timeout=5m + + # TODO: Replace with calculated waits + # For the moment we don't know which pods will be slower, so we are just relying on a fixed interval + sleep 3m + + echo "[Installing KFP] Pipeline pods are ready" +} + +function generate_iam_role_name() { + OIDC_ROLE_NAME="$(echo "${DEPLOY_NAME}-kubeflow-role" | cut -c1-64)" + OIDC_ROLE_ARN="arn:aws:iam::$(aws sts get-caller-identity --query=Account --output=text):role/${OIDC_ROLE_NAME}" +} + +function install_generated_role() { + kubectl patch serviceaccount -n ${KFP_NAMESPACE} ${KFP_SERVICE_ACCOUNT} --patch '{"metadata": {"annotations": {"eks.amazonaws.com/role-arn": "'"${OIDC_ROLE_ARN}"'"}}}' +} + +function delete_generated_role() { + # Delete the role associated with the cluster that's being deleted + aws iam detach-role-policy --role-name "${OIDC_ROLE_NAME}" --policy-arn arn:aws:iam::aws:policy/AmazonSageMakerFullAccess + aws iam delete-role --role-name "${OIDC_ROLE_NAME}" +} + +function cleanup_kfp() { + # Clean up Minio + if [ ! -z "${MINIO_PID:-}" ]; then + kill -9 $MINIO_PID || true + fi +} + +if [[ -z "${EKS_EXISTING_CLUSTER}" ]]; then + launch_eks +else + aws eks update-kubeconfig --name "${EKS_EXISTING_CLUSTER}" --region "$REGION" + EKS_CLUSTER_NAME="${EKS_EXISTING_CLUSTER}" + DEPLOY_NAME="${EKS_EXISTING_CLUSTER}" +fi + +generate_iam_role_name +"$cwd"/generate_iam_role ${EKS_CLUSTER_NAME} ${OIDC_ROLE_NAME} ${REGION} ${KFP_NAMESPACE} ${KFP_SERVICE_ACCOUNT} +install_kfp +install_generated_role + +pytest_args=( --region "${REGION}" --role-arn "${SAGEMAKER_EXECUTION_ROLE_ARN}" --s3-data-bucket "${S3_DATA_BUCKET}" --minio-service-port "${MINIO_LOCAL_PORT}" --kfp-namespace "${KFP_NAMESPACE}" ) +[ ! -z "${PYTEST_MARKER}" ] && pytest_args+=( -m "${PYTEST_MARKER}" ) + +cd tests/integration_tests && python -m pytest "${pytest_args[@]}" --junitxml ./integration_tests.log -n $(nproc) \ No newline at end of file diff --git a/components/aws/sagemaker/tests/integration_tests/utils/__init__.py b/components/aws/sagemaker/tests/integration_tests/utils/__init__.py new file mode 100644 index 00000000000..7b3be9448d0 --- /dev/null +++ b/components/aws/sagemaker/tests/integration_tests/utils/__init__.py @@ -0,0 +1,98 @@ +import os +import subprocess +import pytest +import tarfile +import yaml +import random +import string +import shutil + +from sagemaker.amazon.amazon_estimator import get_image_uri + + +def get_region(): + return os.environ.get("AWS_REGION") + + +def get_role_arn(): + return os.environ.get("ROLE_ARN") + + +def get_s3_data_bucket(): + return os.environ.get("S3_DATA_BUCKET") + + +def get_minio_service_port(): + return os.environ.get("MINIO_SERVICE_PORT") + + +def get_kfp_namespace(): + return os.environ.get("NAMESPACE") + + +def get_algorithm_image_registry(region, algorithm): + return get_image_uri(region, algorithm).split(".")[0] + + +def run_command(cmd, *popenargs, **kwargs): + if isinstance(cmd, str): + cmd = cmd.split(" ") + try: + print("executing command: {}".format(" ".join(cmd))) + return subprocess.check_output( + cmd, *popenargs, stderr=subprocess.STDOUT, **kwargs + ) + except subprocess.CalledProcessError as e: + pytest.fail(f"Command failed. 
Error code: {e.returncode}, Log: {e.output}") + + +def read_from_file_in_tar(file_path, file_name, decode=True): + with tarfile.open(file_path).extractfile(file_name) as f: + if decode: + return f.read().decode() + else: + return f.read() + + +def replace_placeholders(input_filename, output_filename): + region = get_region() + variables_to_replace = { + "((REGION))": region, + "((ROLE_ARN))": get_role_arn(), + "((DATA_BUCKET))": get_s3_data_bucket(), + "((KMEANS_REGISTRY))": get_algorithm_image_registry(region, "kmeans"), + } + + filedata = "" + with open(input_filename, "r") as f: + filedata = f.read() + for replace_key, replace_value in variables_to_replace.items(): + filedata = filedata.replace(replace_key, replace_value) + + with open(output_filename, "w") as f: + f.write(filedata) + return output_filename + + +def load_params(file_name): + with open(file_name, "r") as f: + return yaml.safe_load(f) + + +def generate_random_string(length): + """Generate a random string with twice the length of input parameter""" + assert isinstance(length, int) + return "".join( + [random.choice(string.ascii_lowercase) for n in range(length)] + + [random.choice(string.digits) for n in range(length)] + ) + + +def mkdir(directory_path): + if not os.path.exists(directory_path): + os.makedirs(directory_path) + return directory_path + + +def remove_dir(dir_path): + shutil.rmtree(dir_path) diff --git a/components/aws/sagemaker/tests/integration_tests/utils/argo_utils.py b/components/aws/sagemaker/tests/integration_tests/utils/argo_utils.py new file mode 100644 index 00000000000..874cbf4d11b --- /dev/null +++ b/components/aws/sagemaker/tests/integration_tests/utils/argo_utils.py @@ -0,0 +1,8 @@ +import utils + + +def print_workflow_logs(workflow_name): + output = utils.run_command( + f"argo logs {workflow_name} -n {utils.get_kfp_namespace()}" + ) + print(f"workflow logs:\n", output.decode()) diff --git a/components/aws/sagemaker/tests/integration_tests/utils/kfp_client_utils.py b/components/aws/sagemaker/tests/integration_tests/utils/kfp_client_utils.py new file mode 100644 index 00000000000..2949f7b3c0a --- /dev/null +++ b/components/aws/sagemaker/tests/integration_tests/utils/kfp_client_utils.py @@ -0,0 +1,76 @@ +import os +import utils +import pytest +import time + +from utils import argo_utils + + +def compile_and_run_pipeline( + client, + experiment_id, + pipeline_definition, + input_params, + output_file_dir, + pipeline_name, +): + pipeline_path = os.path.join(output_file_dir, pipeline_name) + utils.run_command( + f"dsl-compile --py {pipeline_definition} --output {pipeline_path}.yaml" + ) + run = client.run_pipeline( + experiment_id, pipeline_name, f"{pipeline_path}.yaml", input_params + ) + return run.id + + +def wait_for_job_completion(client, run_id, timeout, status_to_check): + response = client.wait_for_run_completion(run_id, timeout) + status = response.run.status.lower() == status_to_check + return status + + +def wait_for_job_status(client, run_id, timeout, status_to_check="succeeded"): + if status_to_check == "succeeded": + status = wait_for_job_completion(client, run_id, timeout, status_to_check) + else: + time.sleep(timeout) + response = client.get_run(run_id) + status = response.run.status.lower() == status_to_check + + return status + + +def get_workflow_json(client, run_id): + # API not in readthedocs + # Refer: https://github.com/kubeflow/pipelines/blob/master/sdk/python/kfp/_client.py#L663 + return client._get_workflow_json(run_id) + + +def compile_run_monitor_pipeline( + client, + 
experiment_id, + pipeline_definition, + input_params, + output_file_dir, + pipeline_name, + timeout, + status_to_check="succeeded", + check=True, +): + run_id = compile_and_run_pipeline( + client, + experiment_id, + pipeline_definition, + input_params, + output_file_dir, + pipeline_name, + ) + status = wait_for_job_status(client, run_id, timeout, status_to_check) + workflow_json = get_workflow_json(client, run_id) + + if check and not status: + argo_utils.print_workflow_logs(workflow_json["metadata"]["name"]) + pytest.fail(f"Test Failed: {pipeline_name}. Run-id: {run_id}") + + return run_id, status, workflow_json diff --git a/components/aws/sagemaker/tests/integration_tests/utils/minio_utils.py b/components/aws/sagemaker/tests/integration_tests/utils/minio_utils.py new file mode 100644 index 00000000000..cba905b7de9 --- /dev/null +++ b/components/aws/sagemaker/tests/integration_tests/utils/minio_utils.py @@ -0,0 +1,47 @@ +import utils +import os + +from minio import Minio + + +def get_artifact_in_minio(workflow_json, step_name, artifact_name, output_dir): + """ Minio is an S3-style object storage server for Kubernetes. This method parses a pipeline run's workflow JSON to + fetch the output artifact location in the Minio server for a given step in the pipeline and downloads it. + + There are two types of nodes in the workflow_json: DAG and pod. DAG corresponds to the whole pipeline and + pod corresponds to a step in the DAG. The check `node["type"] != "DAG"` handles the case where the name of a component is + part of the pipeline name + """ + + s3_data = {} + minio_access_key = "minio" + minio_secret_key = "minio123" + minio_port = utils.get_minio_service_port() + for node in workflow_json["status"]["nodes"].values(): + if step_name in node["name"] and node["type"] != "DAG": + for artifact in node["outputs"]["artifacts"]: + if artifact["name"] == artifact_name: + s3_data = artifact["s3"] + minio_client = Minio( + "localhost:{}".format(minio_port), + access_key=minio_access_key, + secret_key=minio_secret_key, + secure=False, + ) + output_file = os.path.join(output_dir, artifact_name + ".tgz") + minio_client.fget_object(s3_data["bucket"], s3_data["key"], output_file) + # https://docs.min.io/docs/python-client-api-reference.html#fget_object + + return output_file + + +def artifact_download_iterator(workflow_json, outputs_dict, output_dir): + output_files = {} + for step_name, artifacts in outputs_dict.items(): + output_files[step_name] = {} + for artifact in artifacts: + output_files[step_name][artifact] = get_artifact_in_minio( + workflow_json, step_name, step_name + "-" + artifact, output_dir + ) + + return output_files diff --git a/components/aws/sagemaker/tests/integration_tests/utils/s3_utils.py b/components/aws/sagemaker/tests/integration_tests/utils/s3_utils.py new file mode 100644 index 00000000000..7fefa1545e3 --- /dev/null +++ b/components/aws/sagemaker/tests/integration_tests/utils/s3_utils.py @@ -0,0 +1,4 @@ +def check_object_exists(client, bucket, key): + waiter = client.get_waiter("object_exists") + waiter.wait(Bucket=bucket, Key=key) + return True diff --git a/components/aws/sagemaker/tests/integration_tests/utils/sagemaker_utils.py b/components/aws/sagemaker/tests/integration_tests/utils/sagemaker_utils.py new file mode 100644 index 00000000000..4898d89adbf --- /dev/null +++ b/components/aws/sagemaker/tests/integration_tests/utils/sagemaker_utils.py @@ -0,0 +1,69 @@ +def describe_training_job(client, training_job_name): + return client.describe_training_job(TrainingJobName=training_job_name) + + +def 
describe_model(client, model_name): + return client.describe_model(ModelName=model_name) + + +def describe_endpoint(client, endpoint_name): + return client.describe_endpoint(EndpointName=endpoint_name) + + +def delete_endpoint(client, endpoint_name): + client.delete_endpoint(EndpointName=endpoint_name) + waiter = client.get_waiter("endpoint_deleted") + waiter.wait(EndpointName=endpoint_name) + + +def describe_hpo_job(client, job_name): + return client.describe_hyper_parameter_tuning_job( + HyperParameterTuningJobName=job_name + ) + + +def describe_transform_job(client, job_name): + return client.describe_transform_job(TransformJobName=job_name) + + +def describe_workteam(client, workteam_name): + return client.describe_workteam(WorkteamName=workteam_name) + + +def list_workteams(client): + return client.list_workteams() + + +def get_cognito_member_definitions(client): + # This is one way to get the user_pool and client_id for the SageMaker workforce. + # An alternative would be to take these values as user input via params or a config file. + # The current mechanism expects that there exists at least one private workteam in the region. + default_workteam = list_workteams(client)["Workteams"][0]["MemberDefinitions"][0][ + "CognitoMemberDefinition" + ] + return ( + default_workteam["UserPool"], + default_workteam["ClientId"], + default_workteam["UserGroup"], + ) + + +def list_labeling_jobs_for_workteam(client, workteam_arn): + return client.list_labeling_jobs_for_workteam(WorkteamArn=workteam_arn) + + +def describe_labeling_job(client, labeling_job_name): + return client.describe_labeling_job(LabelingJobName=labeling_job_name) + + +def get_workteam_arn(client, workteam_name): + response = describe_workteam(client, workteam_name) + return response["Workteam"]["WorkteamArn"] + + +def delete_workteam(client, workteam_name): + client.delete_workteam(WorkteamName=workteam_name) + + +def stop_labeling_job(client, labeling_job_name): + client.stop_labeling_job(LabelingJobName=labeling_job_name) diff --git a/components/aws/sagemaker/tests/unit_tests/.gitignore b/components/aws/sagemaker/tests/unit_tests/.gitignore new file mode 100644 index 00000000000..1fa2e578575 --- /dev/null +++ b/components/aws/sagemaker/tests/unit_tests/.gitignore @@ -0,0 +1,2 @@ +*.log +.coverage \ No newline at end of file diff --git a/components/aws/sagemaker/tests/unit_tests/Dockerfile b/components/aws/sagemaker/tests/unit_tests/Dockerfile new file mode 100644 index 00000000000..7a404f1b3d6 --- /dev/null +++ b/components/aws/sagemaker/tests/unit_tests/Dockerfile @@ -0,0 +1,17 @@ +FROM amazonlinux:2 + +ENV PYTHONPATH /app + +RUN yum update -y \ + && yum install -y -q \ + python3 + +# requirements.txt is copied separately to preserve cache +COPY ./sagemaker/tests/unit_tests/requirements.txt . +RUN pip3 install -r requirements.txt + +COPY ./sagemaker/ /app/ + +WORKDIR /app/tests/unit_tests/ + +ENTRYPOINT [ "bash", "./run_all_tests.sh" ] \ No newline at end of file diff --git a/components/aws/sagemaker/tests/unit_tests/README.md b/components/aws/sagemaker/tests/unit_tests/README.md new file mode 100644 index 00000000000..4b59313c589 --- /dev/null +++ b/components/aws/sagemaker/tests/unit_tests/README.md @@ -0,0 +1,41 @@ +# Unit tests for AWS SageMaker KFP Components + +## How to run these tests + +### Method 1 : Run these tests using Docker + +1. Clone the git repo + ``` + git clone https://github.com/kubeflow/pipelines.git + ``` +2. 
Build the Docker image + ``` + cd pipelines/components/aws + docker build ./ -f ./sagemaker/tests/unit_tests/Dockerfile -t amazon/unit-test-aws-sagemaker-kfp-components + ``` +3. Run all unit tests + ``` + docker run -it amazon/unit-test-aws-sagemaker-kfp-components + ``` + +-------------- + +### Method 2 : Run these tests locally + +1. Clone the git repo + ``` + git clone https://github.com/kubeflow/pipelines.git + ``` +2. Install the pip packages required for testing + ``` + cd pipelines/components/aws/sagemaker/tests/unit_tests/ + + pip install -r requirements.txt + ``` +3. Run all unit tests + ``` + # while in the same directory pipelines/components/aws/sagemaker/tests/unit_tests/ + + ./run_all_tests.sh + ``` + \ No newline at end of file diff --git a/components/aws/sagemaker/tests/unit_tests/pytest.ini b/components/aws/sagemaker/tests/unit_tests/pytest.ini new file mode 100644 index 00000000000..77749dcfd2b --- /dev/null +++ b/components/aws/sagemaker/tests/unit_tests/pytest.ini @@ -0,0 +1,2 @@ +[pytest] +addopts = -rA \ No newline at end of file diff --git a/components/aws/sagemaker/tests/unit_tests/requirements.txt b/components/aws/sagemaker/tests/unit_tests/requirements.txt new file mode 100644 index 00000000000..cb309fa658f --- /dev/null +++ b/components/aws/sagemaker/tests/unit_tests/requirements.txt @@ -0,0 +1,6 @@ +boto3==1.12.33 +coverage==5.1 +pathlib2==2.3.5 +pytest==5.4.1 +pyyaml==5.3.1 +sagemaker==1.56.1 \ No newline at end of file diff --git a/components/aws/sagemaker/tests/unit_tests/run_all_tests.sh b/components/aws/sagemaker/tests/unit_tests/run_all_tests.sh new file mode 100755 index 00000000000..756716e0475 --- /dev/null +++ b/components/aws/sagemaker/tests/unit_tests/run_all_tests.sh @@ -0,0 +1,4 @@ +export PYTHONPATH=../../ + +coverage run -m pytest --ignore=tests/test_utils.py --junitxml ./unit_tests.log +coverage report -m --omit "*/usr/*,tests/*,*__init__*,*/Python/*" \ No newline at end of file diff --git a/components/aws/sagemaker/test/__init__.py b/components/aws/sagemaker/tests/unit_tests/tests/__init__.py similarity index 100% rename from components/aws/sagemaker/test/__init__.py rename to components/aws/sagemaker/tests/unit_tests/tests/__init__.py diff --git a/components/aws/sagemaker/tests/unit_tests/tests/test_batch_transform.py b/components/aws/sagemaker/tests/unit_tests/tests/test_batch_transform.py new file mode 100644 index 00000000000..d1ced1efd1a --- /dev/null +++ b/components/aws/sagemaker/tests/unit_tests/tests/test_batch_transform.py @@ -0,0 +1,39 @@ +import json +import unittest + +from unittest.mock import patch, Mock, MagicMock +from botocore.exceptions import ClientError +from datetime import datetime + +from batch_transform.src import batch_transform +from common import _utils +from . 
import test_utils + + +# TODO : Errors out if model_name doesn't contain '-' +# fix model_name '-' bug + +required_args = [ + '--region', 'us-west-2', + '--model_name', 'model-test', + '--input_location', 's3://fake-bucket/data', + '--output_location', 's3://fake-bucket/output', + '--instance_type', 'ml.c5.18xlarge', + '--output_location_file', 'tmp/' +] + +class BatchTransformTestCase(unittest.TestCase): + @classmethod + def setUpClass(cls): + parser = batch_transform.create_parser() + cls.parser = parser + + def test_sample(self): + args = self.parser.parse_args(required_args) + response = _utils.create_transform_job_request(vars(args)) + self.assertEqual(response['TransformOutput']['S3OutputPath'], 's3://fake-bucket/output') + + def test_empty_string(self): + args = self.parser.parse_args(required_args) + response = _utils.create_transform_job_request(vars(args)) + test_utils.check_empty_string_values(response) \ No newline at end of file diff --git a/components/aws/sagemaker/tests/unit_tests/tests/test_deploy.py b/components/aws/sagemaker/tests/unit_tests/tests/test_deploy.py new file mode 100644 index 00000000000..63c720561b7 --- /dev/null +++ b/components/aws/sagemaker/tests/unit_tests/tests/test_deploy.py @@ -0,0 +1,32 @@ +import json +import unittest + +from unittest.mock import patch, Mock, MagicMock +from botocore.exceptions import ClientError +from datetime import datetime + +from deploy.src import deploy +from common import _utils +from . import test_utils + + +required_args = [ + '--region', 'us-west-2', + '--model_name_1', 'model-test' +] + +class DeployTestCase(unittest.TestCase): + @classmethod + def setUpClass(cls): + parser = deploy.create_parser() + cls.parser = parser + + def test_sample(self): + args = self.parser.parse_args(required_args) + response = _utils.create_endpoint_config_request(vars(args)) + self.assertEqual(response['EndpointConfigName'], 'EndpointConfig-test') + + def test_empty_string(self): + args = self.parser.parse_args(required_args) + response = _utils.create_endpoint_config_request(vars(args)) + test_utils.check_empty_string_values(response) \ No newline at end of file diff --git a/components/aws/sagemaker/tests/unit_tests/tests/test_ground_truth.py b/components/aws/sagemaker/tests/unit_tests/tests/test_ground_truth.py new file mode 100644 index 00000000000..bf7d49d8985 --- /dev/null +++ b/components/aws/sagemaker/tests/unit_tests/tests/test_ground_truth.py @@ -0,0 +1,42 @@ +import json +import unittest + +from unittest.mock import patch, Mock, MagicMock +from botocore.exceptions import ClientError +from datetime import datetime + +from ground_truth.src import ground_truth +from common import _utils +from . 
import test_utils + + +required_args = [ + '--region', 'us-west-2', + '--role', 'arn:aws:iam::123456789012:user/Development/product_1234/*', + '--job_name', 'test_job', + '--manifest_location', 's3://fake-bucket/manifest', + '--output_location', 's3://fake-bucket/output', + '--task_type', 'fake-task', + '--worker_type', 'fake_worker', + '--ui_template', 's3://fake-bucket/ui_template', + '--title', 'fake-image-labelling-work', + '--description', 'fake job', + '--num_workers_per_object', '1', + '--time_limit', '180', +] + +class GroundTruthTestCase(unittest.TestCase): + @classmethod + def setUpClass(cls): + parser = ground_truth.create_parser() + cls.parser = parser + + def test_sample(self): + args = self.parser.parse_args(required_args) + response = _utils.create_labeling_job_request(vars(args)) + self.assertEqual(response['LabelingJobName'], 'test_job') + + def test_empty_string(self): + args = self.parser.parse_args(required_args) + response = _utils.create_labeling_job_request(vars(args)) + test_utils.check_empty_string_values(response) diff --git a/components/aws/sagemaker/tests/unit_tests/tests/test_hpo.py b/components/aws/sagemaker/tests/unit_tests/tests/test_hpo.py new file mode 100644 index 00000000000..e5fb971bf64 --- /dev/null +++ b/components/aws/sagemaker/tests/unit_tests/tests/test_hpo.py @@ -0,0 +1,410 @@ +import json +import unittest + +from unittest.mock import patch, call, Mock, MagicMock, mock_open +from botocore.exceptions import ClientError +from datetime import datetime + +from hyperparameter_tuning.src import hyperparameter_tuning as hpo +from train.src import train +from common import _utils +from . import test_utils + + +required_args = [ + '--region', 'us-west-2', + '--role', 'arn:aws:iam::123456789012:user/Development/product_1234/*', + '--image', 'test-image', + '--metric_name', 'test-metric', + '--metric_type', 'Maximize', + '--channels', '[{"ChannelName": "train", "DataSource": {"S3DataSource":{"S3Uri": "s3://fake-bucket/data","S3DataType":"S3Prefix","S3DataDistributionType": "FullyReplicated"}},"ContentType":"","CompressionType": "None","RecordWrapperType":"None","InputMode": "File"}]', + '--output_location', 'test-output-location', + '--max_num_jobs', '5', + '--max_parallel_jobs', '2' +] + +class HyperparameterTestCase(unittest.TestCase): + @classmethod + def setUpClass(cls): + parser = hpo.create_parser() + cls.parser = parser + + def test_create_parser(self): + self.assertIsNotNone(self.parser) + + def test_spot_bad_args(self): + no_max_wait_args = self.parser.parse_args(required_args + ['--spot_instance', 'True']) + no_checkpoint_args = self.parser.parse_args(required_args + ['--spot_instance', 'True', '--max_wait_time', '3600']) + no_s3_uri_args = self.parser.parse_args(required_args + ['--spot_instance', 'True', '--max_wait_time', '3600', '--checkpoint_config', '{}']) + + for arg in [no_max_wait_args, no_checkpoint_args, no_s3_uri_args]: + with self.assertRaises(Exception): + _utils.create_hyperparameter_tuning_job_request(vars(arg)) + + def test_spot_lesser_wait_time(self): + args = self.parser.parse_args(required_args + ['--spot_instance', 'True', '--max_wait_time', '86399', '--checkpoint_config', '{"S3Uri": "s3://fake-uri/", "LocalPath": "local-path"}']) + with self.assertRaises(Exception): + _utils.create_hyperparameter_tuning_job_request(vars(args)) + + def test_spot_good_args(self): + good_args = self.parser.parse_args(required_args + ['--spot_instance', 'True', '--max_wait_time', '86400', '--checkpoint_config', '{"S3Uri": "s3://fake-uri/"}']) + 
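+ # 86400 is the smallest max_wait_time accepted here: the request builder requires it to be at least the job's MaxRuntimeInSeconds, which defaults to 86400 for these tuning jobs (contrast with test_spot_lesser_wait_time above).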
response = _utils.create_hyperparameter_tuning_job_request(vars(good_args)) + self.assertTrue(response['TrainingJobDefinition']['EnableManagedSpotTraining']) + self.assertEqual(response['TrainingJobDefinition']['StoppingCondition']['MaxWaitTimeInSeconds'], 86400) + self.assertEqual(response['TrainingJobDefinition']['CheckpointConfig']['S3Uri'], 's3://fake-uri/') + + def test_spot_local_path(self): + args = self.parser.parse_args(required_args + ['--spot_instance', 'True', '--max_wait_time', '86400', '--checkpoint_config', '{"S3Uri": "s3://fake-uri/", "LocalPath": "local-path"}']) + response = _utils.create_hyperparameter_tuning_job_request(vars(args)) + self.assertEqual(response['TrainingJobDefinition']['CheckpointConfig']['S3Uri'], 's3://fake-uri/') + self.assertEqual(response['TrainingJobDefinition']['CheckpointConfig']['LocalPath'], 'local-path') + + def test_empty_string(self): + good_args = self.parser.parse_args( + required_args + ['--spot_instance', 'True', '--max_wait_time', '86400', '--checkpoint_config', + '{"S3Uri": "s3://fake-uri/"}']) + response = _utils.create_hyperparameter_tuning_job_request(vars(good_args)) + test_utils.check_empty_string_values(response) + + def test_main(self): + # Mock out all of utils except parser + hpo._utils = MagicMock() + hpo._utils.add_default_client_arguments = _utils.add_default_client_arguments + + # Set some static returns + hpo._utils.create_hyperparameter_tuning_job.return_value = 'job-name' + hpo._utils.get_best_training_job_and_hyperparameters.return_value = 'best_job', 'best_hyperparameters' + hpo._utils.get_image_from_job.return_value = 'training-image' + hpo._utils.get_model_artifacts_from_job.return_value = 'model-artifacts' + + with patch('builtins.open', mock_open()) as file_open: + hpo.main(required_args) + + # Check if correct requests were created and triggered + hpo._utils.create_hyperparameter_tuning_job.assert_called() + hpo._utils.wait_for_hyperparameter_training_job.assert_called() + + # Check the file outputs + file_open.assert_has_calls([ + call('/tmp/hpo_job_name.txt', 'w'), + call('/tmp/best_job_name.txt', 'w'), + call('/tmp/best_hyperparameters.txt', 'w'), + call('/tmp/model_artifact_url.txt', 'w'), + call('/tmp/training_image.txt', 'w') + ], any_order=True) + + file_open().write.assert_has_calls([ + call('job-name'), + call('best_job'), + call('"best_hyperparameters"'), + call('model-artifacts'), + call('training-image'), + ], any_order=False) + + def test_create_hyperparameter_tuning_job(self): + mock_client = MagicMock() + mock_args = self.parser.parse_args(required_args + ['--job_name', 'test-job']) + response = _utils.create_hyperparameter_tuning_job(mock_client, vars(mock_args)) + + mock_client.create_hyper_parameter_tuning_job.assert_called_once_with( + HyperParameterTuningJobConfig={'Strategy': 'Bayesian', + 'HyperParameterTuningJobObjective': {'Type': 'Maximize', 'MetricName': 'test-metric'}, + 'ResourceLimits': {'MaxNumberOfTrainingJobs': 5, 'MaxParallelTrainingJobs': 2}, + 'ParameterRanges': {'IntegerParameterRanges': [], 'ContinuousParameterRanges': [], 'CategoricalParameterRanges': []}, + 'TrainingJobEarlyStoppingType': 'Off' + }, + HyperParameterTuningJobName='test-job', + Tags=[], + TrainingJobDefinition={'StaticHyperParameters': {}, + 'AlgorithmSpecification': {'TrainingImage': 'test-image', 'TrainingInputMode': 'File'}, + 'RoleArn': 'arn:aws:iam::123456789012:user/Development/product_1234/*', + 'InputDataConfig': [{'ChannelName': 'train', + 'DataSource': {'S3DataSource': {'S3Uri': 
's3://fake-bucket/data', + 'S3DataType': 'S3Prefix', + 'S3DataDistributionType': 'FullyReplicated'}}, + 'ContentType': '', + 'CompressionType': 'None', + 'RecordWrapperType': 'None', + 'InputMode': 'File'}], + 'OutputDataConfig': {'KmsKeyId': '', 'S3OutputPath': 'test-output-location'}, + 'ResourceConfig': {'InstanceType': 'ml.m4.xlarge', 'InstanceCount': 1, 'VolumeSizeInGB': 1, 'VolumeKmsKeyId': ''}, + 'StoppingCondition': {'MaxRuntimeInSeconds': 86400}, + 'EnableNetworkIsolation': True, + 'EnableInterContainerTrafficEncryption': False, + 'EnableManagedSpotTraining': False} + ) + + self.assertEqual(response, 'test-job') + + + def test_sagemaker_exception_in_create_hyperparameter_tuning_job(self): + mock_client = MagicMock() + mock_exception = ClientError({"Error": {"Message": "SageMaker broke"}}, "create_hyperparameter_tuning_job") + mock_client.create_hyper_parameter_tuning_job.side_effect = mock_exception + mock_args = self.parser.parse_args(required_args) + + with self.assertRaises(Exception): + _utils.create_hyperparameter_tuning_job(mock_client, vars(mock_args)) + + def test_wait_for_hyperparameter_tuning_job(self): + mock_client = MagicMock() + mock_client.describe_hyper_parameter_tuning_job.side_effect = [ + {"HyperParameterTuningJobStatus": "InProgress"}, + {"HyperParameterTuningJobStatus": "Completed"}, + {"HyperParameterTuningJobStatus": "Should not be called"} + ] + + _utils.wait_for_hyperparameter_training_job(mock_client,'hyperparameter-tuning-job',0) + self.assertEqual(mock_client.describe_hyper_parameter_tuning_job.call_count, 2) + + def test_wait_for_failed_job(self): + mock_client = MagicMock() + mock_client.describe_hyper_parameter_tuning_job.side_effect = [ + {"HyperParameterTuningJobStatus": "InProgress"}, + {"HyperParameterTuningJobStatus": "Failed", "FailureReason": "Something broke lol"}, + {"HyperParameterTuningJobStatus": "Should not be called"} + ] + + with self.assertRaises(Exception): + _utils.wait_for_hyperparameter_training_job(mock_client, 'training-job', 0) + + self.assertEqual(mock_client.describe_hyper_parameter_tuning_job.call_count,2) + + def test_get_image_from_algorithm_job(self): + mock_client = MagicMock() + mock_client.describe_hyper_parameter_tuning_job.return_value = {"TrainingJobDefinition": {"AlgorithmSpecification": {"AlgorithmName": "my-algorithm"}}} + mock_client.describe_algorithm.return_value = {"TrainingSpecification": {"TrainingImage": "training-image-url"}} + + self.assertEqual(_utils.get_image_from_job(mock_client, 'training-job'), "training-image-url") + + def test_best_training_job(self): + mock_client = MagicMock() + mock_client.describe_hyper_parameter_tuning_job.return_value = {'BestTrainingJob': {'TrainingJobName': 'best_training_job'}} + mock_client.describe_training_job.return_value = {"HyperParameters": {"hp": "val", '_tuning_objective_metric': 'remove_me'}} + name, params =_utils.get_best_training_job_and_hyperparameters(mock_client, "mock-hpo-job") + self.assertEqual("best_training_job", name) + self.assertEqual("val", params["hp"]) + + def test_warm_start_and_parents_args(self): + # specifying both params + good_args = self.parser.parse_args(required_args + ['--warm_start_type', 'TransferLearning'] + ['--parent_hpo_jobs', 'A,B,C']) + response = _utils.create_hyperparameter_tuning_job_request(vars(good_args)) + self.assertIn('WarmStartConfig', response) + self.assertIn('ParentHyperParameterTuningJobs', response['WarmStartConfig']) + self.assertIn('WarmStartType', response['WarmStartConfig']) + 
self.assertEqual(response['WarmStartConfig']['ParentHyperParameterTuningJobs'][0]['HyperParameterTuningJobName'], 'A') + self.assertEqual(response['WarmStartConfig']['ParentHyperParameterTuningJobs'][1]['HyperParameterTuningJobName'], 'B') + self.assertEqual(response['WarmStartConfig']['ParentHyperParameterTuningJobs'][2]['HyperParameterTuningJobName'], 'C') + self.assertEqual(response['WarmStartConfig']['WarmStartType'], 'TransferLearning') + + def test_either_warm_start_or_parents_args(self): + # It should raise an exception if only one of warm_start_type or parent_hpo_jobs is passed + missing_parent_hpo_jobs_args = self.parser.parse_args(required_args + ['--warm_start_type', 'TransferLearning']) + with self.assertRaises(Exception): + _utils.create_hyperparameter_tuning_job_request(vars(missing_parent_hpo_jobs_args)) + + missing_warm_start_type_args = self.parser.parse_args(required_args + ['--parent_hpo_jobs', 'A,B,C']) + with self.assertRaises(Exception): + _utils.create_hyperparameter_tuning_job_request(vars(missing_warm_start_type_args)) + + + def test_reasonable_required_args(self): + response = _utils.create_hyperparameter_tuning_job_request(vars(self.parser.parse_args(required_args))) + + # Ensure all of the optional arguments have reasonable default values + self.assertFalse(response['TrainingJobDefinition']['EnableManagedSpotTraining']) + self.assertDictEqual(response['TrainingJobDefinition']['StaticHyperParameters'], {}) + self.assertNotIn('VpcConfig', response['TrainingJobDefinition']) + self.assertNotIn('MetricDefinitions', response['TrainingJobDefinition']) + self.assertEqual(response['Tags'], []) + self.assertEqual(response['TrainingJobDefinition']['AlgorithmSpecification']['TrainingInputMode'], 'File') + self.assertEqual(response['TrainingJobDefinition']['OutputDataConfig']['S3OutputPath'], 'test-output-location') + + def test_metric_definitions(self): + metric_definition_args = self.parser.parse_args(required_args + ['--metric_definitions', '{"metric1": "regexval1", "metric2": "regexval2"}']) + response = _utils.create_hyperparameter_tuning_job_request(vars(metric_definition_args)) + + self.assertIn('MetricDefinitions', response['TrainingJobDefinition']['AlgorithmSpecification']) + response_metric_definitions = response['TrainingJobDefinition']['AlgorithmSpecification']['MetricDefinitions'] + + self.assertEqual(response_metric_definitions, [{ + 'Name': "metric1", + 'Regex': "regexval1" + }, { + 'Name': "metric2", + 'Regex': "regexval2" + }]) + + def test_no_defined_image(self): + # Remove the --image argument; parsing still succeeds, but building the request should fail + no_image_args = required_args.copy() + image_index = no_image_args.index('--image') + # Cut out --image and its associated value + no_image_args = no_image_args[:image_index] + no_image_args[image_index+2:] + + parsed_args = self.parser.parse_args(no_image_args) + + with self.assertRaises(Exception): + _utils.create_hyperparameter_tuning_job_request(vars(parsed_args)) + + def test_first_party_algorithm(self): + algorithm_name_args = self.parser.parse_args(required_args + ['--algorithm_name', 'first-algorithm']) + + # Should not throw an exception + response = _utils.create_hyperparameter_tuning_job_request(vars(algorithm_name_args)) + self.assertIn('TrainingJobDefinition', response) + self.assertIn('TrainingImage', response['TrainingJobDefinition']['AlgorithmSpecification']) + self.assertNotIn('AlgorithmName', response['TrainingJobDefinition']['AlgorithmSpecification']) + + def test_known_algorithm_key(self): + # This passes an algorithm that is a known 
NAME of an algorithm + known_algorithm_args = required_args + ['--algorithm_name', 'seq2seq modeling'] + image_index = required_args.index('--image') + # Cut out --image and its associated value + known_algorithm_args = known_algorithm_args[:image_index] + known_algorithm_args[image_index+2:] + + parsed_args = self.parser.parse_args(known_algorithm_args) + + # Patch get_image_uri + _utils.get_image_uri = MagicMock() + _utils.get_image_uri.return_value = "seq2seq-url" + + response = _utils.create_hyperparameter_tuning_job_request(vars(parsed_args)) + + _utils.get_image_uri.assert_called_with('us-west-2', 'seq2seq') + self.assertEqual(response['TrainingJobDefinition']['AlgorithmSpecification']['TrainingImage'], "seq2seq-url") + + + def test_known_algorithm_value(self): + # This passes an algorithm that is a known SageMaker algorithm name + known_algorithm_args = required_args + ['--algorithm_name', 'seq2seq'] + image_index = required_args.index('--image') + # Cut out --image and its associated value + known_algorithm_args = known_algorithm_args[:image_index] + known_algorithm_args[image_index+2:] + + parsed_args = self.parser.parse_args(known_algorithm_args) + + # Patch get_image_uri + _utils.get_image_uri = MagicMock() + _utils.get_image_uri.return_value = "seq2seq-url" + + response = _utils.create_hyperparameter_tuning_job_request(vars(parsed_args)) + + _utils.get_image_uri.assert_called_with('us-west-2', 'seq2seq') + self.assertEqual(response['TrainingJobDefinition']['AlgorithmSpecification']['TrainingImage'], "seq2seq-url") + + + def test_unknown_algorithm(self): + known_algorithm_args = required_args + ['--algorithm_name', 'unknown algorithm'] + image_index = required_args.index('--image') + # Cut out --image and its associated value + known_algorithm_args = known_algorithm_args[:image_index] + known_algorithm_args[image_index+2:] + + parsed_args = self.parser.parse_args(known_algorithm_args) + + # Patch get_image_uri + _utils.get_image_uri = MagicMock() + _utils.get_image_uri.return_value = "unknown-url" + + response = _utils.create_hyperparameter_tuning_job_request(vars(parsed_args)) + + # Should just pass the algorithm name through regardless + _utils.get_image_uri.assert_not_called() + self.assertEqual(response['TrainingJobDefinition']['AlgorithmSpecification']['AlgorithmName'], "unknown algorithm") + + def test_no_channels(self): + no_channels_args = required_args.copy() + channels_index = required_args.index('--channels') + # Replace the value after the flag with an empty list + no_channels_args[channels_index + 1] = '[]' + parsed_args = self.parser.parse_args(no_channels_args) + + with self.assertRaises(Exception): + _utils.create_hyperparameter_tuning_job_request(vars(parsed_args)) + + def test_tags(self): + args = self.parser.parse_args(required_args + ['--tags', '{"key1": "val1", "key2": "val2"}']) + response = _utils.create_hyperparameter_tuning_job_request(vars(args)) + self.assertIn({'Key': 'key1', 'Value': 'val1'}, response['Tags']) + self.assertIn({'Key': 'key2', 'Value': 'val2'}, response['Tags']) + + def test_invalid_instance_type(self): + invalid_instance_args = required_args + ['--instance_type', 'invalid-instance'] + + with self.assertRaises(SystemExit): + self.parser.parse_args(invalid_instance_args) + + def test_valid_hyperparameters(self): + hyperparameters_str = '{"hp1": "val1", "hp2": "val2", "hp3": "val3"}' + categorical_params = '[{"Name" : "categorical", "Values": ["A", "B"]}]' + integer_params = '[{"MaxValue": "integer_val1", "MinValue": "integer_val2", 
"Name": "integer", "ScalingType": "test_integer"}]' + continuous_params = '[{"MaxValue": "continuous_val1", "MinValue": "continuous_val2", "Name": "continuous", "ScalingType": "test_continuous"}]' + good_args = self.parser.parse_args(required_args + ['--static_parameters', hyperparameters_str] + ['--integer_parameters', integer_params] + ['--continuous_parameters', continuous_params] + ['--categorical_parameters', categorical_params]) + response = _utils.create_hyperparameter_tuning_job_request(vars(good_args)) + + self.assertIn('hp1', response['TrainingJobDefinition']['StaticHyperParameters']) + self.assertIn('hp2', response['TrainingJobDefinition']['StaticHyperParameters']) + self.assertIn('hp3', response['TrainingJobDefinition']['StaticHyperParameters']) + self.assertEqual(response['TrainingJobDefinition']['StaticHyperParameters']['hp1'], "val1") + self.assertEqual(response['TrainingJobDefinition']['StaticHyperParameters']['hp2'], "val2") + self.assertEqual(response['TrainingJobDefinition']['StaticHyperParameters']['hp3'], "val3") + + self.assertIn('ParameterRanges', response['HyperParameterTuningJobConfig']) + self.assertIn('IntegerParameterRanges', response['HyperParameterTuningJobConfig']['ParameterRanges']) + self.assertIn('ContinuousParameterRanges', response['HyperParameterTuningJobConfig']['ParameterRanges']) + self.assertIn('CategoricalParameterRanges', response['HyperParameterTuningJobConfig']['ParameterRanges']) + self.assertIn('Name', response['HyperParameterTuningJobConfig']['ParameterRanges']['CategoricalParameterRanges'][0]) + self.assertIn('Values', response['HyperParameterTuningJobConfig']['ParameterRanges']['CategoricalParameterRanges'][0]) + self.assertIn('MaxValue', response['HyperParameterTuningJobConfig']['ParameterRanges']['IntegerParameterRanges'][0]) + self.assertIn('MinValue', response['HyperParameterTuningJobConfig']['ParameterRanges']['IntegerParameterRanges'][0]) + self.assertIn('Name', response['HyperParameterTuningJobConfig']['ParameterRanges']['IntegerParameterRanges'][0]) + self.assertIn('ScalingType', response['HyperParameterTuningJobConfig']['ParameterRanges']['IntegerParameterRanges'][0]) + self.assertIn('MaxValue', response['HyperParameterTuningJobConfig']['ParameterRanges']['ContinuousParameterRanges'][0]) + self.assertIn('MinValue', response['HyperParameterTuningJobConfig']['ParameterRanges']['ContinuousParameterRanges'][0]) + self.assertIn('Name', response['HyperParameterTuningJobConfig']['ParameterRanges']['ContinuousParameterRanges'][0]) + self.assertIn('ScalingType', response['HyperParameterTuningJobConfig']['ParameterRanges']['ContinuousParameterRanges'][0]) + + self.assertEqual(response['HyperParameterTuningJobConfig']['ParameterRanges']['CategoricalParameterRanges'][0]['Name'], "categorical") + self.assertEqual(response['HyperParameterTuningJobConfig']['ParameterRanges']['CategoricalParameterRanges'][0]["Values"][0], "A") + self.assertEqual(response['HyperParameterTuningJobConfig']['ParameterRanges']['CategoricalParameterRanges'][0]["Values"][1], "B") + self.assertEqual(response['HyperParameterTuningJobConfig']['ParameterRanges']['IntegerParameterRanges'][0]['MaxValue'], "integer_val1") + self.assertEqual(response['HyperParameterTuningJobConfig']['ParameterRanges']['IntegerParameterRanges'][0]['MinValue'], "integer_val2") + self.assertEqual(response['HyperParameterTuningJobConfig']['ParameterRanges']['IntegerParameterRanges'][0]['Name'], "integer") + 
self.assertEqual(response['HyperParameterTuningJobConfig']['ParameterRanges']['IntegerParameterRanges'][0]['ScalingType'], "test_integer") + self.assertEqual(response['HyperParameterTuningJobConfig']['ParameterRanges']['ContinuousParameterRanges'][0]['MaxValue'], "continuous_val1") + self.assertEqual(response['HyperParameterTuningJobConfig']['ParameterRanges']['ContinuousParameterRanges'][0]['MinValue'], "continuous_val2") + self.assertEqual(response['HyperParameterTuningJobConfig']['ParameterRanges']['ContinuousParameterRanges'][0]['Name'], "continuous") + self.assertEqual(response['HyperParameterTuningJobConfig']['ParameterRanges']['ContinuousParameterRanges'][0]['ScalingType'], "test_continuous") + + + def test_empty_hyperparameters(self): + hyperparameters_str = '{}' + + good_args = self.parser.parse_args(required_args + ['--static_parameters', hyperparameters_str]) + response = _utils.create_hyperparameter_tuning_job_request(vars(good_args)) + + self.assertEqual(response['TrainingJobDefinition']['StaticHyperParameters'], {}) + + def test_object_hyperparameters(self): + hyperparameters_str = '{"hp1": {"innerkey": "innerval"}}' + + invalid_args = self.parser.parse_args(required_args + ['--static_parameters', hyperparameters_str]) + with self.assertRaises(Exception): + _utils.create_hyperparameter_tuning_job_request(vars(invalid_args)) + + def test_vpc_configuration(self): + required_vpc_args = self.parser.parse_args(required_args + ['--vpc_security_group_ids', 'sg1,sg2', '--vpc_subnets', 'subnet1,subnet2']) + response = _utils.create_hyperparameter_tuning_job_request(vars(required_vpc_args)) + + self.assertIn('TrainingJobDefinition', response) + self.assertIn('VpcConfig', response['TrainingJobDefinition']) + self.assertIn('sg1', response['TrainingJobDefinition']['VpcConfig']['SecurityGroupIds']) + self.assertIn('sg2', response['TrainingJobDefinition']['VpcConfig']['SecurityGroupIds']) + self.assertIn('subnet1', response['TrainingJobDefinition']['VpcConfig']['Subnets']) + self.assertIn('subnet2', response['TrainingJobDefinition']['VpcConfig']['Subnets']) + + def test_training_mode(self): + required_vpc_args = self.parser.parse_args(required_args + ['--training_input_mode', 'Pipe']) + response = _utils.create_hyperparameter_tuning_job_request(vars(required_vpc_args)) + + self.assertEqual(response['TrainingJobDefinition']['AlgorithmSpecification']['TrainingInputMode'], 'Pipe') diff --git a/components/aws/sagemaker/tests/unit_tests/tests/test_model.py b/components/aws/sagemaker/tests/unit_tests/tests/test_model.py new file mode 100644 index 00000000000..d4fc1675855 --- /dev/null +++ b/components/aws/sagemaker/tests/unit_tests/tests/test_model.py @@ -0,0 +1,35 @@ +import json +import unittest + +from unittest.mock import patch, Mock, MagicMock +from botocore.exceptions import ClientError +from datetime import datetime + +from model.src import create_model +from common import _utils +from . 
import test_utils + + +required_args = [ + '--region', 'us-west-2', + '--model_name', 'model_test', + '--role', 'arn:aws:iam::123456789012:user/Development/product_1234/*', + '--image', 'test-image', + '--model_artifact_url', 's3://fake-bucket/model_artifact' +] + +class ModelTestCase(unittest.TestCase): + @classmethod + def setUpClass(cls): + parser = create_model.create_parser() + cls.parser = parser + + def test_sample(self): + args = self.parser.parse_args(required_args) + response = _utils.create_model_request(vars(args)) + self.assertEqual(response['ModelName'], 'model_test') + + def test_empty_string(self): + args = self.parser.parse_args(required_args) + response = _utils.create_model_request(vars(args)) + test_utils.check_empty_string_values(response) \ No newline at end of file diff --git a/components/aws/sagemaker/tests/unit_tests/tests/test_train.py b/components/aws/sagemaker/tests/unit_tests/tests/test_train.py new file mode 100644 index 00000000000..6f749b57344 --- /dev/null +++ b/components/aws/sagemaker/tests/unit_tests/tests/test_train.py @@ -0,0 +1,327 @@ +import json +import unittest + +from unittest.mock import patch, call, Mock, MagicMock, mock_open +from botocore.exceptions import ClientError +from datetime import datetime + +from train.src import train +from common import _utils +from . import test_utils + +required_args = [ + '--region', 'us-west-2', + '--role', 'arn:aws:iam::123456789012:user/Development/product_1234/*', + '--image', 'test-image', + '--channels', '[{"ChannelName": "train", "DataSource": {"S3DataSource":{"S3Uri": "s3://fake-bucket/data","S3DataType":"S3Prefix","S3DataDistributionType": "FullyReplicated"}},"ContentType":"","CompressionType": "None","RecordWrapperType":"None","InputMode": "File"}]', + '--instance_type', 'ml.m4.xlarge', + '--instance_count', '1', + '--volume_size', '50', + '--max_run_time', '3600', + '--model_artifact_path', 'test-path' +] + +class TrainTestCase(unittest.TestCase): + @classmethod + def setUpClass(cls): + parser = train.create_parser() + cls.parser = parser + + def test_create_parser(self): + self.assertIsNotNone(self.parser) + + def test_main(self): + # Mock out all of utils except parser + train._utils = MagicMock() + train._utils.add_default_client_arguments = _utils.add_default_client_arguments + + # Set some static returns + train._utils.create_training_job.return_value = 'job-name' + train._utils.get_image_from_job.return_value = 'training-image' + train._utils.get_model_artifacts_from_job.return_value = 'model-artifacts' + + with patch('builtins.open', mock_open()) as file_open: + train.main(required_args) + + # Check if correct requests were created and triggered + train._utils.create_training_job.assert_called() + train._utils.wait_for_training_job.assert_called() + + # Check the file outputs + file_open.assert_has_calls([ + call('/tmp/model_artifact_url.txt', 'w'), + call('/tmp/job_name.txt', 'w'), + call('/tmp/training_image.txt', 'w') + ], any_order=True) + + file_open().write.assert_has_calls([ + call('model-artifacts'), + call('job-name'), + call('training-image'), + ], any_order=False) # Must be in the same order as called + + def test_create_training_job(self): + mock_client = MagicMock() + mock_args = self.parser.parse_args(required_args + ['--job_name', 'test-job']) + response = _utils.create_training_job(mock_client, vars(mock_args)) + + mock_client.create_training_job.assert_called_once_with(AlgorithmSpecification={'TrainingImage': 'test-image', + 'TrainingInputMode': 'File'}, 
EnableInterContainerTrafficEncryption=False, EnableManagedSpotTraining=False, + EnableNetworkIsolation=True, HyperParameters={}, InputDataConfig=[{'ChannelName': 'train', 'DataSource': + {'S3DataSource': {'S3Uri': 's3://fake-bucket/data', 'S3DataType': 'S3Prefix', 'S3DataDistributionType': + 'FullyReplicated'}}, 'ContentType': '', 'CompressionType': 'None', 'RecordWrapperType': 'None', 'InputMode': + 'File'}], OutputDataConfig={'KmsKeyId': '', 'S3OutputPath': 'test-path'}, ResourceConfig={'InstanceType': + 'ml.m4.xlarge', 'InstanceCount': 1, 'VolumeSizeInGB': 50, 'VolumeKmsKeyId': ''}, + RoleArn='arn:aws:iam::123456789012:user/Development/product_1234/*', StoppingCondition={'MaxRuntimeInSeconds': + 3600}, Tags=[], TrainingJobName='test-job') + self.assertEqual(response, 'test-job') + + def test_sagemaker_exception_in_create_training_job(self): + mock_client = MagicMock() + mock_exception = ClientError({"Error": {"Message": "SageMaker broke"}}, "create_training_job") + mock_client.create_training_job.side_effect = mock_exception + mock_args = self.parser.parse_args(required_args) + + with self.assertRaises(Exception): + response = _utils.create_training_job(mock_client, vars(mock_args)) + + def test_wait_for_training_job(self): + mock_client = MagicMock() + mock_client.describe_training_job.side_effect = [ + {"TrainingJobStatus": "Starting"}, + {"TrainingJobStatus": "InProgress"}, + {"TrainingJobStatus": "Downloading"}, + {"TrainingJobStatus": "Completed"}, + {"TrainingJobStatus": "Should not be called"} + ] + + _utils.wait_for_training_job(mock_client, 'training-job', 0) + self.assertEqual(mock_client.describe_training_job.call_count, 4) + + def test_wait_for_failed_job(self): + mock_client = MagicMock() + mock_client.describe_training_job.side_effect = [ + {"TrainingJobStatus": "Starting"}, + {"TrainingJobStatus": "InProgress"}, + {"TrainingJobStatus": "Downloading"}, + {"TrainingJobStatus": "Failed", "FailureReason": "Something broke lol"}, + {"TrainingJobStatus": "Should not be called"} + ] + + with self.assertRaises(Exception): + _utils.wait_for_training_job(mock_client, 'training-job', 0) + + self.assertEqual(mock_client.describe_training_job.call_count, 4) + + def test_get_model_artifacts_from_job(self): + mock_client = MagicMock() + mock_client.describe_training_job.return_value = {"ModelArtifacts": {"S3ModelArtifacts": "s3://path/"}} + + self.assertEqual(_utils.get_model_artifacts_from_job(mock_client, 'training-job'), 's3://path/') + + def test_get_image_from_defined_job(self): + mock_client = MagicMock() + mock_client.describe_training_job.return_value = {"AlgorithmSpecification": {"TrainingImage": "training-image-url"}} + + self.assertEqual(_utils.get_image_from_job(mock_client, 'training-job'), "training-image-url") + + def test_get_image_from_algorithm_job(self): + mock_client = MagicMock() + mock_client.describe_training_job.return_value = {"AlgorithmSpecification": {"AlgorithmName": "my-algorithm"}} + mock_client.describe_algorithm.return_value = {"TrainingSpecification": {"TrainingImage": "training-image-url"}} + + self.assertEqual(_utils.get_image_from_job(mock_client, 'training-job'), "training-image-url") + + def test_reasonable_required_args(self): + response = _utils.create_training_job_request(vars(self.parser.parse_args(required_args))) + + # Ensure all of the optional arguments have reasonable default values + self.assertFalse(response['EnableManagedSpotTraining']) + self.assertDictEqual(response['HyperParameters'], {}) + self.assertNotIn('VpcConfig', response) + 
self.assertNotIn('MetricDefinitions', response) + self.assertEqual(response['Tags'], []) + self.assertEqual(response['AlgorithmSpecification']['TrainingInputMode'], 'File') + self.assertEqual(response['OutputDataConfig']['S3OutputPath'], 'test-path') + + def test_metric_definitions(self): + metric_definition_args = self.parser.parse_args(required_args + ['--metric_definitions', '{"metric1": "regexval1", "metric2": "regexval2"}']) + response = _utils.create_training_job_request(vars(metric_definition_args)) + + self.assertIn('MetricDefinitions', response['AlgorithmSpecification']) + response_metric_definitions = response['AlgorithmSpecification']['MetricDefinitions'] + + self.assertEqual(response_metric_definitions, [{ + 'Name': "metric1", + 'Regex': "regexval1" + }, { + 'Name': "metric2", + 'Regex': "regexval2" + }]) + + def test_no_defined_image(self): + # Remove the --image argument; parsing still succeeds, but building the request should fail + no_image_args = required_args.copy() + image_index = no_image_args.index('--image') + # Cut out --image and its associated value + no_image_args = no_image_args[:image_index] + no_image_args[image_index+2:] + + parsed_args = self.parser.parse_args(no_image_args) + + with self.assertRaises(Exception): + _utils.create_training_job_request(vars(parsed_args)) + + def test_first_party_algorithm(self): + algorithm_name_args = self.parser.parse_args(required_args + ['--algorithm_name', 'first-algorithm']) + + # Should not throw an exception + response = _utils.create_training_job_request(vars(algorithm_name_args)) + self.assertIn('TrainingImage', response['AlgorithmSpecification']) + self.assertNotIn('AlgorithmName', response['AlgorithmSpecification']) + + def test_known_algorithm_key(self): + # This passes an algorithm that is a known NAME of an algorithm + known_algorithm_args = required_args + ['--algorithm_name', 'seq2seq modeling'] + image_index = required_args.index('--image') + # Cut out --image and its associated value + known_algorithm_args = known_algorithm_args[:image_index] + known_algorithm_args[image_index+2:] + + parsed_args = self.parser.parse_args(known_algorithm_args) + + # Patch get_image_uri + _utils.get_image_uri = MagicMock() + _utils.get_image_uri.return_value = "seq2seq-url" + + response = _utils.create_training_job_request(vars(parsed_args)) + + _utils.get_image_uri.assert_called_with('us-west-2', 'seq2seq') + self.assertEqual(response['AlgorithmSpecification']['TrainingImage'], "seq2seq-url") + + def test_known_algorithm_value(self): + # This passes an algorithm that is a known SageMaker algorithm name + known_algorithm_args = required_args + ['--algorithm_name', 'seq2seq'] + image_index = required_args.index('--image') + # Cut out --image and its associated value + known_algorithm_args = known_algorithm_args[:image_index] + known_algorithm_args[image_index+2:] + + parsed_args = self.parser.parse_args(known_algorithm_args) + + # Patch get_image_uri + _utils.get_image_uri = MagicMock() + _utils.get_image_uri.return_value = "seq2seq-url" + + response = _utils.create_training_job_request(vars(parsed_args)) + + _utils.get_image_uri.assert_called_with('us-west-2', 'seq2seq') + self.assertEqual(response['AlgorithmSpecification']['TrainingImage'], "seq2seq-url") + + def test_unknown_algorithm(self): + known_algorithm_args = required_args + ['--algorithm_name', 'unknown algorithm'] + image_index = required_args.index('--image') + # Cut out --image and its associated value + known_algorithm_args = known_algorithm_args[:image_index] + known_algorithm_args[image_index+2:] + + parsed_args = 
self.parser.parse_args(known_algorithm_args) + + # Patch get_image_uri + _utils.get_image_uri = MagicMock() + _utils.get_image_uri.return_value = "unknown-url" + + response = _utils.create_training_job_request(vars(parsed_args)) + + # Should just pass the algorithm name through regardless + _utils.get_image_uri.assert_not_called() + self.assertEqual(response['AlgorithmSpecification']['AlgorithmName'], "unknown algorithm") + + def test_no_channels(self): + no_channels_args = required_args.copy() + channels_index = required_args.index('--channels') + # Replace the value after the flag with an empty list + no_channels_args[channels_index + 1] = '[]' + parsed_args = self.parser.parse_args(no_channels_args) + + with self.assertRaises(Exception): + _utils.create_training_job_request(vars(parsed_args)) + + def test_invalid_instance_type(self): + invalid_instance_args = required_args + ['--instance_type', 'invalid-instance'] + + with self.assertRaises(SystemExit): + self.parser.parse_args(invalid_instance_args) + + def test_valid_hyperparameters(self): + hyperparameters_str = '{"hp1": "val1", "hp2": "val2", "hp3": "val3"}' + + good_args = self.parser.parse_args(required_args + ['--hyperparameters', hyperparameters_str]) + response = _utils.create_training_job_request(vars(good_args)) + + self.assertIn('hp1', response['HyperParameters']) + self.assertIn('hp2', response['HyperParameters']) + self.assertIn('hp3', response['HyperParameters']) + self.assertEqual(response['HyperParameters']['hp1'], "val1") + self.assertEqual(response['HyperParameters']['hp2'], "val2") + self.assertEqual(response['HyperParameters']['hp3'], "val3") + + def test_empty_hyperparameters(self): + hyperparameters_str = '{}' + + good_args = self.parser.parse_args(required_args + ['--hyperparameters', hyperparameters_str]) + response = _utils.create_training_job_request(vars(good_args)) + + self.assertEqual(response['HyperParameters'], {}) + + def test_object_hyperparameters(self): + hyperparameters_str = '{"hp1": {"innerkey": "innerval"}}' + + invalid_args = self.parser.parse_args(required_args + ['--hyperparameters', hyperparameters_str]) + with self.assertRaises(Exception): + _utils.create_training_job_request(vars(invalid_args)) + + def test_vpc_configuration(self): + required_vpc_args = self.parser.parse_args(required_args + ['--vpc_security_group_ids', 'sg1,sg2', '--vpc_subnets', 'subnet1,subnet2']) + response = _utils.create_training_job_request(vars(required_vpc_args)) + + self.assertIn('VpcConfig', response) + self.assertIn('sg1', response['VpcConfig']['SecurityGroupIds']) + self.assertIn('sg2', response['VpcConfig']['SecurityGroupIds']) + self.assertIn('subnet1', response['VpcConfig']['Subnets']) + self.assertIn('subnet2', response['VpcConfig']['Subnets']) + + def test_training_mode(self): + required_vpc_args = self.parser.parse_args(required_args + ['--training_input_mode', 'Pipe']) + response = _utils.create_training_job_request(vars(required_vpc_args)) + + self.assertEqual(response['AlgorithmSpecification']['TrainingInputMode'], 'Pipe') + + def test_spot_bad_args(self): + no_max_wait_args = self.parser.parse_args(required_args + ['--spot_instance', 'True']) + no_checkpoint_args = self.parser.parse_args(required_args + ['--spot_instance', 'True', '--max_wait_time', '3600']) + no_s3_uri_args = self.parser.parse_args(required_args + ['--spot_instance', 'True', '--max_wait_time', '3600', '--checkpoint_config', '{}']) + + for arg in [no_max_wait_args, no_checkpoint_args, no_s3_uri_args]: + with self.assertRaises(Exception): + 
_utils.create_training_job_request(vars(arg))
+
+  def test_spot_lesser_wait_time(self):
+    args = self.parser.parse_args(required_args + ['--spot_instance', 'True', '--max_wait_time', '3599', '--checkpoint_config', '{"S3Uri": "s3://fake-uri/", "LocalPath": "local-path"}'])
+    with self.assertRaises(Exception):
+      _utils.create_training_job_request(vars(args))
+
+  def test_spot_good_args(self):
+    good_args = self.parser.parse_args(required_args + ['--spot_instance', 'True', '--max_wait_time', '3600', '--checkpoint_config', '{"S3Uri": "s3://fake-uri/"}'])
+    response = _utils.create_training_job_request(vars(good_args))
+    self.assertTrue(response['EnableManagedSpotTraining'])
+    self.assertEqual(response['StoppingCondition']['MaxWaitTimeInSeconds'], 3600)
+    self.assertEqual(response['CheckpointConfig']['S3Uri'], 's3://fake-uri/')
+
+  def test_spot_local_path(self):
+    args = self.parser.parse_args(required_args + ['--spot_instance', 'True', '--max_wait_time', '3600', '--checkpoint_config', '{"S3Uri": "s3://fake-uri/", "LocalPath": "local-path"}'])
+    response = _utils.create_training_job_request(vars(args))
+    self.assertEqual(response['CheckpointConfig']['S3Uri'], 's3://fake-uri/')
+    self.assertEqual(response['CheckpointConfig']['LocalPath'], 'local-path')
+
+  def test_tags(self):
+    args = self.parser.parse_args(required_args + ['--tags', '{"key1": "val1", "key2": "val2"}'])
+    response = _utils.create_training_job_request(vars(args))
+    self.assertIn({'Key': 'key1', 'Value': 'val1'}, response['Tags'])
+    self.assertIn({'Key': 'key2', 'Value': 'val2'}, response['Tags'])
diff --git a/components/aws/sagemaker/tests/unit_tests/tests/test_utils.py b/components/aws/sagemaker/tests/unit_tests/tests/test_utils.py
new file mode 100644
index 00000000000..fb62dd402be
--- /dev/null
+++ b/components/aws/sagemaker/tests/unit_tests/tests/test_utils.py
@@ -0,0 +1,11 @@
+def check_empty_string_values(obj):
+  obj_has_empty_string = False
+  if type(obj) is dict:
+    for k, v in obj.items():
+      if type(v) is str and v == '':
+        print(k + ' - has empty string value')
+        obj_has_empty_string = True
+      elif type(v) is dict:
+        # Propagate findings from nested dicts instead of discarding them
+        obj_has_empty_string = check_empty_string_values(v) or obj_has_empty_string
+  return obj_has_empty_string
diff --git a/components/aws/sagemaker/tests/unit_tests/tests/test_workteam.py b/components/aws/sagemaker/tests/unit_tests/tests/test_workteam.py
new file mode 100644
index 00000000000..7d5dcc7aee5
--- /dev/null
+++ b/components/aws/sagemaker/tests/unit_tests/tests/test_workteam.py
@@ -0,0 +1,33 @@
+import json
+import unittest
+
+from unittest.mock import patch, Mock, MagicMock
+from botocore.exceptions import ClientError
+from datetime import datetime
+
+from workteam.src import workteam
+from common import _utils
+from . 
import test_utils
+
+
+required_args = [
+  '--region', 'us-west-2',
+  '--team_name', 'test-team',
+  '--description', 'fake team'
+]
+
+class WorkTeamTestCase(unittest.TestCase):
+  @classmethod
+  def setUpClass(cls):
+    parser = workteam.create_parser()
+    cls.parser = parser
+
+  def test_sample(self):
+    args = self.parser.parse_args(required_args)
+    response = _utils.create_workteam_request(vars(args))
+    self.assertEqual(response['WorkteamName'], 'test-team')
+
+  def test_empty_string(self):
+    args = self.parser.parse_args(required_args)
+    response = _utils.create_workteam_request(vars(args))
+    test_utils.check_empty_string_values(response)
\ No newline at end of file
diff --git a/components/aws/sagemaker/train/README.md b/components/aws/sagemaker/train/README.md
new file mode 100644
index 00000000000..e8437f6d389
--- /dev/null
+++ b/components/aws/sagemaker/train/README.md
@@ -0,0 +1,48 @@
+# SageMaker Training Kubeflow Pipelines component
+## Summary
+Component to submit SageMaker Training jobs directly from a Kubeflow Pipelines workflow.
+https://docs.aws.amazon.com/sagemaker/latest/dg/how-it-works-training.html
+
+# Details
+
+## Intended Use
+For model training using AWS SageMaker.
+
+## Runtime Arguments
+Argument | Description | Optional | Data type | Accepted values | Default |
+:--- | :---------- | :----------| :----------| :---------- | :----------|
+region | The region where the cluster launches | No | String | | |
+endpoint_url | The endpoint URL for the private link VPC endpoint. | Yes | String | | |
+job_name | The name of the training job. Must be unique within the same AWS account and AWS region | Yes | String | | TrainingJob-[datetime]-[random id]|
+role | The Amazon Resource Name (ARN) that Amazon SageMaker assumes to perform tasks on your behalf | No | String | | |
+image | The registry path of the Docker image that contains the training algorithm | Yes | String | | |
+algorithm_name | The name of the algorithm resource to use for the training job; only specify this parameter if training image is not specified | Yes | String | | |
+metric_definitions | The dictionary of name-regex pairs specifying the metrics that the algorithm emits | Yes | Dict | | {} |
+training_input_mode | The input mode that the algorithm supports | Yes | String | File, Pipe | File |
+hyperparameters | Hyperparameters for the selected algorithm | Yes | Dict | [Depends on Algo](https://docs.aws.amazon.com/sagemaker/latest/dg/k-means-api-config.html)| |
+channels | A list of dicts specifying the input channels (at least one); refer to [documentation](https://github.com/awsdocs/amazon-sagemaker-developer-guide/blob/master/doc_source/API_Channel.md) for parameters | No | List of Dicts | | |
+instance_type | The ML compute instance type | Yes | String | ml.m4.xlarge, ml.m4.2xlarge, ml.m4.4xlarge, ml.m4.10xlarge, ml.m4.16xlarge, ml.m5.large, ml.m5.xlarge, ml.m5.2xlarge, ml.m5.4xlarge, ml.m5.12xlarge, ml.m5.24xlarge, ml.c4.xlarge, ml.c4.2xlarge, ml.c4.4xlarge, ml.c4.8xlarge, ml.p2.xlarge, ml.p2.8xlarge, ml.p2.16xlarge, ml.p3.2xlarge, ml.p3.8xlarge, ml.p3.16xlarge, ml.c5.xlarge, ml.c5.2xlarge, ml.c5.4xlarge, ml.c5.9xlarge, ml.c5.18xlarge [and many more](https://aws.amazon.com/sagemaker/pricing/instance-types/) | ml.m4.xlarge |
+instance_count | The number of ML compute instances to use in each training job | Yes | Int | ≥ 1 | 1 |
+volume_size | The size of the ML storage volume that you want to provision in GB | Yes | Int | ≥ 1 | 30 |
+resource_encryption_key | The AWS KMS key that Amazon SageMaker uses to encrypt data on the storage volume attached to the ML compute instance(s) | Yes | String | | |
+max_run_time | The maximum run time in seconds per training job | Yes | Int | ≤ 432000 (5 days) | 86400 (1 day) |
+model_artifact_path | Identifies the S3 path where you want Amazon SageMaker to store the model artifacts | No | String | | |
+output_encryption_key | The AWS KMS key that Amazon SageMaker uses to encrypt the model artifacts | Yes | String | | |
+vpc_security_group_ids | A comma-delimited list of security group IDs, in the form sg-xxxxxxxx | Yes | String | | |
+vpc_subnets | A comma-delimited list of subnet IDs in the VPC to which you want to connect your training job | Yes | String | | |
+network_isolation | Isolates the training container if true | Yes | Boolean | False, True | True |
+traffic_encryption | Encrypts all communications between ML compute instances in distributed training if true | Yes | Boolean | False, True | False |
+spot_instance | Use managed spot training if true | Yes | Boolean | False, True | False |
+max_wait_time | The maximum time in seconds you are willing to wait for a managed spot training job to complete | Yes | Int | ≤ 432000 (5 days) | 86400 (1 day) |
+checkpoint_config | Dictionary of information about the output location for managed spot training checkpoint data | Yes | Dict | | {} |
+tags | Key-value pairs to categorize AWS resources | Yes | Dict | | {} |
+
+
+## Output
+Stores the trained model artifacts in the S3 path you specified.
+
+# Example code
+Simple example pipeline with only the Train component: [simple_train_pipeline](https://github.com/kubeflow/pipelines/tree/master/samples/contrib/aws-samples/simple_train_pipeline)
+
+# Resources
+* [Using Amazon built-in algorithms](https://docs.aws.amazon.com/sagemaker/latest/dg/sagemaker-algo-docker-registry-paths.html)
diff --git a/components/aws/sagemaker/train/component.yaml b/components/aws/sagemaker/train/component.yaml
index 2a2fff8d432..d27a3864e83 100644
--- a/components/aws/sagemaker/train/component.yaml
+++ b/components/aws/sagemaker/train/component.yaml
@@ -4,107 +4,108 @@ description: |
 inputs:
 - name: region
   description: 'The region where the training job launches.'
+  type: String
 - name: job_name
   description: 'The name of the batch training job.'
   default: ''
+  type: String
 - name: role
   description: 'The Amazon Resource Name (ARN) that Amazon SageMaker assumes to perform tasks on your behalf.'
+  type: String
 - name: image
   description: 'The registry path of the Docker image that contains the training algorithm.'
   default: ''
+  type: String
 - name: algorithm_name
   description: 'The name of the algorithm resource to use for the training job. Do not specify a value for this if using training image.'
   default: ''
+  type: String
 - name: metric_definitions
   description: 'The dictionary of name-regex pairs specify the metrics that the algorithm emits.'
   default: '{}'
+  type: JsonObject
 - name: training_input_mode
   description: 'The input mode that the algorithm supports. File or Pipe.'
   default: 'File'
+  type: String
 - name: hyperparameters
   description: 'Dictionary of hyperparameters for the the algorithm.'
   default: '{}'
+  type: JsonObject
 - name: channels
   description: 'A list of dicts specifying the input channels. Must have at least one.'
-- name: data_location_1
-  description: 'The S3 URI of the input data source for channel 1.'
-  default: ''
-- name: data_location_2
-  description: 'The S3 URI of the input data source for channel 2.'
-  default: ''
-- name: data_location_3
-  description: 'The S3 URI of the input data source for channel 3.'
- default: '' - - name: data_location_4 - description: 'The S3 URI of the input data source for channel 4.' - default: '' - - name: data_location_5 - description: 'The S3 URI of the input data source for channel 5.' - default: '' - - name: data_location_6 - description: 'The S3 URI of the input data source for channel 6.' - default: '' - - name: data_location_7 - description: 'The S3 URI of the input data source for channel 7.' - default: '' - - name: data_location_8 - description: 'The S3 URI of the input data source for channel 8.' - default: '' + type: JsonArray - name: instance_type description: 'The ML compute instance type.' default: 'ml.m4.xlarge' + type: String - name: instance_count description: 'The number of ML compute instances to use in each training job.' default: '1' + type: Integer - name: volume_size description: 'The size of the ML storage volume that you want to provision.' - default: '1' + default: '30' + type: Integer - name: resource_encryption_key description: 'The AWS KMS key that Amazon SageMaker uses to encrypt data on the storage volume attached to the ML compute instance(s).' default: '' + type: String - name: max_run_time description: 'The maximum run time in seconds for the training job.' default: '86400' + type: Integer - name: model_artifact_path description: 'Identifies the S3 path where you want Amazon SageMaker to store the model artifacts.' + type: String - name: output_encryption_key description: 'The AWS KMS key that Amazon SageMaker uses to encrypt the model artifacts.' default: '' + type: String - name: vpc_security_group_ids description: 'The VPC security group IDs, in the form sg-xxxxxxxx.' default: '' + type: String - name: vpc_subnets description: 'The ID of the subnets in the VPC to which you want to connect your hpo job.' default: '' + type: String - name: network_isolation description: 'Isolates the training container.' default: 'True' + type: Bool - name: traffic_encryption description: 'Encrypts all communications between ML compute instances in distributed training.' default: 'False' + type: Bool - name: spot_instance description: 'Use managed spot training.' default: 'False' + type: Bool - name: max_wait_time description: 'The maximum time in seconds you are willing to wait for a managed spot training job to complete.' default: '86400' + type: Integer - name: checkpoint_config description: 'Dictionary of information about the output location for managed spot training checkpoint data.' default: '{}' + type: JsonObject - name: endpoint_url description: 'The endpoint URL for the private link VPC endpoint.' default: '' + type: String - name: tags description: 'Key-value pairs, to categorize AWS resources.' 
default: '{}' + type: JsonObject outputs: - {name: model_artifact_url, description: 'Model artifacts url'} - {name: job_name, description: 'Training job name'} - {name: training_image, description: 'The registry path of the Docker image that contains the training algorithm'} implementation: container: - image: redbackthomson/aws-kubeflow-sagemaker:20191003 - command: ['python'] + image: amazon/aws-sagemaker-kfp-components:0.3.1 + command: ['python3'] args: [ train.py, --region, {inputValue: region}, @@ -117,14 +118,6 @@ implementation: --training_input_mode, {inputValue: training_input_mode}, --hyperparameters, {inputValue: hyperparameters}, --channels, {inputValue: channels}, - --data_location_1, {inputValue: data_location_1}, - --data_location_2, {inputValue: data_location_2}, - --data_location_3, {inputValue: data_location_3}, - --data_location_4, {inputValue: data_location_4}, - --data_location_5, {inputValue: data_location_5}, - --data_location_6, {inputValue: data_location_6}, - --data_location_7, {inputValue: data_location_7}, - --data_location_8, {inputValue: data_location_8}, --instance_type, {inputValue: instance_type}, --instance_count, {inputValue: instance_count}, --volume_size, {inputValue: volume_size}, diff --git a/components/aws/sagemaker/train/src/train.py b/components/aws/sagemaker/train/src/train.py index 07d1b13cf36..d14d26282a3 100644 --- a/components/aws/sagemaker/train/src/train.py +++ b/components/aws/sagemaker/train/src/train.py @@ -10,6 +10,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import sys import argparse import logging @@ -19,49 +20,39 @@ def create_parser(): parser = argparse.ArgumentParser(description='SageMaker Training Job') _utils.add_default_client_arguments(parser) - parser.add_argument('--job_name', type=str.strip, required=False, help='The name of the training job.', default='') - parser.add_argument('--role', type=str.strip, required=True, help='The Amazon Resource Name (ARN) that Amazon SageMaker assumes to perform tasks on your behalf.') - parser.add_argument('--image', type=str.strip, required=True, help='The registry path of the Docker image that contains the training algorithm.', default='') - parser.add_argument('--algorithm_name', type=str.strip, required=False, help='The name of the resource algorithm to use for the training job.', default='') - parser.add_argument('--metric_definitions', type=_utils.str_to_json_dict, required=False, help='The dictionary of name-regex pairs specify the metrics that the algorithm emits.', default='{}') - parser.add_argument('--training_input_mode', choices=['File', 'Pipe'], type=str.strip, help='The input mode that the algorithm supports. File or Pipe.', default='File') - parser.add_argument('--hyperparameters', type=_utils.str_to_json_dict, help='Dictionary of hyperparameters for the the algorithm.', default='{}') - parser.add_argument('--channels', type=_utils.str_to_json_list, required=True, help='A list of dicts specifying the input channels. 
Must have at least one.') - parser.add_argument('--data_location_1', type=str.strip, required=False, help='The S3 URI of the input data source for channel 1.', default='') - parser.add_argument('--data_location_2', type=str.strip, required=False, help='The S3 URI of the input data source for channel 2.', default='') - parser.add_argument('--data_location_3', type=str.strip, required=False, help='The S3 URI of the input data source for channel 3.', default='') - parser.add_argument('--data_location_4', type=str.strip, required=False, help='The S3 URI of the input data source for channel 4.', default='') - parser.add_argument('--data_location_5', type=str.strip, required=False, help='The S3 URI of the input data source for channel 5.', default='') - parser.add_argument('--data_location_6', type=str.strip, required=False, help='The S3 URI of the input data source for channel 6.', default='') - parser.add_argument('--data_location_7', type=str.strip, required=False, help='The S3 URI of the input data source for channel 7.', default='') - parser.add_argument('--data_location_8', type=str.strip, required=False, help='The S3 URI of the input data source for channel 8.', default='') - parser.add_argument('--instance_type', required=True, choices=['ml.m4.xlarge', 'ml.m4.2xlarge', 'ml.m4.4xlarge', 'ml.m4.10xlarge', 'ml.m4.16xlarge', 'ml.m5.large', 'ml.m5.xlarge', 'ml.m5.2xlarge', 'ml.m5.4xlarge', - 'ml.m5.12xlarge', 'ml.m5.24xlarge', 'ml.c4.xlarge', 'ml.c4.2xlarge', 'ml.c4.4xlarge', 'ml.c4.8xlarge', 'ml.p2.xlarge', 'ml.p2.8xlarge', 'ml.p2.16xlarge', 'ml.p3.2xlarge', 'ml.p3.8xlarge', 'ml.p3.16xlarge', - 'ml.c5.xlarge', 'ml.c5.2xlarge', 'ml.c5.4xlarge', 'ml.c5.9xlarge', 'ml.c5.18xlarge'], type=str.strip, help='The ML compute instance type.', default='ml.m4.xlarge') - parser.add_argument('--instance_count', required=True, type=_utils.str_to_int, help='The registry path of the Docker image that contains the training algorithm.', default=1) - parser.add_argument('--volume_size', type=_utils.str_to_int, required=True, help='The size of the ML storage volume that you want to provision.', default=1) - parser.add_argument('--resource_encryption_key', type=str.strip, required=False, help='The AWS KMS key that Amazon SageMaker uses to encrypt data on the storage volume attached to the ML compute instance(s).', default='') - parser.add_argument('--max_run_time', type=_utils.str_to_int, required=True, help='The maximum run time in seconds for the training job.', default=86400) - parser.add_argument('--model_artifact_path', type=str.strip, required=True, help='Identifies the S3 path where you want Amazon SageMaker to store the model artifacts.') - parser.add_argument('--output_encryption_key', type=str.strip, required=False, help='The AWS KMS key that Amazon SageMaker uses to encrypt the model artifacts.', default='') - parser.add_argument('--vpc_security_group_ids', type=str.strip, required=False, help='The VPC security group IDs, in the form sg-xxxxxxxx.') - parser.add_argument('--vpc_subnets', type=str.strip, required=False, help='The ID of the subnets in the VPC to which you want to connect your hpo job.') + parser.add_argument('--job_name', type=str, required=False, help='The name of the training job.', default='') + parser.add_argument('--role', type=str, required=True, help='The Amazon Resource Name (ARN) that Amazon SageMaker assumes to perform tasks on your behalf.') + parser.add_argument('--image', type=str, required=False, help='The registry path of the Docker image that contains the training 
algorithm.', default='')
+  parser.add_argument('--algorithm_name', type=str, required=False, help='The name of the resource algorithm to use for the training job.', default='')
+  parser.add_argument('--metric_definitions', type=_utils.yaml_or_json_str, required=False, help='The dictionary of name-regex pairs specifying the metrics that the algorithm emits.', default={})
+  parser.add_argument('--training_input_mode', choices=['File', 'Pipe'], type=str, help='The input mode that the algorithm supports. File or Pipe.', default='File')
+  parser.add_argument('--hyperparameters', type=_utils.yaml_or_json_str, help='Dictionary of hyperparameters for the algorithm.', default={})
+  parser.add_argument('--channels', type=_utils.yaml_or_json_str, required=True, help='A list of dicts specifying the input channels. Must have at least one.')
+  parser.add_argument('--instance_type', required=False, type=str, help='The ML compute instance type.', default='ml.m4.xlarge')
+  parser.add_argument('--instance_count', required=True, type=int, help='The number of ML compute instances to use in each training job.', default=1)
+  parser.add_argument('--volume_size', type=int, required=True, help='The size of the ML storage volume that you want to provision.', default=30)
+  parser.add_argument('--resource_encryption_key', type=str, required=False, help='The AWS KMS key that Amazon SageMaker uses to encrypt data on the storage volume attached to the ML compute instance(s).', default='')
+  parser.add_argument('--max_run_time', type=int, required=True, help='The maximum run time in seconds for the training job.', default=86400)
+  parser.add_argument('--model_artifact_path', type=str, required=True, help='Identifies the S3 path where you want Amazon SageMaker to store the model artifacts.')
+  parser.add_argument('--output_encryption_key', type=str, required=False, help='The AWS KMS key that Amazon SageMaker uses to encrypt the model artifacts.', default='')
+  parser.add_argument('--vpc_security_group_ids', type=str, required=False, help='The VPC security group IDs, in the form sg-xxxxxxxx.')
+  parser.add_argument('--vpc_subnets', type=str, required=False, help='The ID of the subnets in the VPC to which you want to connect your training job.')
   parser.add_argument('--network_isolation', type=_utils.str_to_bool, required=False, help='Isolates the training container.', default=True)
   parser.add_argument('--traffic_encryption', type=_utils.str_to_bool, required=False, help='Encrypts all communications between ML compute instances in distributed training.', default=False)
 
   ### Start spot instance support
   parser.add_argument('--spot_instance', type=_utils.str_to_bool, required=False, help='Use managed spot training.', default=False)
-  parser.add_argument('--max_wait_time', type=_utils.str_to_int, required=False, help='The maximum time in seconds you are willing to wait for a managed spot training job to complete.', default=86400)
-  parser.add_argument('--checkpoint_config', type=_utils.str_to_json_dict, required=False, help='Dictionary of information about the output location for managed spot training checkpoint data.', default='{}')
+  parser.add_argument('--max_wait_time', type=int, required=False, help='The maximum time in seconds you are willing to wait for a managed spot training job to complete.', default=86400)
+  parser.add_argument('--checkpoint_config', type=_utils.yaml_or_json_str, required=False, help='Dictionary of information about the output location for managed spot training checkpoint data.', default={})
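+  # Illustrative spot-training invocation (values mirror the unit tests in this PR;
+  # the S3 URI is an example only, not from the source):
+  #   --spot_instance True --max_wait_time 3600 --checkpoint_config '{"S3Uri": "s3://my-bucket/checkpoints/"}'
+  # create_training_job_request raises when max_wait_time or a checkpoint S3Uri is
+  # missing, and the tests reject a max_wait_time of 3599 while accepting 3600.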
### End spot instance support - parser.add_argument('--tags', type=_utils.str_to_json_dict, required=False, help='An array of key-value pairs, to categorize AWS resources.', default='{}') + parser.add_argument('--tags', type=_utils.yaml_or_json_str, required=False, help='An array of key-value pairs, to categorize AWS resources.', default={}) return parser def main(argv=None): parser = create_parser() - args = parser.parse_args() + args = parser.parse_args(argv) logging.getLogger().setLevel(logging.INFO) client = _utils.get_sagemaker_client(args.region, args.endpoint_url) @@ -86,4 +77,4 @@ def main(argv=None): if __name__== "__main__": - main() + main(sys.argv[1:]) diff --git a/components/aws/sagemaker/workteam/component.yaml b/components/aws/sagemaker/workteam/component.yaml index a794e0bf8d1..589f25d709c 100644 --- a/components/aws/sagemaker/workteam/component.yaml +++ b/components/aws/sagemaker/workteam/component.yaml @@ -4,31 +4,40 @@ description: | inputs: - name: region description: 'The region where the cluster launches.' + type: String - name: team_name description: 'The name of your work team.' + type: String - name: description description: 'A description of the work team.' + type: String - name: user_pool description: 'An identifier for a user pool. The user pool must be in the same region as the service that you are calling.' + type: String - name: user_groups description: 'An identifier for a user group.' + type: String - name: client_id description: 'An identifier for an application client. You must create the app client ID using Amazon Cognito.' + type: String - name: sns_topic description: 'The ARN for the SNS topic to which notifications should be published.' default: '' + type: String - name: endpoint_url description: 'The endpoint URL for the private link VPC endpoint.' default: '' + type: String - name: tags description: 'Key-value pairs to categorize AWS resources.' default: '{}' + type: JsonObject outputs: - {name: workteam_arn, description: 'The ARN of the workteam.'} implementation: container: - image: redbackthomson/aws-kubeflow-sagemaker:20191003 - command: ['python'] + image: amazon/aws-sagemaker-kfp-components:0.3.1 + command: ['python3'] args: [ workteam.py, --region, {inputValue: region}, diff --git a/components/aws/sagemaker/workteam/src/workteam.py b/components/aws/sagemaker/workteam/src/workteam.py index cda4d0a7d50..5f9b6caaf4d 100644 --- a/components/aws/sagemaker/workteam/src/workteam.py +++ b/components/aws/sagemaker/workteam/src/workteam.py @@ -19,13 +19,13 @@ def create_parser(): parser = argparse.ArgumentParser(description='SageMaker Hyperparameter Tuning Job') _utils.add_default_client_arguments(parser) - parser.add_argument('--team_name', type=str.strip, required=True, help='The name of your work team.') - parser.add_argument('--description', type=str.strip, required=True, help='A description of the work team.') - parser.add_argument('--user_pool', type=str.strip, required=False, help='An identifier for a user pool. The user pool must be in the same region as the service that you are calling.', default='') - parser.add_argument('--user_groups', type=str.strip, required=False, help='A list of identifiers for user groups separated by commas.', default='') - parser.add_argument('--client_id', type=str.strip, required=False, help='An identifier for an application client. 
You must create the app client ID using Amazon Cognito.', default='') - parser.add_argument('--sns_topic', type=str.strip, required=False, help='The ARN for the SNS topic to which notifications should be published.', default='') - parser.add_argument('--tags', type=_utils.str_to_json_dict, required=False, help='An array of key-value pairs, to categorize AWS resources.', default='{}') + parser.add_argument('--team_name', type=str, required=True, help='The name of your work team.') + parser.add_argument('--description', type=str, required=True, help='A description of the work team.') + parser.add_argument('--user_pool', type=str, required=False, help='An identifier for a user pool. The user pool must be in the same region as the service that you are calling.', default='') + parser.add_argument('--user_groups', type=str, required=False, help='A list of identifiers for user groups separated by commas.', default='') + parser.add_argument('--client_id', type=str, required=False, help='An identifier for an application client. You must create the app client ID using Amazon Cognito.', default='') + parser.add_argument('--sns_topic', type=str, required=False, help='The ARN for the SNS topic to which notifications should be published.', default='') + parser.add_argument('--tags', type=_utils.yaml_or_json_str, required=False, help='An array of key-value pairs, to categorize AWS resources.', default={}) return parser diff --git a/components/deprecated/tfx/Evaluator/component.py b/components/deprecated/tfx/Evaluator/component.py new file mode 100644 index 00000000000..ad803837946 --- /dev/null +++ b/components/deprecated/tfx/Evaluator/component.py @@ -0,0 +1,128 @@ +# flake8: noqa TODO + +from kfp.components import InputPath, OutputPath + + +def Evaluator( + evaluation_path: OutputPath('ModelEvaluation'), + + examples_path: InputPath('Examples'), + model_path: InputPath('Model'), + baseline_model_path: InputPath('Model') = None, + schema_path: InputPath('Schema') = None, + + feature_slicing_spec: {'JsonObject': {'data_type': 'proto:tfx.components.evaluator.FeatureSlicingSpec'}} = None, # TODO: Replace feature_slicing_spec with eval_config + eval_config: {'JsonObject': {'data_type': 'proto:tensorflow_model_analysis.EvalConfig'}} = None, + fairness_indicator_thresholds: list = None, # List[str] + + #blessing_path: OutputPath('ModelBlessing') = None, # Optional outputs are not supported yet +): + """ + A TFX component to evaluate models trained by a TFX Trainer component. + + The Evaluator component performs model evaluations in the TFX pipeline and + the resultant metrics can be viewed in a Jupyter notebook. It uses the + input examples generated from the + [ExampleGen](https://www.tensorflow.org/tfx/guide/examplegen) + component to evaluate the models. + + Specifically, it can provide: + - metrics computed on entire training and eval dataset + - tracking metrics over time + - model quality performance on different feature slices + + ## Exporting the EvalSavedModel in Trainer + + In order to setup Evaluator in a TFX pipeline, an EvalSavedModel needs to be + exported during training, which is a special SavedModel containing + annotations for the metrics, features, labels, and so on in your model. + Evaluator uses this EvalSavedModel to compute metrics. + + As part of this, the Trainer component creates eval_input_receiver_fn, + analogous to the serving_input_receiver_fn, which will extract the features + and labels from the input data. 
As with serving_input_receiver_fn, there are + utility functions to help with this. + + Please see https://www.tensorflow.org/tfx/model_analysis for more details. + + Args: + examples: A Channel of 'Examples' type, usually produced by ExampleGen + component. @Ark-kun: Must have the eval split. _required_ + model: A Channel of 'Model' type, usually produced by + Trainer component. + feature_slicing_spec: + [evaluator_pb2.FeatureSlicingSpec](https://github.com/tensorflow/tfx/blob/master/tfx/proto/evaluator.proto) + instance that describes how Evaluator should slice the data. + Returns: + evaluation: Channel of `ModelEvaluation` to store the evaluation results. + + Either `model_exports` or `model` must be present in the input arguments. + + """ + from tfx.components.evaluator.component import Evaluator as component_class + + #Generated code + import json + import os + import tensorflow + from google.protobuf import json_format, message + from tfx.types import Artifact, channel_utils, artifact_utils + + arguments = locals().copy() + + component_class_args = {} + + for name, execution_parameter in component_class.SPEC_CLASS.PARAMETERS.items(): + argument_value_obj = argument_value = arguments.get(name, None) + if argument_value is None: + continue + parameter_type = execution_parameter.type + if isinstance(parameter_type, type) and issubclass(parameter_type, message.Message): # Maybe FIX: execution_parameter.type can also be a tuple + argument_value_obj = parameter_type() + json_format.Parse(argument_value, argument_value_obj) + component_class_args[name] = argument_value_obj + + for name, channel_parameter in component_class.SPEC_CLASS.INPUTS.items(): + artifact_path = arguments[name + '_path'] + if artifact_path: + artifact = channel_parameter.type() + artifact.uri = artifact_path + '/' # ? + if channel_parameter.type.PROPERTIES and 'split_names' in channel_parameter.type.PROPERTIES: + # Recovering splits + subdirs = tensorflow.io.gfile.listdir(artifact_path) + artifact.split_names = artifact_utils.encode_split_names(sorted(subdirs)) + component_class_args[name] = channel_utils.as_channel([artifact]) + + component_class_instance = component_class(**component_class_args) + + input_dict = {name: channel.get() for name, channel in component_class_instance.inputs.get_all().items()} + output_dict = {name: channel.get() for name, channel in component_class_instance.outputs.get_all().items()} + exec_properties = component_class_instance.exec_properties + + # Generating paths for output artifacts + for name, artifacts in output_dict.items(): + base_artifact_path = arguments.get(name + '_path', None) + if base_artifact_path: + # Are there still cases where output channel has multiple artifacts? 
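+            # If so, the first artifact keeps the base path itself and each additional
+            # artifact is written to a numbered subdirectory ('2', '3', ...) below it.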
+ for idx, artifact in enumerate(artifacts): + subdir = str(idx + 1) if idx > 0 else '' + artifact.uri = os.path.join(base_artifact_path, subdir) # Ends with '/' + + print('component instance: ' + str(component_class_instance)) + + #executor = component_class.EXECUTOR_SPEC.executor_class() # Same + executor = component_class_instance.executor_spec.executor_class() + executor.Do( + input_dict=input_dict, + output_dict=output_dict, + exec_properties=exec_properties, + ) + + +if __name__ == '__main__': + import kfp + kfp.components.func_to_container_op( + Evaluator, + base_image='tensorflow/tfx:0.21.4', + output_component_file='component.yaml' + ) diff --git a/components/deprecated/tfx/Evaluator/component.yaml b/components/deprecated/tfx/Evaluator/component.yaml new file mode 100644 index 00000000000..70e89c6b247 --- /dev/null +++ b/components/deprecated/tfx/Evaluator/component.yaml @@ -0,0 +1,245 @@ +name: Evaluator +description: |- + A TFX component to evaluate models trained by a TFX Trainer component. + + The Evaluator component performs model evaluations in the TFX pipeline and + the resultant metrics can be viewed in a Jupyter notebook. It uses the + input examples generated from the + [ExampleGen](https://www.tensorflow.org/tfx/guide/examplegen) + component to evaluate the models. + + Specifically, it can provide: + - metrics computed on entire training and eval dataset + - tracking metrics over time + - model quality performance on different feature slices + + ## Exporting the EvalSavedModel in Trainer + + In order to setup Evaluator in a TFX pipeline, an EvalSavedModel needs to be + exported during training, which is a special SavedModel containing + annotations for the metrics, features, labels, and so on in your model. + Evaluator uses this EvalSavedModel to compute metrics. + + As part of this, the Trainer component creates eval_input_receiver_fn, + analogous to the serving_input_receiver_fn, which will extract the features + and labels from the input data. As with serving_input_receiver_fn, there are + utility functions to help with this. + + Please see https://www.tensorflow.org/tfx/model_analysis for more details. + + Args: + examples: A Channel of 'Examples' type, usually produced by ExampleGen + component. @Ark-kun: Must have the eval split. _required_ + model: A Channel of 'Model' type, usually produced by + Trainer component. + feature_slicing_spec: + [evaluator_pb2.FeatureSlicingSpec](https://github.com/tensorflow/tfx/blob/master/tfx/proto/evaluator.proto) + instance that describes how Evaluator should slice the data. + Returns: + evaluation: Channel of `ModelEvaluation` to store the evaluation results. + + Either `model_exports` or `model` must be present in the input arguments. 
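+# Usage note: this spec can be loaded with kfp.components.load_component_from_file
+# (or load_component_from_url) and the returned factory used as a pipeline step.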
+inputs: +- {name: examples, type: Examples} +- {name: model, type: Model} +- {name: baseline_model, type: Model, optional: true} +- {name: schema, type: Schema, optional: true} +- name: feature_slicing_spec + type: + JsonObject: {data_type: 'proto:tfx.components.evaluator.FeatureSlicingSpec'} + optional: true +- name: eval_config + type: + JsonObject: {data_type: 'proto:tensorflow_model_analysis.EvalConfig'} + optional: true +- {name: fairness_indicator_thresholds, type: JsonArray, optional: true} +outputs: +- {name: evaluation, type: ModelEvaluation} +implementation: + container: + image: tensorflow/tfx:0.21.4 + command: + - python3 + - -u + - -c + - | + def _make_parent_dirs_and_return_path(file_path: str): + import os + os.makedirs(os.path.dirname(file_path), exist_ok=True) + return file_path + + def Evaluator( + evaluation_path , + + examples_path , + model_path , + baseline_model_path = None, + schema_path = None, + + feature_slicing_spec = None, # TODO: Replace feature_slicing_spec with eval_config + eval_config = None, + fairness_indicator_thresholds = None, # List[str] + + #blessing_path: OutputPath('ModelBlessing') = None, # Optional outputs are not supported yet + ): + """ + A TFX component to evaluate models trained by a TFX Trainer component. + + The Evaluator component performs model evaluations in the TFX pipeline and + the resultant metrics can be viewed in a Jupyter notebook. It uses the + input examples generated from the + [ExampleGen](https://www.tensorflow.org/tfx/guide/examplegen) + component to evaluate the models. + + Specifically, it can provide: + - metrics computed on entire training and eval dataset + - tracking metrics over time + - model quality performance on different feature slices + + ## Exporting the EvalSavedModel in Trainer + + In order to setup Evaluator in a TFX pipeline, an EvalSavedModel needs to be + exported during training, which is a special SavedModel containing + annotations for the metrics, features, labels, and so on in your model. + Evaluator uses this EvalSavedModel to compute metrics. + + As part of this, the Trainer component creates eval_input_receiver_fn, + analogous to the serving_input_receiver_fn, which will extract the features + and labels from the input data. As with serving_input_receiver_fn, there are + utility functions to help with this. + + Please see https://www.tensorflow.org/tfx/model_analysis for more details. + + Args: + examples: A Channel of 'Examples' type, usually produced by ExampleGen + component. @Ark-kun: Must have the eval split. _required_ + model: A Channel of 'Model' type, usually produced by + Trainer component. + feature_slicing_spec: + [evaluator_pb2.FeatureSlicingSpec](https://github.com/tensorflow/tfx/blob/master/tfx/proto/evaluator.proto) + instance that describes how Evaluator should slice the data. + Returns: + evaluation: Channel of `ModelEvaluation` to store the evaluation results. + + Either `model_exports` or `model` must be present in the input arguments. 
+ + """ + from tfx.components.evaluator.component import Evaluator as component_class + + #Generated code + import json + import os + import tensorflow + from google.protobuf import json_format, message + from tfx.types import Artifact, channel_utils, artifact_utils + + arguments = locals().copy() + + component_class_args = {} + + for name, execution_parameter in component_class.SPEC_CLASS.PARAMETERS.items(): + argument_value_obj = argument_value = arguments.get(name, None) + if argument_value is None: + continue + parameter_type = execution_parameter.type + if isinstance(parameter_type, type) and issubclass(parameter_type, message.Message): # Maybe FIX: execution_parameter.type can also be a tuple + argument_value_obj = parameter_type() + json_format.Parse(argument_value, argument_value_obj) + component_class_args[name] = argument_value_obj + + for name, channel_parameter in component_class.SPEC_CLASS.INPUTS.items(): + artifact_path = arguments[name + '_path'] + if artifact_path: + artifact = channel_parameter.type() + artifact.uri = artifact_path + '/' # ? + if channel_parameter.type.PROPERTIES and 'split_names' in channel_parameter.type.PROPERTIES: + # Recovering splits + subdirs = tensorflow.io.gfile.listdir(artifact_path) + artifact.split_names = artifact_utils.encode_split_names(sorted(subdirs)) + component_class_args[name] = channel_utils.as_channel([artifact]) + + component_class_instance = component_class(**component_class_args) + + input_dict = {name: channel.get() for name, channel in component_class_instance.inputs.get_all().items()} + output_dict = {name: channel.get() for name, channel in component_class_instance.outputs.get_all().items()} + exec_properties = component_class_instance.exec_properties + + # Generating paths for output artifacts + for name, artifacts in output_dict.items(): + base_artifact_path = arguments.get(name + '_path', None) + if base_artifact_path: + # Are there still cases where output channel has multiple artifacts? + for idx, artifact in enumerate(artifacts): + subdir = str(idx + 1) if idx > 0 else '' + artifact.uri = os.path.join(base_artifact_path, subdir) # Ends with '/' + + print('component instance: ' + str(component_class_instance)) + + #executor = component_class.EXECUTOR_SPEC.executor_class() # Same + executor = component_class_instance.executor_spec.executor_class() + executor.Do( + input_dict=input_dict, + output_dict=output_dict, + exec_properties=exec_properties, + ) + + import json + import argparse + _parser = argparse.ArgumentParser(prog='Evaluator', description="A TFX component to evaluate models trained by a TFX Trainer component.\n\n The Evaluator component performs model evaluations in the TFX pipeline and\n the resultant metrics can be viewed in a Jupyter notebook. 
It uses the\n input examples generated from the\n [ExampleGen](https://www.tensorflow.org/tfx/guide/examplegen)\n component to evaluate the models.\n\n Specifically, it can provide:\n - metrics computed on entire training and eval dataset\n - tracking metrics over time\n - model quality performance on different feature slices\n\n ## Exporting the EvalSavedModel in Trainer\n\n In order to setup Evaluator in a TFX pipeline, an EvalSavedModel needs to be\n exported during training, which is a special SavedModel containing\n annotations for the metrics, features, labels, and so on in your model.\n Evaluator uses this EvalSavedModel to compute metrics.\n\n As part of this, the Trainer component creates eval_input_receiver_fn,\n analogous to the serving_input_receiver_fn, which will extract the features\n and labels from the input data. As with serving_input_receiver_fn, there are\n utility functions to help with this.\n\n Please see https://www.tensorflow.org/tfx/model_analysis for more details.\n\n Args:\n examples: A Channel of 'Examples' type, usually produced by ExampleGen\n component. @Ark-kun: Must have the eval split. _required_\n model: A Channel of 'Model' type, usually produced by\n Trainer component.\n feature_slicing_spec:\n [evaluator_pb2.FeatureSlicingSpec](https://github.com/tensorflow/tfx/blob/master/tfx/proto/evaluator.proto)\n instance that describes how Evaluator should slice the data.\n Returns:\n evaluation: Channel of `ModelEvaluation` to store the evaluation results.\n\n Either `model_exports` or `model` must be present in the input arguments.") + _parser.add_argument("--examples", dest="examples_path", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--model", dest="model_path", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--baseline-model", dest="baseline_model_path", type=str, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--schema", dest="schema_path", type=str, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--feature-slicing-spec", dest="feature_slicing_spec", type=str, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--eval-config", dest="eval_config", type=str, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--fairness-indicator-thresholds", dest="fairness_indicator_thresholds", type=json.loads, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--evaluation", dest="evaluation_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) + _parsed_args = vars(_parser.parse_args()) + _output_files = _parsed_args.pop("_output_paths", []) + + _outputs = Evaluator(**_parsed_args) + + _output_serializers = [ + + ] + + import os + for idx, output_file in enumerate(_output_files): + try: + os.makedirs(os.path.dirname(output_file)) + except OSError: + pass + with open(output_file, 'w') as f: + f.write(_output_serializers[idx](_outputs[idx])) + args: + - --examples + - {inputPath: examples} + - --model + - {inputPath: model} + - if: + cond: {isPresent: baseline_model} + then: + - --baseline-model + - {inputPath: baseline_model} + - if: + cond: {isPresent: schema} + then: + - --schema + - {inputPath: schema} + - if: + cond: {isPresent: feature_slicing_spec} + then: + - --feature-slicing-spec + - {inputValue: feature_slicing_spec} + - if: + cond: {isPresent: eval_config} + then: + - --eval-config + - {inputValue: eval_config} + - if: + cond: {isPresent: fairness_indicator_thresholds} + then: + - 
--fairness-indicator-thresholds + - {inputValue: fairness_indicator_thresholds} + - --evaluation + - {outputPath: evaluation} diff --git a/components/deprecated/tfx/Evaluator/with_URI_IO/component.py b/components/deprecated/tfx/Evaluator/with_URI_IO/component.py new file mode 100644 index 00000000000..c5f14002c58 --- /dev/null +++ b/components/deprecated/tfx/Evaluator/with_URI_IO/component.py @@ -0,0 +1,103 @@ +# flake8: noqa + +from typing import NamedTuple + +def Evaluator( + examples_uri: 'ExamplesUri', + model_uri: 'ModelUri', + output_evaluation_uri: 'ModelEvaluationUri', + output_blessing_uri: 'ModelBlessingUri', + baseline_model_uri: 'ModelUri' = None, + schema_uri: 'SchemaUri' = None, + eval_config: {'JsonObject': {'data_type': 'proto:tensorflow_model_analysis.EvalConfig'}} = None, + feature_slicing_spec: {'JsonObject': {'data_type': 'proto:tfx.components.evaluator.FeatureSlicingSpec'}} = None, + fairness_indicator_thresholds: list = None, + beam_pipeline_args: list = None, +) -> NamedTuple('Outputs', [ + ('evaluation_uri', 'ModelEvaluationUri'), + ('blessing_uri', 'ModelBlessingUri'), +]): + from tfx.components import Evaluator as component_class + + #Generated code + import json + import os + import tempfile + import tensorflow + from google.protobuf import json_format, message + from tfx.types import channel_utils, artifact_utils + from tfx.components.base import base_executor + + arguments = locals().copy() + + component_class_args = {} + + for name, execution_parameter in component_class.SPEC_CLASS.PARAMETERS.items(): + argument_value = arguments.get(name, None) + if argument_value is None: + continue + parameter_type = execution_parameter.type + if isinstance(parameter_type, type) and issubclass(parameter_type, message.Message): + argument_value_obj = parameter_type() + json_format.Parse(argument_value, argument_value_obj) + else: + argument_value_obj = argument_value + component_class_args[name] = argument_value_obj + + for name, channel_parameter in component_class.SPEC_CLASS.INPUTS.items(): + artifact_path = arguments.get(name + '_uri') or arguments.get(name + '_path') + if artifact_path: + artifact = channel_parameter.type() + artifact.uri = artifact_path.rstrip('/') + '/' # Some TFX components require that the artifact URIs end with a slash + if channel_parameter.type.PROPERTIES and 'split_names' in channel_parameter.type.PROPERTIES: + # Recovering splits + subdirs = tensorflow.io.gfile.listdir(artifact_path) + # Workaround for https://github.com/tensorflow/tensorflow/issues/39167 + subdirs = [subdir.rstrip('/') for subdir in subdirs] + artifact.split_names = artifact_utils.encode_split_names(sorted(subdirs)) + component_class_args[name] = channel_utils.as_channel([artifact]) + + component_class_instance = component_class(**component_class_args) + + input_dict = channel_utils.unwrap_channel_dict(component_class_instance.inputs.get_all()) + output_dict = channel_utils.unwrap_channel_dict(component_class_instance.outputs.get_all()) + exec_properties = component_class_instance.exec_properties + + # Generating paths for output artifacts + for name, artifacts in output_dict.items(): + base_artifact_path = arguments.get('output_' + name + '_uri') or arguments.get(name + '_path') + if base_artifact_path: + # Are there still cases where output channel has multiple artifacts? 
+ for idx, artifact in enumerate(artifacts): + subdir = str(idx + 1) if idx > 0 else '' + artifact.uri = os.path.join(base_artifact_path, subdir) # Ends with '/' + + print('component instance: ' + str(component_class_instance)) + + # Workaround for a TFX+Beam bug to make DataflowRunner work. + # Remove after the next release that has https://github.com/tensorflow/tfx/commit/ddb01c02426d59e8bd541e3fd3cbaaf68779b2df + import tfx + tfx.version.__version__ += 'dev' + + executor_context = base_executor.BaseExecutor.Context( + beam_pipeline_args=beam_pipeline_args, + tmp_dir=tempfile.gettempdir(), + unique_id='tfx_component', + ) + executor = component_class_instance.executor_spec.executor_class(executor_context) + executor.Do( + input_dict=input_dict, + output_dict=output_dict, + exec_properties=exec_properties, + ) + + return (output_evaluation_uri, output_blessing_uri, ) + + +if __name__ == '__main__': + import kfp + kfp.components.create_component_from_func( + Evaluator, + base_image='tensorflow/tfx:0.21.4', + output_component_file='component.yaml' + ) diff --git a/components/deprecated/tfx/Evaluator/with_URI_IO/component.yaml b/components/deprecated/tfx/Evaluator/with_URI_IO/component.yaml new file mode 100644 index 00000000000..c5e16ca8809 --- /dev/null +++ b/components/deprecated/tfx/Evaluator/with_URI_IO/component.yaml @@ -0,0 +1,192 @@ +name: Evaluator +inputs: +- {name: examples_uri, type: ExamplesUri} +- {name: model_uri, type: ModelUri} +- {name: output_evaluation_uri, type: ModelEvaluationUri} +- {name: output_blessing_uri, type: ModelBlessingUri} +- {name: baseline_model_uri, type: ModelUri, optional: true} +- {name: schema_uri, type: SchemaUri, optional: true} +- name: eval_config + type: + JsonObject: {data_type: 'proto:tensorflow_model_analysis.EvalConfig'} + optional: true +- name: feature_slicing_spec + type: + JsonObject: {data_type: 'proto:tfx.components.evaluator.FeatureSlicingSpec'} + optional: true +- {name: fairness_indicator_thresholds, type: JsonArray, optional: true} +- {name: beam_pipeline_args, type: JsonArray, optional: true} +outputs: +- {name: evaluation_uri, type: ModelEvaluationUri} +- {name: blessing_uri, type: ModelBlessingUri} +implementation: + container: + image: tensorflow/tfx:0.21.4 + command: + - python3 + - -u + - -c + - | + def Evaluator( + examples_uri, + model_uri, + output_evaluation_uri, + output_blessing_uri, + baseline_model_uri = None, + schema_uri = None, + eval_config = None, + feature_slicing_spec = None, + fairness_indicator_thresholds = None, + beam_pipeline_args = None, + ): + from tfx.components import Evaluator as component_class + + #Generated code + import json + import os + import tempfile + import tensorflow + from google.protobuf import json_format, message + from tfx.types import channel_utils, artifact_utils + from tfx.components.base import base_executor + + arguments = locals().copy() + + component_class_args = {} + + for name, execution_parameter in component_class.SPEC_CLASS.PARAMETERS.items(): + argument_value = arguments.get(name, None) + if argument_value is None: + continue + parameter_type = execution_parameter.type + if isinstance(parameter_type, type) and issubclass(parameter_type, message.Message): + argument_value_obj = parameter_type() + json_format.Parse(argument_value, argument_value_obj) + else: + argument_value_obj = argument_value + component_class_args[name] = argument_value_obj + + for name, channel_parameter in component_class.SPEC_CLASS.INPUTS.items(): + artifact_path = arguments.get(name + '_uri') or 
arguments.get(name + '_path') + if artifact_path: + artifact = channel_parameter.type() + artifact.uri = artifact_path.rstrip('/') + '/' # Some TFX components require that the artifact URIs end with a slash + if channel_parameter.type.PROPERTIES and 'split_names' in channel_parameter.type.PROPERTIES: + # Recovering splits + subdirs = tensorflow.io.gfile.listdir(artifact_path) + # Workaround for https://github.com/tensorflow/tensorflow/issues/39167 + subdirs = [subdir.rstrip('/') for subdir in subdirs] + artifact.split_names = artifact_utils.encode_split_names(sorted(subdirs)) + component_class_args[name] = channel_utils.as_channel([artifact]) + + component_class_instance = component_class(**component_class_args) + + input_dict = channel_utils.unwrap_channel_dict(component_class_instance.inputs.get_all()) + output_dict = channel_utils.unwrap_channel_dict(component_class_instance.outputs.get_all()) + exec_properties = component_class_instance.exec_properties + + # Generating paths for output artifacts + for name, artifacts in output_dict.items(): + base_artifact_path = arguments.get('output_' + name + '_uri') or arguments.get(name + '_path') + if base_artifact_path: + # Are there still cases where output channel has multiple artifacts? + for idx, artifact in enumerate(artifacts): + subdir = str(idx + 1) if idx > 0 else '' + artifact.uri = os.path.join(base_artifact_path, subdir) # Ends with '/' + + print('component instance: ' + str(component_class_instance)) + + # Workaround for a TFX+Beam bug to make DataflowRunner work. + # Remove after the next release that has https://github.com/tensorflow/tfx/commit/ddb01c02426d59e8bd541e3fd3cbaaf68779b2df + import tfx + tfx.version.__version__ += 'dev' + + executor_context = base_executor.BaseExecutor.Context( + beam_pipeline_args=beam_pipeline_args, + tmp_dir=tempfile.gettempdir(), + unique_id='tfx_component', + ) + executor = component_class_instance.executor_spec.executor_class(executor_context) + executor.Do( + input_dict=input_dict, + output_dict=output_dict, + exec_properties=exec_properties, + ) + + return (output_evaluation_uri, output_blessing_uri, ) + + import json + import argparse + _parser = argparse.ArgumentParser(prog='Evaluator', description='') + _parser.add_argument("--examples-uri", dest="examples_uri", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--model-uri", dest="model_uri", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--output-evaluation-uri", dest="output_evaluation_uri", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--output-blessing-uri", dest="output_blessing_uri", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--baseline-model-uri", dest="baseline_model_uri", type=str, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--schema-uri", dest="schema_uri", type=str, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--eval-config", dest="eval_config", type=str, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--feature-slicing-spec", dest="feature_slicing_spec", type=str, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--fairness-indicator-thresholds", dest="fairness_indicator_thresholds", type=json.loads, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--beam-pipeline-args", dest="beam_pipeline_args", type=json.loads, required=False, default=argparse.SUPPRESS) + _parser.add_argument("----output-paths", 
dest="_output_paths", type=str, nargs=2) + _parsed_args = vars(_parser.parse_args()) + _output_files = _parsed_args.pop("_output_paths", []) + + _outputs = Evaluator(**_parsed_args) + + _output_serializers = [ + str, + str, + + ] + + import os + for idx, output_file in enumerate(_output_files): + try: + os.makedirs(os.path.dirname(output_file)) + except OSError: + pass + with open(output_file, 'w') as f: + f.write(_output_serializers[idx](_outputs[idx])) + args: + - --examples-uri + - {inputValue: examples_uri} + - --model-uri + - {inputValue: model_uri} + - --output-evaluation-uri + - {inputValue: output_evaluation_uri} + - --output-blessing-uri + - {inputValue: output_blessing_uri} + - if: + cond: {isPresent: baseline_model_uri} + then: + - --baseline-model-uri + - {inputValue: baseline_model_uri} + - if: + cond: {isPresent: schema_uri} + then: + - --schema-uri + - {inputValue: schema_uri} + - if: + cond: {isPresent: eval_config} + then: + - --eval-config + - {inputValue: eval_config} + - if: + cond: {isPresent: feature_slicing_spec} + then: + - --feature-slicing-spec + - {inputValue: feature_slicing_spec} + - if: + cond: {isPresent: fairness_indicator_thresholds} + then: + - --fairness-indicator-thresholds + - {inputValue: fairness_indicator_thresholds} + - if: + cond: {isPresent: beam_pipeline_args} + then: + - --beam-pipeline-args + - {inputValue: beam_pipeline_args} + - '----output-paths' + - {outputPath: evaluation_uri} + - {outputPath: blessing_uri} diff --git a/components/deprecated/tfx/ExampleGen/BigQueryExampleGen/component.py b/components/deprecated/tfx/ExampleGen/BigQueryExampleGen/component.py new file mode 100644 index 00000000000..988c565d01c --- /dev/null +++ b/components/deprecated/tfx/ExampleGen/BigQueryExampleGen/component.py @@ -0,0 +1,102 @@ +# flake8: noqa TODO + +from kfp.components import InputPath, OutputPath + + +def BigQueryExampleGen( + examples_path: OutputPath('Examples'), + + query: str = None, + input_config: {'JsonObject': {'data_type': 'proto:tfx.components.example_gen.Input'}} = None, + output_config: {'JsonObject': {'data_type': 'proto:tfx.components.example_gen.Output'}} = None, + custom_config: {'JsonObject': {'data_type': 'proto:tfx.components.example_gen.CustomConfig'}} = None, +): + """ + Official TFX BigQueryExampleGen component. + + The BigQuery examplegen component takes a query, and generates train + and eval examples for downsteam components. + + + Args: + query: BigQuery sql string, query result will be treated as a single + split, can be overwritten by input_config. + input_config: An example_gen_pb2.Input instance with Split.pattern as + BigQuery sql string. If set, it overwrites the 'query' arg, and allows + different queries per split. + output_config: An example_gen_pb2.Output instance, providing output + configuration. If unset, default splits will be 'train' and 'eval' with + size 2:1. + Returns: + examples: Optional channel of 'ExamplesPath' for output train and + eval examples. + + Raises: + RuntimeError: Only one of query and input_config should be set. 
+ """ + from tfx.components.example_gen.csv_example_gen.component import BigQueryExampleGen as component_class + + #Generated code + import json + import os + import tensorflow + from google.protobuf import json_format, message + from tfx.types import Artifact, channel_utils, artifact_utils + + arguments = locals().copy() + + component_class_args = {} + + for name, execution_parameter in component_class.SPEC_CLASS.PARAMETERS.items(): + argument_value_obj = argument_value = arguments.get(name, None) + if argument_value is None: + continue + parameter_type = execution_parameter.type + if isinstance(parameter_type, type) and issubclass(parameter_type, message.Message): # Maybe FIX: execution_parameter.type can also be a tuple + argument_value_obj = parameter_type() + json_format.Parse(argument_value, argument_value_obj) + component_class_args[name] = argument_value_obj + + for name, channel_parameter in component_class.SPEC_CLASS.INPUTS.items(): + artifact_path = arguments[name + '_path'] + if artifact_path: + artifact = channel_parameter.type() + artifact.uri = artifact_path + '/' # ? + if channel_parameter.type.PROPERTIES and 'split_names' in channel_parameter.type.PROPERTIES: + # Recovering splits + subdirs = tensorflow.io.gfile.listdir(artifact_path) + artifact.split_names = artifact_utils.encode_split_names(sorted(subdirs)) + component_class_args[name] = channel_utils.as_channel([artifact]) + + component_class_instance = component_class(**component_class_args) + + input_dict = {name: channel.get() for name, channel in component_class_instance.inputs.get_all().items()} + output_dict = {name: channel.get() for name, channel in component_class_instance.outputs.get_all().items()} + exec_properties = component_class_instance.exec_properties + + # Generating paths for output artifacts + for name, artifacts in output_dict.items(): + base_artifact_path = arguments[name + '_path'] + # Are there still cases where output channel has multiple artifacts? + for idx, artifact in enumerate(artifacts): + subdir = str(idx + 1) if idx > 0 else '' + artifact.uri = os.path.join(base_artifact_path, subdir) # Ends with '/' + + print('component instance: ' + str(component_class_instance)) + + #executor = component_class.EXECUTOR_SPEC.executor_class() # Same + executor = component_class_instance.executor_spec.executor_class() + executor.Do( + input_dict=input_dict, + output_dict=output_dict, + exec_properties=exec_properties, + ) + + +if __name__ == '__main__': + import kfp + kfp.components.func_to_container_op( + BigQueryExampleGen, + base_image='tensorflow/tfx:0.21.4', + output_component_file='component.yaml' + ) diff --git a/components/deprecated/tfx/ExampleGen/BigQueryExampleGen/component.yaml b/components/deprecated/tfx/ExampleGen/BigQueryExampleGen/component.yaml new file mode 100644 index 00000000000..bb3588c334d --- /dev/null +++ b/components/deprecated/tfx/ExampleGen/BigQueryExampleGen/component.yaml @@ -0,0 +1,187 @@ +name: BigQueryExampleGen +description: |- + Official TFX BigQueryExampleGen component. + + The BigQuery examplegen component takes a query, and generates train + and eval examples for downsteam components. + + + Args: + query: BigQuery sql string, query result will be treated as a single + split, can be overwritten by input_config. + input_config: An example_gen_pb2.Input instance with Split.pattern as + BigQuery sql string. If set, it overwrites the 'query' arg, and allows + different queries per split. 
+ output_config: An example_gen_pb2.Output instance, providing output + configuration. If unset, default splits will be 'train' and 'eval' with + size 2:1. + Returns: + examples: Optional channel of 'ExamplesPath' for output train and + eval examples. + + Raises: + RuntimeError: Only one of query and input_config should be set. +inputs: +- {name: query, type: String, optional: true} +- name: input_config + type: + JsonObject: {data_type: 'proto:tfx.components.example_gen.Input'} + optional: true +- name: output_config + type: + JsonObject: {data_type: 'proto:tfx.components.example_gen.Output'} + optional: true +- name: custom_config + type: + JsonObject: {data_type: 'proto:tfx.components.example_gen.CustomConfig'} + optional: true +outputs: +- {name: examples, type: Examples} +implementation: + container: + image: tensorflow/tfx:0.21.4 + command: + - python3 + - -u + - -c + - | + def _make_parent_dirs_and_return_path(file_path: str): + import os + os.makedirs(os.path.dirname(file_path), exist_ok=True) + return file_path + + def BigQueryExampleGen( + examples_path , + + query = None, + input_config = None, + output_config = None, + custom_config = None, + ): + """ + Official TFX BigQueryExampleGen component. + + The BigQuery examplegen component takes a query, and generates train + and eval examples for downstream components. + + Args: + query: BigQuery sql string, query result will be treated as a single + split, can be overwritten by input_config. + input_config: An example_gen_pb2.Input instance with Split.pattern as + BigQuery sql string. If set, it overwrites the 'query' arg, and allows + different queries per split. + output_config: An example_gen_pb2.Output instance, providing output + configuration. If unset, default splits will be 'train' and 'eval' with + size 2:1. + Returns: + examples: Optional channel of 'ExamplesPath' for output train and + eval examples. + + Raises: + RuntimeError: Only one of query and input_config should be set. + """ + from tfx.components.example_gen.big_query_example_gen.component import BigQueryExampleGen as component_class + + #Generated code + import json + import os + import tensorflow + from google.protobuf import json_format, message + from tfx.types import Artifact, channel_utils, artifact_utils + + arguments = locals().copy() + + component_class_args = {} + + for name, execution_parameter in component_class.SPEC_CLASS.PARAMETERS.items(): + argument_value_obj = argument_value = arguments.get(name, None) + if argument_value is None: + continue + parameter_type = execution_parameter.type + if isinstance(parameter_type, type) and issubclass(parameter_type, message.Message): # Maybe FIX: execution_parameter.type can also be a tuple + argument_value_obj = parameter_type() + json_format.Parse(argument_value, argument_value_obj) + component_class_args[name] = argument_value_obj + + for name, channel_parameter in component_class.SPEC_CLASS.INPUTS.items(): + artifact_path = arguments[name + '_path'] + if artifact_path: + artifact = channel_parameter.type() + artifact.uri = artifact_path + '/' # ? 
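+ # Why the trailing slash: some TFX components require that artifact URIs end
+ # with '/'; the with_URI_IO variants of these components append it explicitly.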
+ if channel_parameter.type.PROPERTIES and 'split_names' in channel_parameter.type.PROPERTIES: + # Recovering splits + subdirs = tensorflow.io.gfile.listdir(artifact_path) + artifact.split_names = artifact_utils.encode_split_names(sorted(subdirs)) + component_class_args[name] = channel_utils.as_channel([artifact]) + + component_class_instance = component_class(**component_class_args) + + input_dict = {name: channel.get() for name, channel in component_class_instance.inputs.get_all().items()} + output_dict = {name: channel.get() for name, channel in component_class_instance.outputs.get_all().items()} + exec_properties = component_class_instance.exec_properties + + # Generating paths for output artifacts + for name, artifacts in output_dict.items(): + base_artifact_path = arguments[name + '_path'] + # Are there still cases where output channel has multiple artifacts? + for idx, artifact in enumerate(artifacts): + subdir = str(idx + 1) if idx > 0 else '' + artifact.uri = os.path.join(base_artifact_path, subdir) # Ends with '/' + + print('component instance: ' + str(component_class_instance)) + + #executor = component_class.EXECUTOR_SPEC.executor_class() # Same + executor = component_class_instance.executor_spec.executor_class() + executor.Do( + input_dict=input_dict, + output_dict=output_dict, + exec_properties=exec_properties, + ) + + import argparse + _parser = argparse.ArgumentParser(prog='BigQueryExampleGen', description="Official TFX BigQueryExampleGen component.\n\n The BigQuery examplegen component takes a query, and generates train\n and eval examples for downstream components.\n\n\n Args:\n query: BigQuery sql string, query result will be treated as a single\n split, can be overwritten by input_config.\n input_config: An example_gen_pb2.Input instance with Split.pattern as\n BigQuery sql string. If set, it overwrites the 'query' arg, and allows\n different queries per split.\n output_config: An example_gen_pb2.Output instance, providing output\n configuration. 
If unset, default splits will be 'train' and 'eval' with\n size 2:1.\n Returns:\n examples: Optional channel of 'ExamplesPath' for output train and\n eval examples.\n\n Raises:\n RuntimeError: Only one of query and input_config should be set.") + _parser.add_argument("--query", dest="query", type=str, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--input-config", dest="input_config", type=str, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--output-config", dest="output_config", type=str, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--custom-config", dest="custom_config", type=str, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--examples", dest="examples_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) + _parsed_args = vars(_parser.parse_args()) + _output_files = _parsed_args.pop("_output_paths", []) + + _outputs = BigQueryExampleGen(**_parsed_args) + + _output_serializers = [ + + ] + + import os + for idx, output_file in enumerate(_output_files): + try: + os.makedirs(os.path.dirname(output_file)) + except OSError: + pass + with open(output_file, 'w') as f: + f.write(_output_serializers[idx](_outputs[idx])) + args: + - if: + cond: {isPresent: query} + then: + - --query + - {inputValue: query} + - if: + cond: {isPresent: input_config} + then: + - --input-config + - {inputValue: input_config} + - if: + cond: {isPresent: output_config} + then: + - --output-config + - {inputValue: output_config} + - if: + cond: {isPresent: custom_config} + then: + - --custom-config + - {inputValue: custom_config} + - --examples + - {outputPath: examples} diff --git a/components/deprecated/tfx/ExampleGen/BigQueryExampleGen/with_URI_IO/component.py b/components/deprecated/tfx/ExampleGen/BigQueryExampleGen/with_URI_IO/component.py new file mode 100644 index 00000000000..74b411b64cd --- /dev/null +++ b/components/deprecated/tfx/ExampleGen/BigQueryExampleGen/with_URI_IO/component.py @@ -0,0 +1,97 @@ +# flake8: noqa + +from typing import NamedTuple + +def BigQueryExampleGen( + output_examples_uri: 'ExamplesUri', + input_config: {'JsonObject': {'data_type': 'proto:tfx.components.example_gen.Input'}}, + output_config: {'JsonObject': {'data_type': 'proto:tfx.components.example_gen.Output'}}, + custom_config: {'JsonObject': {'data_type': 'proto:tfx.components.example_gen.CustomConfig'}} = None, + beam_pipeline_args: list = None, +) -> NamedTuple('Outputs', [ + ('examples_uri', 'ExamplesUri'), +]): + from tfx.components import BigQueryExampleGen as component_class + + #Generated code + import json + import os + import tempfile + import tensorflow + from google.protobuf import json_format, message + from tfx.types import channel_utils, artifact_utils + from tfx.components.base import base_executor + + arguments = locals().copy() + + component_class_args = {} + + for name, execution_parameter in component_class.SPEC_CLASS.PARAMETERS.items(): + argument_value = arguments.get(name, None) + if argument_value is None: + continue + parameter_type = execution_parameter.type + if isinstance(parameter_type, type) and issubclass(parameter_type, message.Message): + argument_value_obj = parameter_type() + json_format.Parse(argument_value, argument_value_obj) + else: + argument_value_obj = argument_value + component_class_args[name] = argument_value_obj + + for name, channel_parameter in component_class.SPEC_CLASS.INPUTS.items(): + artifact_path = arguments.get(name + '_uri') or arguments.get(name + '_path') + if 
artifact_path: + artifact = channel_parameter.type() + artifact.uri = artifact_path.rstrip('/') + '/' # Some TFX components require that the artifact URIs end with a slash + if channel_parameter.type.PROPERTIES and 'split_names' in channel_parameter.type.PROPERTIES: + # Recovering splits + subdirs = tensorflow.io.gfile.listdir(artifact_path) + # Workaround for https://github.com/tensorflow/tensorflow/issues/39167 + subdirs = [subdir.rstrip('/') for subdir in subdirs] + artifact.split_names = artifact_utils.encode_split_names(sorted(subdirs)) + component_class_args[name] = channel_utils.as_channel([artifact]) + + component_class_instance = component_class(**component_class_args) + + input_dict = channel_utils.unwrap_channel_dict(component_class_instance.inputs.get_all()) + output_dict = channel_utils.unwrap_channel_dict(component_class_instance.outputs.get_all()) + exec_properties = component_class_instance.exec_properties + + # Generating paths for output artifacts + for name, artifacts in output_dict.items(): + base_artifact_path = arguments.get('output_' + name + '_uri') or arguments.get(name + '_path') + if base_artifact_path: + # Are there still cases where output channel has multiple artifacts? + for idx, artifact in enumerate(artifacts): + subdir = str(idx + 1) if idx > 0 else '' + artifact.uri = os.path.join(base_artifact_path, subdir) # Ends with '/' + + print('component instance: ' + str(component_class_instance)) + + # Workaround for a TFX+Beam bug to make DataflowRunner work. + # Remove after the next release that has https://github.com/tensorflow/tfx/commit/ddb01c02426d59e8bd541e3fd3cbaaf68779b2df + import tfx + tfx.version.__version__ += 'dev' + + executor_context = base_executor.BaseExecutor.Context( + beam_pipeline_args=beam_pipeline_args, + tmp_dir=tempfile.gettempdir(), + unique_id='tfx_component', + ) + executor = component_class_instance.executor_spec.executor_class(executor_context) + executor.Do( + input_dict=input_dict, + output_dict=output_dict, + exec_properties=exec_properties, + ) + + return (output_examples_uri, ) + + +if __name__ == '__main__': + import kfp + kfp.components.create_component_from_func( + BigQueryExampleGen, + base_image='tensorflow/tfx:0.21.4', + output_component_file='component.yaml' + ) diff --git a/components/deprecated/tfx/ExampleGen/BigQueryExampleGen/with_URI_IO/component.yaml b/components/deprecated/tfx/ExampleGen/BigQueryExampleGen/with_URI_IO/component.yaml new file mode 100644 index 00000000000..f03e310c3ae --- /dev/null +++ b/components/deprecated/tfx/ExampleGen/BigQueryExampleGen/with_URI_IO/component.yaml @@ -0,0 +1,153 @@ +name: BigQueryExampleGen +inputs: +- {name: output_examples_uri, type: ExamplesUri} +- name: input_config + type: + JsonObject: {data_type: 'proto:tfx.components.example_gen.Input'} +- name: output_config + type: + JsonObject: {data_type: 'proto:tfx.components.example_gen.Output'} +- name: custom_config + type: + JsonObject: {data_type: 'proto:tfx.components.example_gen.CustomConfig'} + optional: true +- {name: beam_pipeline_args, type: JsonArray, optional: true} +outputs: +- {name: examples_uri, type: ExamplesUri} +implementation: + container: + image: tensorflow/tfx:0.21.4 + command: + - python3 + - -u + - -c + - | + def BigQueryExampleGen( + output_examples_uri, + input_config, + output_config, + custom_config = None, + beam_pipeline_args = None, + ): + from tfx.components import BigQueryExampleGen as component_class + + #Generated code + import json + import os + import tempfile + import tensorflow + from 
google.protobuf import json_format, message + from tfx.types import channel_utils, artifact_utils + from tfx.components.base import base_executor + + arguments = locals().copy() + + component_class_args = {} + + for name, execution_parameter in component_class.SPEC_CLASS.PARAMETERS.items(): + argument_value = arguments.get(name, None) + if argument_value is None: + continue + parameter_type = execution_parameter.type + if isinstance(parameter_type, type) and issubclass(parameter_type, message.Message): + argument_value_obj = parameter_type() + json_format.Parse(argument_value, argument_value_obj) + else: + argument_value_obj = argument_value + component_class_args[name] = argument_value_obj + + for name, channel_parameter in component_class.SPEC_CLASS.INPUTS.items(): + artifact_path = arguments.get(name + '_uri') or arguments.get(name + '_path') + if artifact_path: + artifact = channel_parameter.type() + artifact.uri = artifact_path.rstrip('/') + '/' # Some TFX components require that the artifact URIs end with a slash + if channel_parameter.type.PROPERTIES and 'split_names' in channel_parameter.type.PROPERTIES: + # Recovering splits + subdirs = tensorflow.io.gfile.listdir(artifact_path) + # Workaround for https://github.com/tensorflow/tensorflow/issues/39167 + subdirs = [subdir.rstrip('/') for subdir in subdirs] + artifact.split_names = artifact_utils.encode_split_names(sorted(subdirs)) + component_class_args[name] = channel_utils.as_channel([artifact]) + + component_class_instance = component_class(**component_class_args) + + input_dict = channel_utils.unwrap_channel_dict(component_class_instance.inputs.get_all()) + output_dict = channel_utils.unwrap_channel_dict(component_class_instance.outputs.get_all()) + exec_properties = component_class_instance.exec_properties + + # Generating paths for output artifacts + for name, artifacts in output_dict.items(): + base_artifact_path = arguments.get('output_' + name + '_uri') or arguments.get(name + '_path') + if base_artifact_path: + # Are there still cases where output channel has multiple artifacts? + for idx, artifact in enumerate(artifacts): + subdir = str(idx + 1) if idx > 0 else '' + artifact.uri = os.path.join(base_artifact_path, subdir) # Ends with '/' + + print('component instance: ' + str(component_class_instance)) + + # Workaround for a TFX+Beam bug to make DataflowRunner work. 
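+ # (Faking a 'dev' version presumably changes how TFX stages its own package
+ # for remote Beam workers; this is an inference from the linked commit, not
+ # documented behavior.)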
+ # Remove after the next release that has https://github.com/tensorflow/tfx/commit/ddb01c02426d59e8bd541e3fd3cbaaf68779b2df + import tfx + tfx.version.__version__ += 'dev' + + executor_context = base_executor.BaseExecutor.Context( + beam_pipeline_args=beam_pipeline_args, + tmp_dir=tempfile.gettempdir(), + unique_id='tfx_component', + ) + executor = component_class_instance.executor_spec.executor_class(executor_context) + executor.Do( + input_dict=input_dict, + output_dict=output_dict, + exec_properties=exec_properties, + ) + + return (output_examples_uri, ) + + import json + import argparse + _parser = argparse.ArgumentParser(prog='BigQueryExampleGen', description='') + _parser.add_argument("--output-examples-uri", dest="output_examples_uri", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--input-config", dest="input_config", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--output-config", dest="output_config", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--custom-config", dest="custom_config", type=str, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--beam-pipeline-args", dest="beam_pipeline_args", type=json.loads, required=False, default=argparse.SUPPRESS) + _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1) + _parsed_args = vars(_parser.parse_args()) + _output_files = _parsed_args.pop("_output_paths", []) + + _outputs = BigQueryExampleGen(**_parsed_args) + + _output_serializers = [ + str, + + ] + + import os + for idx, output_file in enumerate(_output_files): + try: + os.makedirs(os.path.dirname(output_file)) + except OSError: + pass + with open(output_file, 'w') as f: + f.write(_output_serializers[idx](_outputs[idx])) + args: + - --output-examples-uri + - {inputValue: output_examples_uri} + - --input-config + - {inputValue: input_config} + - --output-config + - {inputValue: output_config} + - if: + cond: {isPresent: custom_config} + then: + - --custom-config + - {inputValue: custom_config} + - if: + cond: {isPresent: beam_pipeline_args} + then: + - --beam-pipeline-args + - {inputValue: beam_pipeline_args} + - '----output-paths' + - {outputPath: examples_uri} diff --git a/components/deprecated/tfx/ExampleGen/CsvExampleGen/component.py b/components/deprecated/tfx/ExampleGen/CsvExampleGen/component.py new file mode 100644 index 00000000000..6acd808110c --- /dev/null +++ b/components/deprecated/tfx/ExampleGen/CsvExampleGen/component.py @@ -0,0 +1,97 @@ +# flake8: noqa TODO + +from kfp.components import InputPath, OutputPath + +def CsvExampleGen( + # Inputs + input_path: InputPath('ExternalArtifact'), + + # Outputs + examples_path: OutputPath('Examples'), + + # Execution properties + input_config: {'JsonObject': {'data_type': 'proto:tfx.components.example_gen.Input'}} = None, # = '{"splits": []}', # JSON-serialized example_gen_pb2.Input instance, providing input configuration. If unset, the files under input_base will be treated as a single split. + output_config: {'JsonObject': {'data_type': 'proto:tfx.components.example_gen.Output'}} = None, # = '{"splitConfig": {"splits": []}}', # JSON-serialized example_gen_pb2.Output instance, providing output configuration. If unset, default splits will be 'train' and 'eval' with size 2:1. + custom_config: {'JsonObject': {'data_type': 'proto:tfx.components.example_gen.CustomConfig'}} = None, +): + """Executes the CsvExampleGen component. 
+ + Args: + input: A Channel of 'ExternalPath' type, which includes one artifact + whose uri is an external directory with csv files inside (required). + input_config: An example_gen_pb2.Input instance, providing input + configuration. If unset, the files under input will be treated as a + single split. + output_config: An example_gen_pb2.Output instance, providing output + configuration. If unset, default splits will be 'train' and 'eval' with + size 2:1. + Returns: + examples: Artifact of type 'Examples' for output train and + eval examples. + """ + from tfx.components.example_gen.csv_example_gen.component import CsvExampleGen as component_class + + #Generated code + import json + import os + import tensorflow + from google.protobuf import json_format, message + from tfx.types import Artifact, channel_utils, artifact_utils + + arguments = locals().copy() + + component_class_args = {} + + for name, execution_parameter in component_class.SPEC_CLASS.PARAMETERS.items(): + argument_value_obj = argument_value = arguments.get(name, None) + if argument_value is None: + continue + parameter_type = execution_parameter.type + if isinstance(parameter_type, type) and issubclass(parameter_type, message.Message): # Maybe FIX: execution_parameter.type can also be a tuple + argument_value_obj = parameter_type() + json_format.Parse(argument_value, argument_value_obj) + component_class_args[name] = argument_value_obj + + for name, channel_parameter in component_class.SPEC_CLASS.INPUTS.items(): + artifact_path = arguments[name + '_path'] + if artifact_path: + artifact = channel_parameter.type() + artifact.uri = artifact_path + '/' # ? + if channel_parameter.type.PROPERTIES and 'split_names' in channel_parameter.type.PROPERTIES: + # Recovering splits + subdirs = tensorflow.io.gfile.listdir(artifact_path) + artifact.split_names = artifact_utils.encode_split_names(sorted(subdirs)) + component_class_args[name] = channel_utils.as_channel([artifact]) + + component_class_instance = component_class(**component_class_args) + + input_dict = {name: channel.get() for name, channel in component_class_instance.inputs.get_all().items()} + output_dict = {name: channel.get() for name, channel in component_class_instance.outputs.get_all().items()} + exec_properties = component_class_instance.exec_properties + + # Generating paths for output artifacts + for name, artifacts in output_dict.items(): + base_artifact_path = arguments[name + '_path'] + # Are there still cases where output channel has multiple artifacts? 
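+ # If so: the first artifact keeps the base path, and any additional ones are
+ # placed under numbered subdirectories ('2', '3', ...).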
+ for idx, artifact in enumerate(artifacts): + subdir = str(idx + 1) if idx > 0 else '' + artifact.uri = os.path.join(base_artifact_path, subdir) # Ends with '/' + + print('component instance: ' + str(component_class_instance)) + + #executor = component_class.EXECUTOR_SPEC.executor_class() # Same + executor = component_class_instance.executor_spec.executor_class() + executor.Do( + input_dict=input_dict, + output_dict=output_dict, + exec_properties=exec_properties, + ) + + +if __name__ == '__main__': + import kfp + kfp.components.func_to_container_op( + CsvExampleGen, + base_image='tensorflow/tfx:0.21.4', + output_component_file='component.yaml' + ) diff --git a/components/deprecated/tfx/ExampleGen/CsvExampleGen/component.yaml b/components/deprecated/tfx/ExampleGen/CsvExampleGen/component.yaml new file mode 100644 index 00000000000..ecedc272ba8 --- /dev/null +++ b/components/deprecated/tfx/ExampleGen/CsvExampleGen/component.yaml @@ -0,0 +1,174 @@ +name: CsvExampleGen +description: |- + Executes the CsvExampleGen component. + + Args: + input: A Channel of 'ExternalPath' type, which includes one artifact + whose uri is an external directory with csv files inside (required). + input_config: An example_gen_pb2.Input instance, providing input + configuration. If unset, the files under input will be treated as a + single split. + output_config: An example_gen_pb2.Output instance, providing output + configuration. If unset, default splits will be 'train' and 'eval' with + size 2:1. + Returns: + examples: Artifact of type 'Examples' for output train and + eval examples. +inputs: +- {name: input, type: ExternalArtifact} +- name: input_config + type: + JsonObject: {data_type: 'proto:tfx.components.example_gen.Input'} + optional: true +- name: output_config + type: + JsonObject: {data_type: 'proto:tfx.components.example_gen.Output'} + optional: true +- name: custom_config + type: + JsonObject: {data_type: 'proto:tfx.components.example_gen.CustomConfig'} + optional: true +outputs: +- {name: examples, type: Examples} +implementation: + container: + image: tensorflow/tfx:0.21.4 + command: + - python3 + - -u + - -c + - | + def _make_parent_dirs_and_return_path(file_path: str): + import os + os.makedirs(os.path.dirname(file_path), exist_ok=True) + return file_path + + def CsvExampleGen( + # Inputs + input_path , + + # Outputs + examples_path , + + # Execution properties + input_config = None, # = '{"splits": []}', # JSON-serialized example_gen_pb2.Input instance, providing input configuration. If unset, the files under input_base will be treated as a single split. + output_config = None, # = '{"splitConfig": {"splits": []}}', # JSON-serialized example_gen_pb2.Output instance, providing output configuration. If unset, default splits will be 'train' and 'eval' with size 2:1. + custom_config = None, + ): + """Executes the CsvExampleGen component. + + Args: + input: A Channel of 'ExternalPath' type, which includes one artifact + whose uri is an external directory with csv files inside (required). + input_config: An example_gen_pb2.Input instance, providing input + configuration. If unset, the files under input will be treated as a + single split. + output_config: An example_gen_pb2.Output instance, providing output + configuration. If unset, default splits will be 'train' and 'eval' with + size 2:1. + Returns: + examples: Artifact of type 'Examples' for output train and + eval examples. 
+ """ + from tfx.components.example_gen.csv_example_gen.component import CsvExampleGen as component_class + + #Generated code + import json + import os + import tensorflow + from google.protobuf import json_format, message + from tfx.types import Artifact, channel_utils, artifact_utils + + arguments = locals().copy() + + component_class_args = {} + + for name, execution_parameter in component_class.SPEC_CLASS.PARAMETERS.items(): + argument_value_obj = argument_value = arguments.get(name, None) + if argument_value is None: + continue + parameter_type = execution_parameter.type + if isinstance(parameter_type, type) and issubclass(parameter_type, message.Message): # Maybe FIX: execution_parameter.type can also be a tuple + argument_value_obj = parameter_type() + json_format.Parse(argument_value, argument_value_obj) + component_class_args[name] = argument_value_obj + + for name, channel_parameter in component_class.SPEC_CLASS.INPUTS.items(): + artifact_path = arguments[name + '_path'] + if artifact_path: + artifact = channel_parameter.type() + artifact.uri = artifact_path + '/' # ? + if channel_parameter.type.PROPERTIES and 'split_names' in channel_parameter.type.PROPERTIES: + # Recovering splits + subdirs = tensorflow.io.gfile.listdir(artifact_path) + artifact.split_names = artifact_utils.encode_split_names(sorted(subdirs)) + component_class_args[name] = channel_utils.as_channel([artifact]) + + component_class_instance = component_class(**component_class_args) + + input_dict = {name: channel.get() for name, channel in component_class_instance.inputs.get_all().items()} + output_dict = {name: channel.get() for name, channel in component_class_instance.outputs.get_all().items()} + exec_properties = component_class_instance.exec_properties + + # Generating paths for output artifacts + for name, artifacts in output_dict.items(): + base_artifact_path = arguments[name + '_path'] + # Are there still cases where output channel has multiple artifacts? + for idx, artifact in enumerate(artifacts): + subdir = str(idx + 1) if idx > 0 else '' + artifact.uri = os.path.join(base_artifact_path, subdir) # Ends with '/' + + print('component instance: ' + str(component_class_instance)) + + #executor = component_class.EXECUTOR_SPEC.executor_class() # Same + executor = component_class_instance.executor_spec.executor_class() + executor.Do( + input_dict=input_dict, + output_dict=output_dict, + exec_properties=exec_properties, + ) + + import argparse + _parser = argparse.ArgumentParser(prog='CsvExampleGen', description="Executes the CsvExampleGen component.\n\n Args:\n input: A Channel of 'ExternalPath' type, which includes one artifact\n whose uri is an external directory with csv files inside (required).\n input_config: An example_gen_pb2.Input instance, providing input\n configuration. If unset, the files under input will be treated as a\n single split.\n output_config: An example_gen_pb2.Output instance, providing output\n configuration. 
If unset, default splits will be 'train' and 'eval' with\n size 2:1.\n Returns:\n examples: Artifact of type 'Examples' for output train and\n eval examples.") + _parser.add_argument("--input", dest="input_path", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--input-config", dest="input_config", type=str, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--output-config", dest="output_config", type=str, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--custom-config", dest="custom_config", type=str, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--examples", dest="examples_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) + _parsed_args = vars(_parser.parse_args()) + _output_files = _parsed_args.pop("_output_paths", []) + + _outputs = CsvExampleGen(**_parsed_args) + + _output_serializers = [ + + ] + + import os + for idx, output_file in enumerate(_output_files): + try: + os.makedirs(os.path.dirname(output_file)) + except OSError: + pass + with open(output_file, 'w') as f: + f.write(_output_serializers[idx](_outputs[idx])) + args: + - --input + - {inputPath: input} + - if: + cond: {isPresent: input_config} + then: + - --input-config + - {inputValue: input_config} + - if: + cond: {isPresent: output_config} + then: + - --output-config + - {inputValue: output_config} + - if: + cond: {isPresent: custom_config} + then: + - --custom-config + - {inputValue: custom_config} + - --examples + - {outputPath: examples} diff --git a/components/deprecated/tfx/ExampleGen/CsvExampleGen/with_URI_IO/component.py b/components/deprecated/tfx/ExampleGen/CsvExampleGen/with_URI_IO/component.py new file mode 100644 index 00000000000..bec721f6757 --- /dev/null +++ b/components/deprecated/tfx/ExampleGen/CsvExampleGen/with_URI_IO/component.py @@ -0,0 +1,98 @@ +# flake8: noqa + +from typing import NamedTuple + +def CsvExampleGen( + input_uri: 'ExternalArtifactUri', + output_examples_uri: 'ExamplesUri', + input_config: {'JsonObject': {'data_type': 'proto:tfx.components.example_gen.Input'}}, + output_config: {'JsonObject': {'data_type': 'proto:tfx.components.example_gen.Output'}}, + custom_config: {'JsonObject': {'data_type': 'proto:tfx.components.example_gen.CustomConfig'}} = None, + beam_pipeline_args: list = None, +) -> NamedTuple('Outputs', [ + ('examples_uri', 'ExamplesUri'), +]): + from tfx.components import CsvExampleGen as component_class + + #Generated code + import json + import os + import tempfile + import tensorflow + from google.protobuf import json_format, message + from tfx.types import channel_utils, artifact_utils + from tfx.components.base import base_executor + + arguments = locals().copy() + + component_class_args = {} + + for name, execution_parameter in component_class.SPEC_CLASS.PARAMETERS.items(): + argument_value = arguments.get(name, None) + if argument_value is None: + continue + parameter_type = execution_parameter.type + if isinstance(parameter_type, type) and issubclass(parameter_type, message.Message): + argument_value_obj = parameter_type() + json_format.Parse(argument_value, argument_value_obj) + else: + argument_value_obj = argument_value + component_class_args[name] = argument_value_obj + + for name, channel_parameter in component_class.SPEC_CLASS.INPUTS.items(): + artifact_path = arguments.get(name + '_uri') or arguments.get(name + '_path') + if artifact_path: + artifact = channel_parameter.type() + artifact.uri = artifact_path.rstrip('/') + '/' # Some TFX 
components require that the artifact URIs end with a slash + if channel_parameter.type.PROPERTIES and 'split_names' in channel_parameter.type.PROPERTIES: + # Recovering splits + subdirs = tensorflow.io.gfile.listdir(artifact_path) + # Workaround for https://github.com/tensorflow/tensorflow/issues/39167 + subdirs = [subdir.rstrip('/') for subdir in subdirs] + artifact.split_names = artifact_utils.encode_split_names(sorted(subdirs)) + component_class_args[name] = channel_utils.as_channel([artifact]) + + component_class_instance = component_class(**component_class_args) + + input_dict = channel_utils.unwrap_channel_dict(component_class_instance.inputs.get_all()) + output_dict = channel_utils.unwrap_channel_dict(component_class_instance.outputs.get_all()) + exec_properties = component_class_instance.exec_properties + + # Generating paths for output artifacts + for name, artifacts in output_dict.items(): + base_artifact_path = arguments.get('output_' + name + '_uri') or arguments.get(name + '_path') + if base_artifact_path: + # Are there still cases where output channel has multiple artifacts? + for idx, artifact in enumerate(artifacts): + subdir = str(idx + 1) if idx > 0 else '' + artifact.uri = os.path.join(base_artifact_path, subdir) # Ends with '/' + + print('component instance: ' + str(component_class_instance)) + + # Workaround for a TFX+Beam bug to make DataflowRunner work. + # Remove after the next release that has https://github.com/tensorflow/tfx/commit/ddb01c02426d59e8bd541e3fd3cbaaf68779b2df + import tfx + tfx.version.__version__ += 'dev' + + executor_context = base_executor.BaseExecutor.Context( + beam_pipeline_args=beam_pipeline_args, + tmp_dir=tempfile.gettempdir(), + unique_id='tfx_component', + ) + executor = component_class_instance.executor_spec.executor_class(executor_context) + executor.Do( + input_dict=input_dict, + output_dict=output_dict, + exec_properties=exec_properties, + ) + + return (output_examples_uri, ) + + +if __name__ == '__main__': + import kfp + kfp.components.create_component_from_func( + CsvExampleGen, + base_image='tensorflow/tfx:0.21.4', + output_component_file='component.yaml' + ) diff --git a/components/deprecated/tfx/ExampleGen/CsvExampleGen/with_URI_IO/component.yaml b/components/deprecated/tfx/ExampleGen/CsvExampleGen/with_URI_IO/component.yaml new file mode 100644 index 00000000000..92d85e8703d --- /dev/null +++ b/components/deprecated/tfx/ExampleGen/CsvExampleGen/with_URI_IO/component.yaml @@ -0,0 +1,158 @@ +name: CsvExampleGen +inputs: +- {name: input_uri, type: ExternalArtifactUri} +- {name: output_examples_uri, type: ExamplesUri} +- name: input_config + type: + JsonObject: {data_type: 'proto:tfx.components.example_gen.Input'} +- name: output_config + type: + JsonObject: {data_type: 'proto:tfx.components.example_gen.Output'} +- name: custom_config + type: + JsonObject: {data_type: 'proto:tfx.components.example_gen.CustomConfig'} + optional: true +- {name: beam_pipeline_args, type: JsonArray, optional: true} +outputs: +- {name: examples_uri, type: ExamplesUri} +implementation: + container: + image: tensorflow/tfx:0.21.4 + command: + - python3 + - -u + - -c + - | + def CsvExampleGen( + input_uri, + output_examples_uri, + input_config, + output_config, + custom_config = None, + beam_pipeline_args = None, + ): + from tfx.components import CsvExampleGen as component_class + + #Generated code + import json + import os + import tempfile + import tensorflow + from google.protobuf import json_format, message + from tfx.types import channel_utils, 
artifact_utils + from tfx.components.base import base_executor + + arguments = locals().copy() + + component_class_args = {} + + for name, execution_parameter in component_class.SPEC_CLASS.PARAMETERS.items(): + argument_value = arguments.get(name, None) + if argument_value is None: + continue + parameter_type = execution_parameter.type + if isinstance(parameter_type, type) and issubclass(parameter_type, message.Message): + argument_value_obj = parameter_type() + json_format.Parse(argument_value, argument_value_obj) + else: + argument_value_obj = argument_value + component_class_args[name] = argument_value_obj + + for name, channel_parameter in component_class.SPEC_CLASS.INPUTS.items(): + artifact_path = arguments.get(name + '_uri') or arguments.get(name + '_path') + if artifact_path: + artifact = channel_parameter.type() + artifact.uri = artifact_path.rstrip('/') + '/' # Some TFX components require that the artifact URIs end with a slash + if channel_parameter.type.PROPERTIES and 'split_names' in channel_parameter.type.PROPERTIES: + # Recovering splits + subdirs = tensorflow.io.gfile.listdir(artifact_path) + # Workaround for https://github.com/tensorflow/tensorflow/issues/39167 + subdirs = [subdir.rstrip('/') for subdir in subdirs] + artifact.split_names = artifact_utils.encode_split_names(sorted(subdirs)) + component_class_args[name] = channel_utils.as_channel([artifact]) + + component_class_instance = component_class(**component_class_args) + + input_dict = channel_utils.unwrap_channel_dict(component_class_instance.inputs.get_all()) + output_dict = channel_utils.unwrap_channel_dict(component_class_instance.outputs.get_all()) + exec_properties = component_class_instance.exec_properties + + # Generating paths for output artifacts + for name, artifacts in output_dict.items(): + base_artifact_path = arguments.get('output_' + name + '_uri') or arguments.get(name + '_path') + if base_artifact_path: + # Are there still cases where output channel has multiple artifacts? + for idx, artifact in enumerate(artifacts): + subdir = str(idx + 1) if idx > 0 else '' + artifact.uri = os.path.join(base_artifact_path, subdir) # Ends with '/' + + print('component instance: ' + str(component_class_instance)) + + # Workaround for a TFX+Beam bug to make DataflowRunner work. 
+ # Remove after the next release that has https://github.com/tensorflow/tfx/commit/ddb01c02426d59e8bd541e3fd3cbaaf68779b2df + import tfx + tfx.version.__version__ += 'dev' + + executor_context = base_executor.BaseExecutor.Context( + beam_pipeline_args=beam_pipeline_args, + tmp_dir=tempfile.gettempdir(), + unique_id='tfx_component', + ) + executor = component_class_instance.executor_spec.executor_class(executor_context) + executor.Do( + input_dict=input_dict, + output_dict=output_dict, + exec_properties=exec_properties, + ) + + return (output_examples_uri, ) + + import json + import argparse + _parser = argparse.ArgumentParser(prog='CsvExampleGen', description='') + _parser.add_argument("--input-uri", dest="input_uri", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--output-examples-uri", dest="output_examples_uri", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--input-config", dest="input_config", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--output-config", dest="output_config", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--custom-config", dest="custom_config", type=str, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--beam-pipeline-args", dest="beam_pipeline_args", type=json.loads, required=False, default=argparse.SUPPRESS) + _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1) + _parsed_args = vars(_parser.parse_args()) + _output_files = _parsed_args.pop("_output_paths", []) + + _outputs = CsvExampleGen(**_parsed_args) + + _output_serializers = [ + str, + + ] + + import os + for idx, output_file in enumerate(_output_files): + try: + os.makedirs(os.path.dirname(output_file)) + except OSError: + pass + with open(output_file, 'w') as f: + f.write(_output_serializers[idx](_outputs[idx])) + args: + - --input-uri + - {inputValue: input_uri} + - --output-examples-uri + - {inputValue: output_examples_uri} + - --input-config + - {inputValue: input_config} + - --output-config + - {inputValue: output_config} + - if: + cond: {isPresent: custom_config} + then: + - --custom-config + - {inputValue: custom_config} + - if: + cond: {isPresent: beam_pipeline_args} + then: + - --beam-pipeline-args + - {inputValue: beam_pipeline_args} + - '----output-paths' + - {outputPath: examples_uri} diff --git a/components/deprecated/tfx/ExampleGen/ImportExampleGen/component.py b/components/deprecated/tfx/ExampleGen/ImportExampleGen/component.py new file mode 100644 index 00000000000..80ef66e8710 --- /dev/null +++ b/components/deprecated/tfx/ExampleGen/ImportExampleGen/component.py @@ -0,0 +1,105 @@ +# flake8: noqa TODO + +from kfp.components import InputPath, OutputPath + + +def ImportExampleGen( + input_base_path: InputPath('ExternalPath'), + #input_path: InputPath('ExternalPath'), + + examples_path: OutputPath('Examples'), + + input_config: 'JsonObject: example_gen_pb2.Input' = None, + output_config: 'JsonObject: example_gen_pb2.Output' = None, +): + """ + TFX ImportExampleGen component. + + The ImportExampleGen component takes TFRecord files with TF Example data + format, and generates train and eval examples for downstream components. + This component provides consistent and configurable partitioning, and it also + shuffles the dataset for ML best practice. + + Args: + input: A Channel of 'ExternalPath' type, which includes one artifact + whose uri is an external directory with TFRecord files inside + (required). 
+ input_config: An example_gen_pb2.Input instance, providing input + configuration. If unset, the files under input_base will be treated as a + single split. + output_config: An example_gen_pb2.Output instance, providing output + configuration. If unset, default splits will be 'train' and 'eval' with + size 2:1. + Returns: + examples: Optional channel of 'ExamplesPath' for output train and + eval examples. + + Raises: + RuntimeError: Only one of query and input_config should be set. + """ + from tfx.components.example_gen.import_example_gen.component import ImportExampleGen as component_class + + #Generated code + import json + import os + import tensorflow + from google.protobuf import json_format, message + from tfx.types import Artifact, channel_utils, artifact_utils + + arguments = locals().copy() + + component_class_args = {} + + for name, execution_parameter in component_class.SPEC_CLASS.PARAMETERS.items(): + argument_value_obj = argument_value = arguments.get(name, None) + if argument_value is None: + continue + parameter_type = execution_parameter.type + if isinstance(parameter_type, type) and issubclass(parameter_type, message.Message): # Maybe FIX: execution_parameter.type can also be a tuple + argument_value_obj = parameter_type() + json_format.Parse(argument_value, argument_value_obj) + component_class_args[name] = argument_value_obj + + for name, channel_parameter in component_class.SPEC_CLASS.INPUTS.items(): + artifact_path = arguments[name + '_path'] + if artifact_path: + artifact = channel_parameter.type() + artifact.uri = artifact_path + '/' # ? + if channel_parameter.type.PROPERTIES and 'split_names' in channel_parameter.type.PROPERTIES: + # Recovering splits + subdirs = tensorflow.io.gfile.listdir(artifact_path) + artifact.split_names = artifact_utils.encode_split_names(sorted(subdirs)) + component_class_args[name] = channel_utils.as_channel([artifact]) + + component_class_instance = component_class(**component_class_args) + + input_dict = {name: channel.get() for name, channel in component_class_instance.inputs.get_all().items()} + output_dict = {name: channel.get() for name, channel in component_class_instance.outputs.get_all().items()} + exec_properties = component_class_instance.exec_properties + + # Generating paths for output artifacts + for name, artifacts in output_dict.items(): + base_artifact_path = arguments[name + '_path'] + # Are there still cases where output channel has multiple artifacts? + for idx, artifact in enumerate(artifacts): + subdir = str(idx + 1) if idx > 0 else '' + artifact.uri = os.path.join(base_artifact_path, subdir) # Ends with '/' + + print('component instance: ' + str(component_class_instance)) + + #executor = component_class.EXECUTOR_SPEC.executor_class() # Same + executor = component_class_instance.executor_spec.executor_class() + executor.Do( + input_dict=input_dict, + output_dict=output_dict, + exec_properties=exec_properties, + ) + + +if __name__ == '__main__': + import kfp + kfp.components.func_to_container_op( + ImportExampleGen, + base_image='tensorflow/tfx:0.21.4', + output_component_file='component.yaml' + ) diff --git a/components/deprecated/tfx/ExampleGen/ImportExampleGen/component.yaml b/components/deprecated/tfx/ExampleGen/ImportExampleGen/component.yaml new file mode 100644 index 00000000000..001fab87b4b --- /dev/null +++ b/components/deprecated/tfx/ExampleGen/ImportExampleGen/component.yaml @@ -0,0 +1,174 @@ +name: ImportExampleGen +description: |- + TFX ImportExampleGen component. 
+ + The ImportExampleGen component takes TFRecord files with TF Example data + format, and generates train and eval examples for downstream components. + This component provides consistent and configurable partitioning, and it also + shuffles the dataset for ML best practice. + + Args: + input: A Channel of 'ExternalPath' type, which includes one artifact + whose uri is an external directory with TFRecord files inside + (required). + input_config: An example_gen_pb2.Input instance, providing input + configuration. If unset, the files under input_base will be treated as a + single split. + output_config: An example_gen_pb2.Output instance, providing output + configuration. If unset, default splits will be 'train' and 'eval' with + size 2:1. + Returns: + examples: Optional channel of 'ExamplesPath' for output train and + eval examples. + + Raises: + RuntimeError: Only one of query and input_config should be set. +inputs: +- {name: input_base, type: ExternalPath} +- {name: input_config, type: 'JsonObject: example_gen_pb2.Input', optional: true} +- {name: output_config, type: 'JsonObject: example_gen_pb2.Output', optional: true} +outputs: +- {name: examples, type: Examples} +implementation: + container: + image: tensorflow/tfx:0.21.4 + command: + - python3 + - -u + - -c + - | + def _make_parent_dirs_and_return_path(file_path: str): + import os + os.makedirs(os.path.dirname(file_path), exist_ok=True) + return file_path + + def ImportExampleGen( + input_base_path , + #input_path: InputPath('ExternalPath'), + + examples_path , + + input_config = None, + output_config = None, + ): + """ + TFX ImportExampleGen component. + + The ImportExampleGen component takes TFRecord files with TF Example data + format, and generates train and eval examples for downstream components. + This component provides consistent and configurable partitioning, and it also + shuffles the dataset for ML best practice. + + Args: + input: A Channel of 'ExternalPath' type, which includes one artifact + whose uri is an external directory with TFRecord files inside + (required). + input_config: An example_gen_pb2.Input instance, providing input + configuration. If unset, the files under input_base will be treated as a + single split. + output_config: An example_gen_pb2.Output instance, providing output + configuration. If unset, default splits will be 'train' and 'eval' with + size 2:1. + Returns: + examples: Optional channel of 'ExamplesPath' for output train and + eval examples. + + Raises: + RuntimeError: Only one of query and input_config should be set. 
+ """ + from tfx.components.example_gen.import_example_gen.component import ImportExampleGen as component_class + + #Generated code + import json + import os + import tensorflow + from google.protobuf import json_format, message + from tfx.types import Artifact, channel_utils, artifact_utils + + arguments = locals().copy() + + component_class_args = {} + + for name, execution_parameter in component_class.SPEC_CLASS.PARAMETERS.items(): + argument_value_obj = argument_value = arguments.get(name, None) + if argument_value is None: + continue + parameter_type = execution_parameter.type + if isinstance(parameter_type, type) and issubclass(parameter_type, message.Message): # Maybe FIX: execution_parameter.type can also be a tuple + argument_value_obj = parameter_type() + json_format.Parse(argument_value, argument_value_obj) + component_class_args[name] = argument_value_obj + + for name, channel_parameter in component_class.SPEC_CLASS.INPUTS.items(): + artifact_path = arguments[name + '_path'] + if artifact_path: + artifact = channel_parameter.type() + artifact.uri = artifact_path + '/' # ? + if channel_parameter.type.PROPERTIES and 'split_names' in channel_parameter.type.PROPERTIES: + # Recovering splits + subdirs = tensorflow.io.gfile.listdir(artifact_path) + artifact.split_names = artifact_utils.encode_split_names(sorted(subdirs)) + component_class_args[name] = channel_utils.as_channel([artifact]) + + component_class_instance = component_class(**component_class_args) + + input_dict = {name: channel.get() for name, channel in component_class_instance.inputs.get_all().items()} + output_dict = {name: channel.get() for name, channel in component_class_instance.outputs.get_all().items()} + exec_properties = component_class_instance.exec_properties + + # Generating paths for output artifacts + for name, artifacts in output_dict.items(): + base_artifact_path = arguments[name + '_path'] + # Are there still cases where output channel has multiple artifacts? + for idx, artifact in enumerate(artifacts): + subdir = str(idx + 1) if idx > 0 else '' + artifact.uri = os.path.join(base_artifact_path, subdir) # Ends with '/' + + print('component instance: ' + str(component_class_instance)) + + #executor = component_class.EXECUTOR_SPEC.executor_class() # Same + executor = component_class_instance.executor_spec.executor_class() + executor.Do( + input_dict=input_dict, + output_dict=output_dict, + exec_properties=exec_properties, + ) + + import argparse + _parser = argparse.ArgumentParser(prog='ImportExampleGen', description="TFX ImportExampleGen component.\n\n The ImportExampleGen component takes TFRecord files with TF Example data\n format, and generates train and eval examples for downsteam components.\n This component provides consistent and configurable partition, and it also\n shuffle the dataset for ML best practice.\n\n Args:\n input: A Channel of 'ExternalPath' type, which includes one artifact\n whose uri is an external directory with TFRecord files inside\n (required).\n input_config: An example_gen_pb2.Input instance, providing input\n configuration. If unset, the files under input_base will be treated as a\n single split.\n output_config: An example_gen_pb2.Output instance, providing output\n configuration. 
If unset, default splits will be 'train' and 'eval' with\n size 2:1.\n Returns:\n examples: Optional channel of 'ExamplesPath' for output train and\n eval examples.\n\n Raises:\n RuntimeError: Only one of query and input_config should be set.") + _parser.add_argument("--input-base", dest="input_base_path", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--input-config", dest="input_config", type=str, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--output-config", dest="output_config", type=str, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--examples", dest="examples_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) + _parsed_args = vars(_parser.parse_args()) + _output_files = _parsed_args.pop("_output_paths", []) + + _outputs = ImportExampleGen(**_parsed_args) + + _output_serializers = [ + + ] + + import os + for idx, output_file in enumerate(_output_files): + try: + os.makedirs(os.path.dirname(output_file)) + except OSError: + pass + with open(output_file, 'w') as f: + f.write(_output_serializers[idx](_outputs[idx])) + args: + - --input-base + - {inputPath: input_base} + - if: + cond: {isPresent: input_config} + then: + - --input-config + - {inputValue: input_config} + - if: + cond: {isPresent: output_config} + then: + - --output-config + - {inputValue: output_config} + - --examples + - {outputPath: examples} diff --git a/components/deprecated/tfx/ExampleGen/ImportExampleGen/with_URI_IO/component.py b/components/deprecated/tfx/ExampleGen/ImportExampleGen/with_URI_IO/component.py new file mode 100644 index 00000000000..f3304f66c90 --- /dev/null +++ b/components/deprecated/tfx/ExampleGen/ImportExampleGen/with_URI_IO/component.py @@ -0,0 +1,98 @@ +# flake8: noqa + +from typing import NamedTuple + +def ImportExampleGen( + input_uri: 'ExternalArtifactUri', + output_examples_uri: 'ExamplesUri', + input_config: {'JsonObject': {'data_type': 'proto:tfx.components.example_gen.Input'}}, + output_config: {'JsonObject': {'data_type': 'proto:tfx.components.example_gen.Output'}}, + custom_config: {'JsonObject': {'data_type': 'proto:tfx.components.example_gen.CustomConfig'}} = None, + beam_pipeline_args: list = None, +) -> NamedTuple('Outputs', [ + ('examples_uri', 'ExamplesUri'), +]): + from tfx.components import ImportExampleGen as component_class + + #Generated code + import json + import os + import tempfile + import tensorflow + from google.protobuf import json_format, message + from tfx.types import channel_utils, artifact_utils + from tfx.components.base import base_executor + + arguments = locals().copy() + + component_class_args = {} + + for name, execution_parameter in component_class.SPEC_CLASS.PARAMETERS.items(): + argument_value = arguments.get(name, None) + if argument_value is None: + continue + parameter_type = execution_parameter.type + if isinstance(parameter_type, type) and issubclass(parameter_type, message.Message): + argument_value_obj = parameter_type() + json_format.Parse(argument_value, argument_value_obj) + else: + argument_value_obj = argument_value + component_class_args[name] = argument_value_obj + + for name, channel_parameter in component_class.SPEC_CLASS.INPUTS.items(): + artifact_path = arguments.get(name + '_uri') or arguments.get(name + '_path') + if artifact_path: + artifact = channel_parameter.type() + artifact.uri = artifact_path.rstrip('/') + '/' # Some TFX components require that the artifact URIs end with a slash + if channel_parameter.type.PROPERTIES and 
'split_names' in channel_parameter.type.PROPERTIES: + # Recovering splits + subdirs = tensorflow.io.gfile.listdir(artifact_path) + # Workaround for https://github.com/tensorflow/tensorflow/issues/39167 + subdirs = [subdir.rstrip('/') for subdir in subdirs] + artifact.split_names = artifact_utils.encode_split_names(sorted(subdirs)) + component_class_args[name] = channel_utils.as_channel([artifact]) + + component_class_instance = component_class(**component_class_args) + + input_dict = channel_utils.unwrap_channel_dict(component_class_instance.inputs.get_all()) + output_dict = channel_utils.unwrap_channel_dict(component_class_instance.outputs.get_all()) + exec_properties = component_class_instance.exec_properties + + # Generating paths for output artifacts + for name, artifacts in output_dict.items(): + base_artifact_path = arguments.get('output_' + name + '_uri') or arguments.get(name + '_path') + if base_artifact_path: + # Are there still cases where output channel has multiple artifacts? + for idx, artifact in enumerate(artifacts): + subdir = str(idx + 1) if idx > 0 else '' + artifact.uri = os.path.join(base_artifact_path, subdir) # Ends with '/' + + print('component instance: ' + str(component_class_instance)) + + # Workaround for a TFX+Beam bug to make DataflowRunner work. + # Remove after the next release that has https://github.com/tensorflow/tfx/commit/ddb01c02426d59e8bd541e3fd3cbaaf68779b2df + import tfx + tfx.version.__version__ += 'dev' + + executor_context = base_executor.BaseExecutor.Context( + beam_pipeline_args=beam_pipeline_args, + tmp_dir=tempfile.gettempdir(), + unique_id='tfx_component', + ) + executor = component_class_instance.executor_spec.executor_class(executor_context) + executor.Do( + input_dict=input_dict, + output_dict=output_dict, + exec_properties=exec_properties, + ) + + return (output_examples_uri, ) + + +if __name__ == '__main__': + import kfp + kfp.components.create_component_from_func( + ImportExampleGen, + base_image='tensorflow/tfx:0.21.4', + output_component_file='component.yaml' + ) diff --git a/components/deprecated/tfx/ExampleGen/ImportExampleGen/with_URI_IO/component.yaml b/components/deprecated/tfx/ExampleGen/ImportExampleGen/with_URI_IO/component.yaml new file mode 100644 index 00000000000..e9bde562670 --- /dev/null +++ b/components/deprecated/tfx/ExampleGen/ImportExampleGen/with_URI_IO/component.yaml @@ -0,0 +1,158 @@ +name: ImportExampleGen +inputs: +- {name: input_uri, type: ExternalArtifactUri} +- {name: output_examples_uri, type: ExamplesUri} +- name: input_config + type: + JsonObject: {data_type: 'proto:tfx.components.example_gen.Input'} +- name: output_config + type: + JsonObject: {data_type: 'proto:tfx.components.example_gen.Output'} +- name: custom_config + type: + JsonObject: {data_type: 'proto:tfx.components.example_gen.CustomConfig'} + optional: true +- {name: beam_pipeline_args, type: JsonArray, optional: true} +outputs: +- {name: examples_uri, type: ExamplesUri} +implementation: + container: + image: tensorflow/tfx:0.21.4 + command: + - python3 + - -u + - -c + - | + def ImportExampleGen( + input_uri, + output_examples_uri, + input_config, + output_config, + custom_config = None, + beam_pipeline_args = None, + ): + from tfx.components import ImportExampleGen as component_class + + #Generated code + import json + import os + import tempfile + import tensorflow + from google.protobuf import json_format, message + from tfx.types import channel_utils, artifact_utils + from tfx.components.base import base_executor + + arguments = 
locals().copy() + + component_class_args = {} + + for name, execution_parameter in component_class.SPEC_CLASS.PARAMETERS.items(): + argument_value = arguments.get(name, None) + if argument_value is None: + continue + parameter_type = execution_parameter.type + if isinstance(parameter_type, type) and issubclass(parameter_type, message.Message): + argument_value_obj = parameter_type() + json_format.Parse(argument_value, argument_value_obj) + else: + argument_value_obj = argument_value + component_class_args[name] = argument_value_obj + + for name, channel_parameter in component_class.SPEC_CLASS.INPUTS.items(): + artifact_path = arguments.get(name + '_uri') or arguments.get(name + '_path') + if artifact_path: + artifact = channel_parameter.type() + artifact.uri = artifact_path.rstrip('/') + '/' # Some TFX components require that the artifact URIs end with a slash + if channel_parameter.type.PROPERTIES and 'split_names' in channel_parameter.type.PROPERTIES: + # Recovering splits + subdirs = tensorflow.io.gfile.listdir(artifact_path) + # Workaround for https://github.com/tensorflow/tensorflow/issues/39167 + subdirs = [subdir.rstrip('/') for subdir in subdirs] + artifact.split_names = artifact_utils.encode_split_names(sorted(subdirs)) + component_class_args[name] = channel_utils.as_channel([artifact]) + + component_class_instance = component_class(**component_class_args) + + input_dict = channel_utils.unwrap_channel_dict(component_class_instance.inputs.get_all()) + output_dict = channel_utils.unwrap_channel_dict(component_class_instance.outputs.get_all()) + exec_properties = component_class_instance.exec_properties + + # Generating paths for output artifacts + for name, artifacts in output_dict.items(): + base_artifact_path = arguments.get('output_' + name + '_uri') or arguments.get(name + '_path') + if base_artifact_path: + # Are there still cases where output channel has multiple artifacts? + for idx, artifact in enumerate(artifacts): + subdir = str(idx + 1) if idx > 0 else '' + artifact.uri = os.path.join(base_artifact_path, subdir) # Ends with '/' + + print('component instance: ' + str(component_class_instance)) + + # Workaround for a TFX+Beam bug to make DataflowRunner work. 
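+          # Presumably, the 'dev' suffix makes TFX's Beam dependency staging treat the
+          # installed TFX as an unreleased version and stage it as an extra package
+          # rather than depend on the released wheel (assumption; see the commit below).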
+ # Remove after the next release that has https://github.com/tensorflow/tfx/commit/ddb01c02426d59e8bd541e3fd3cbaaf68779b2df + import tfx + tfx.version.__version__ += 'dev' + + executor_context = base_executor.BaseExecutor.Context( + beam_pipeline_args=beam_pipeline_args, + tmp_dir=tempfile.gettempdir(), + unique_id='tfx_component', + ) + executor = component_class_instance.executor_spec.executor_class(executor_context) + executor.Do( + input_dict=input_dict, + output_dict=output_dict, + exec_properties=exec_properties, + ) + + return (output_examples_uri, ) + + import json + import argparse + _parser = argparse.ArgumentParser(prog='ImportExampleGen', description='') + _parser.add_argument("--input-uri", dest="input_uri", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--output-examples-uri", dest="output_examples_uri", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--input-config", dest="input_config", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--output-config", dest="output_config", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--custom-config", dest="custom_config", type=str, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--beam-pipeline-args", dest="beam_pipeline_args", type=json.loads, required=False, default=argparse.SUPPRESS) + _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1) + _parsed_args = vars(_parser.parse_args()) + _output_files = _parsed_args.pop("_output_paths", []) + + _outputs = ImportExampleGen(**_parsed_args) + + _output_serializers = [ + str, + + ] + + import os + for idx, output_file in enumerate(_output_files): + try: + os.makedirs(os.path.dirname(output_file)) + except OSError: + pass + with open(output_file, 'w') as f: + f.write(_output_serializers[idx](_outputs[idx])) + args: + - --input-uri + - {inputValue: input_uri} + - --output-examples-uri + - {inputValue: output_examples_uri} + - --input-config + - {inputValue: input_config} + - --output-config + - {inputValue: output_config} + - if: + cond: {isPresent: custom_config} + then: + - --custom-config + - {inputValue: custom_config} + - if: + cond: {isPresent: beam_pipeline_args} + then: + - --beam-pipeline-args + - {inputValue: beam_pipeline_args} + - '----output-paths' + - {outputPath: examples_uri} diff --git a/components/deprecated/tfx/ExampleValidator/component.py b/components/deprecated/tfx/ExampleValidator/component.py new file mode 100644 index 00000000000..331a4814c6e --- /dev/null +++ b/components/deprecated/tfx/ExampleValidator/component.py @@ -0,0 +1,108 @@ +from kfp.components import InputPath, OutputPath + + +def ExampleValidator( + statistics_path: InputPath('ExampleStatistics'), + schema_path: InputPath('Schema'), + + anomalies_path: OutputPath('ExampleAnomalies'), +): + """ + A TFX component to validate input examples. + + The ExampleValidator component uses [Tensorflow Data + Validation](https://www.tensorflow.org/tfx/data_validation) to + validate the statistics of some splits on input examples against a schema. + + The ExampleValidator component identifies anomalies in training and serving + data. The component can be configured to detect different classes of anomalies + in the data. It can: + - perform validity checks by comparing data statistics against a schema that + codifies expectations of the user. + - detect data drift by looking at a series of data. 
+ - detect changes in dataset-wide data (i.e., num_examples) across spans or + versions. + + Schema Based Example Validation + The ExampleValidator component identifies any anomalies in the example data by + comparing data statistics computed by the StatisticsGen component against a + schema. The schema codifies properties which the input data is expected to + satisfy, and is provided and maintained by the user. + + Please see https://www.tensorflow.org/tfx/data_validation for more details. + + Args: + statistics: A Channel of 'ExampleStatistics` type. This should contain at + least 'eval' split. Other splits are ignored currently. + schema: A Channel of "Schema' type. _required_ + Returns: + anomalies: Output channel of 'ExampleAnomalies' type. + + Either `stats` or `statistics` must be present in the arguments. + """ + from tfx.components.example_validator.component import ExampleValidator as component_class + + #Generated code + import json + import os + import tensorflow + from google.protobuf import json_format, message + from tfx.types import Artifact, channel_utils, artifact_utils + + arguments = locals().copy() + + component_class_args = {} + + for name, execution_parameter in component_class.SPEC_CLASS.PARAMETERS.items(): + argument_value_obj = argument_value = arguments.get(name, None) + if argument_value is None: + continue + parameter_type = execution_parameter.type + if isinstance(parameter_type, type) and issubclass(parameter_type, message.Message): # Maybe FIX: execution_parameter.type can also be a tuple + argument_value_obj = parameter_type() + json_format.Parse(argument_value, argument_value_obj) + component_class_args[name] = argument_value_obj + + for name, channel_parameter in component_class.SPEC_CLASS.INPUTS.items(): + artifact_path = arguments[name + '_path'] + if artifact_path: + artifact = channel_parameter.type() + artifact.uri = artifact_path + '/' # ? + if channel_parameter.type.PROPERTIES and 'split_names' in channel_parameter.type.PROPERTIES: + # Recovering splits + subdirs = tensorflow.io.gfile.listdir(artifact_path) + artifact.split_names = artifact_utils.encode_split_names(sorted(subdirs)) + component_class_args[name] = channel_utils.as_channel([artifact]) + + component_class_instance = component_class(**component_class_args) + + input_dict = {name: channel.get() for name, channel in component_class_instance.inputs.get_all().items()} + output_dict = {name: channel.get() for name, channel in component_class_instance.outputs.get_all().items()} + exec_properties = component_class_instance.exec_properties + + # Generating paths for output artifacts + for name, artifacts in output_dict.items(): + base_artifact_path = arguments[name + '_path'] + # Are there still cases where output channel has multiple artifacts? 
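+        # The first output artifact is written directly under the base path
+        # (subdir is ''); any additional artifacts get numbered subdirectories
+        # ('2', '3', ...).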
+ for idx, artifact in enumerate(artifacts): + subdir = str(idx + 1) if idx > 0 else '' + artifact.uri = os.path.join(base_artifact_path, subdir) # Ends with '/' + + print('component instance: ' + str(component_class_instance)) + + #executor = component_class.EXECUTOR_SPEC.executor_class() # Same + executor = component_class_instance.executor_spec.executor_class() + executor.Do( + input_dict=input_dict, + output_dict=output_dict, + exec_properties=exec_properties, + ) + + +if __name__ == '__main__': + import kfp + kfp.components.func_to_container_op( + ExampleValidator, + base_image='tensorflow/tfx:0.21.4', + output_component_file='component.yaml' + ) diff --git a/components/deprecated/tfx/ExampleValidator/component.yaml b/components/deprecated/tfx/ExampleValidator/component.yaml new file mode 100644 index 00000000000..d0cba093d7b --- /dev/null +++ b/components/deprecated/tfx/ExampleValidator/component.yaml @@ -0,0 +1,177 @@ +name: ExampleValidator +description: |- + A TFX component to validate input examples. + + The ExampleValidator component uses [Tensorflow Data + Validation](https://www.tensorflow.org/tfx/data_validation) to + validate the statistics of some splits on input examples against a schema. + + The ExampleValidator component identifies anomalies in training and serving + data. The component can be configured to detect different classes of anomalies + in the data. It can: + - perform validity checks by comparing data statistics against a schema that + codifies expectations of the user. + - detect data drift by looking at a series of data. + - detect changes in dataset-wide data (i.e., num_examples) across spans or + versions. + + Schema Based Example Validation + The ExampleValidator component identifies any anomalies in the example data by + comparing data statistics computed by the StatisticsGen component against a + schema. The schema codifies properties which the input data is expected to + satisfy, and is provided and maintained by the user. + + Please see https://www.tensorflow.org/tfx/data_validation for more details. + + Args: + statistics: A Channel of 'ExampleStatistics` type. This should contain at + least 'eval' split. Other splits are ignored currently. + schema: A Channel of "Schema' type. _required_ + Returns: + anomalies: Output channel of 'ExampleAnomalies' type. + + Either `stats` or `statistics` must be present in the arguments. +inputs: +- {name: statistics, type: ExampleStatistics} +- {name: schema, type: Schema} +outputs: +- {name: anomalies, type: ExampleAnomalies} +implementation: + container: + image: tensorflow/tfx:0.21.4 + command: + - python3 + - -u + - -c + - | + def _make_parent_dirs_and_return_path(file_path: str): + import os + os.makedirs(os.path.dirname(file_path), exist_ok=True) + return file_path + + def ExampleValidator( + statistics_path , + schema_path , + + anomalies_path , + ): + """ + A TFX component to validate input examples. + + The ExampleValidator component uses [Tensorflow Data + Validation](https://www.tensorflow.org/tfx/data_validation) to + validate the statistics of some splits on input examples against a schema. + + The ExampleValidator component identifies anomalies in training and serving + data. The component can be configured to detect different classes of anomalies + in the data. It can: + - perform validity checks by comparing data statistics against a schema that + codifies expectations of the user. + - detect data drift by looking at a series of data. 
+ - detect changes in dataset-wide data (i.e., num_examples) across spans or + versions. + + Schema Based Example Validation + The ExampleValidator component identifies any anomalies in the example data by + comparing data statistics computed by the StatisticsGen component against a + schema. The schema codifies properties which the input data is expected to + satisfy, and is provided and maintained by the user. + + Please see https://www.tensorflow.org/tfx/data_validation for more details. + + Args: + statistics: A Channel of 'ExampleStatistics` type. This should contain at + least 'eval' split. Other splits are ignored currently. + schema: A Channel of "Schema' type. _required_ + Returns: + anomalies: Output channel of 'ExampleAnomalies' type. + + Either `stats` or `statistics` must be present in the arguments. + """ + from tfx.components.example_validator.component import ExampleValidator as component_class + + #Generated code + import json + import os + import tensorflow + from google.protobuf import json_format, message + from tfx.types import Artifact, channel_utils, artifact_utils + + arguments = locals().copy() + + component_class_args = {} + + for name, execution_parameter in component_class.SPEC_CLASS.PARAMETERS.items(): + argument_value_obj = argument_value = arguments.get(name, None) + if argument_value is None: + continue + parameter_type = execution_parameter.type + if isinstance(parameter_type, type) and issubclass(parameter_type, message.Message): # Maybe FIX: execution_parameter.type can also be a tuple + argument_value_obj = parameter_type() + json_format.Parse(argument_value, argument_value_obj) + component_class_args[name] = argument_value_obj + + for name, channel_parameter in component_class.SPEC_CLASS.INPUTS.items(): + artifact_path = arguments[name + '_path'] + if artifact_path: + artifact = channel_parameter.type() + artifact.uri = artifact_path + '/' # ? + if channel_parameter.type.PROPERTIES and 'split_names' in channel_parameter.type.PROPERTIES: + # Recovering splits + subdirs = tensorflow.io.gfile.listdir(artifact_path) + artifact.split_names = artifact_utils.encode_split_names(sorted(subdirs)) + component_class_args[name] = channel_utils.as_channel([artifact]) + + component_class_instance = component_class(**component_class_args) + + input_dict = {name: channel.get() for name, channel in component_class_instance.inputs.get_all().items()} + output_dict = {name: channel.get() for name, channel in component_class_instance.outputs.get_all().items()} + exec_properties = component_class_instance.exec_properties + + # Generating paths for output artifacts + for name, artifacts in output_dict.items(): + base_artifact_path = arguments[name + '_path'] + # Are there still cases where output channel has multiple artifacts? 
+ for idx, artifact in enumerate(artifacts): + subdir = str(idx + 1) if idx > 0 else '' + artifact.uri = os.path.join(base_artifact_path, subdir) # Ends with '/' + + print('component instance: ' + str(component_class_instance)) + + #executor = component_class.EXECUTOR_SPEC.executor_class() # Same + executor = component_class_instance.executor_spec.executor_class() + executor.Do( + input_dict=input_dict, + output_dict=output_dict, + exec_properties=exec_properties, + ) + + import argparse + _parser = argparse.ArgumentParser(prog='ExampleValidator', description='A TFX component to validate input examples.\n\n The ExampleValidator component uses [Tensorflow Data\n Validation](https://www.tensorflow.org/tfx/data_validation) to\n validate the statistics of some splits on input examples against a schema.\n\n The ExampleValidator component identifies anomalies in training and serving\n data. The component can be configured to detect different classes of anomalies\n in the data. It can:\n - perform validity checks by comparing data statistics against a schema that\n codifies expectations of the user.\n - detect data drift by looking at a series of data.\n - detect changes in dataset-wide data (i.e., num_examples) across spans or\n versions.\n\n Schema Based Example Validation\n The ExampleValidator component identifies any anomalies in the example data by\n comparing data statistics computed by the StatisticsGen component against a\n schema. The schema codifies properties which the input data is expected to\n satisfy, and is provided and maintained by the user.\n\n Please see https://www.tensorflow.org/tfx/data_validation for more details.\n\n Args:\n statistics: A Channel of \'ExampleStatistics` type. This should contain at\n least \'eval\' split. Other splits are ignored currently.\n schema: A Channel of "Schema\' type. 
_required_\n Returns:\n anomalies: Output channel of \'ExampleAnomalies\' type.\n\n Either `stats` or `statistics` must be present in the arguments.') + _parser.add_argument("--statistics", dest="statistics_path", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--schema", dest="schema_path", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--anomalies", dest="anomalies_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) + _parsed_args = vars(_parser.parse_args()) + _output_files = _parsed_args.pop("_output_paths", []) + + _outputs = ExampleValidator(**_parsed_args) + + _output_serializers = [ + + ] + + import os + for idx, output_file in enumerate(_output_files): + try: + os.makedirs(os.path.dirname(output_file)) + except OSError: + pass + with open(output_file, 'w') as f: + f.write(_output_serializers[idx](_outputs[idx])) + args: + - --statistics + - {inputPath: statistics} + - --schema + - {inputPath: schema} + - --anomalies + - {outputPath: anomalies} diff --git a/components/deprecated/tfx/ExampleValidator/with_URI_IO/component.py b/components/deprecated/tfx/ExampleValidator/with_URI_IO/component.py new file mode 100644 index 00000000000..15b870d633d --- /dev/null +++ b/components/deprecated/tfx/ExampleValidator/with_URI_IO/component.py @@ -0,0 +1,96 @@ +# flake8: noqa + +from typing import NamedTuple + +def ExampleValidator( + statistics_uri: 'ExampleStatisticsUri', + schema_uri: 'SchemaUri', + output_anomalies_uri: 'ExampleAnomaliesUri', + beam_pipeline_args: list = None, +) -> NamedTuple('Outputs', [ + ('anomalies_uri', 'ExampleAnomaliesUri'), +]): + from tfx.components import ExampleValidator as component_class + + #Generated code + import json + import os + import tempfile + import tensorflow + from google.protobuf import json_format, message + from tfx.types import channel_utils, artifact_utils + from tfx.components.base import base_executor + + arguments = locals().copy() + + component_class_args = {} + + for name, execution_parameter in component_class.SPEC_CLASS.PARAMETERS.items(): + argument_value = arguments.get(name, None) + if argument_value is None: + continue + parameter_type = execution_parameter.type + if isinstance(parameter_type, type) and issubclass(parameter_type, message.Message): + argument_value_obj = parameter_type() + json_format.Parse(argument_value, argument_value_obj) + else: + argument_value_obj = argument_value + component_class_args[name] = argument_value_obj + + for name, channel_parameter in component_class.SPEC_CLASS.INPUTS.items(): + artifact_path = arguments.get(name + '_uri') or arguments.get(name + '_path') + if artifact_path: + artifact = channel_parameter.type() + artifact.uri = artifact_path.rstrip('/') + '/' # Some TFX components require that the artifact URIs end with a slash + if channel_parameter.type.PROPERTIES and 'split_names' in channel_parameter.type.PROPERTIES: + # Recovering splits + subdirs = tensorflow.io.gfile.listdir(artifact_path) + # Workaround for https://github.com/tensorflow/tensorflow/issues/39167 + subdirs = [subdir.rstrip('/') for subdir in subdirs] + artifact.split_names = artifact_utils.encode_split_names(sorted(subdirs)) + component_class_args[name] = channel_utils.as_channel([artifact]) + + component_class_instance = component_class(**component_class_args) + + input_dict = channel_utils.unwrap_channel_dict(component_class_instance.inputs.get_all()) + output_dict = 
channel_utils.unwrap_channel_dict(component_class_instance.outputs.get_all()) + exec_properties = component_class_instance.exec_properties + + # Generating paths for output artifacts + for name, artifacts in output_dict.items(): + base_artifact_path = arguments.get('output_' + name + '_uri') or arguments.get(name + '_path') + if base_artifact_path: + # Are there still cases where output channel has multiple artifacts? + for idx, artifact in enumerate(artifacts): + subdir = str(idx + 1) if idx > 0 else '' + artifact.uri = os.path.join(base_artifact_path, subdir) # Ends with '/' + + print('component instance: ' + str(component_class_instance)) + + # Workaround for a TFX+Beam bug to make DataflowRunner work. + # Remove after the next release that has https://github.com/tensorflow/tfx/commit/ddb01c02426d59e8bd541e3fd3cbaaf68779b2df + import tfx + tfx.version.__version__ += 'dev' + + executor_context = base_executor.BaseExecutor.Context( + beam_pipeline_args=beam_pipeline_args, + tmp_dir=tempfile.gettempdir(), + unique_id='tfx_component', + ) + executor = component_class_instance.executor_spec.executor_class(executor_context) + executor.Do( + input_dict=input_dict, + output_dict=output_dict, + exec_properties=exec_properties, + ) + + return (output_anomalies_uri, ) + + +if __name__ == '__main__': + import kfp + kfp.components.create_component_from_func( + ExampleValidator, + base_image='tensorflow/tfx:0.21.4', + output_component_file='component.yaml' + ) diff --git a/components/deprecated/tfx/ExampleValidator/with_URI_IO/component.yaml b/components/deprecated/tfx/ExampleValidator/with_URI_IO/component.yaml new file mode 100644 index 00000000000..457c40a447a --- /dev/null +++ b/components/deprecated/tfx/ExampleValidator/with_URI_IO/component.yaml @@ -0,0 +1,138 @@ +name: ExampleValidator +inputs: +- {name: statistics_uri, type: ExampleStatisticsUri} +- {name: schema_uri, type: SchemaUri} +- {name: output_anomalies_uri, type: ExampleAnomaliesUri} +- {name: beam_pipeline_args, type: JsonArray, optional: true} +outputs: +- {name: anomalies_uri, type: ExampleAnomaliesUri} +implementation: + container: + image: tensorflow/tfx:0.21.4 + command: + - python3 + - -u + - -c + - | + def ExampleValidator( + statistics_uri, + schema_uri, + output_anomalies_uri, + beam_pipeline_args = None, + ): + from tfx.components import ExampleValidator as component_class + + #Generated code + import json + import os + import tempfile + import tensorflow + from google.protobuf import json_format, message + from tfx.types import channel_utils, artifact_utils + from tfx.components.base import base_executor + + arguments = locals().copy() + + component_class_args = {} + + for name, execution_parameter in component_class.SPEC_CLASS.PARAMETERS.items(): + argument_value = arguments.get(name, None) + if argument_value is None: + continue + parameter_type = execution_parameter.type + if isinstance(parameter_type, type) and issubclass(parameter_type, message.Message): + argument_value_obj = parameter_type() + json_format.Parse(argument_value, argument_value_obj) + else: + argument_value_obj = argument_value + component_class_args[name] = argument_value_obj + + for name, channel_parameter in component_class.SPEC_CLASS.INPUTS.items(): + artifact_path = arguments.get(name + '_uri') or arguments.get(name + '_path') + if artifact_path: + artifact = channel_parameter.type() + artifact.uri = artifact_path.rstrip('/') + '/' # Some TFX components require that the artifact URIs end with a slash + if channel_parameter.type.PROPERTIES and 
'split_names' in channel_parameter.type.PROPERTIES: + # Recovering splits + subdirs = tensorflow.io.gfile.listdir(artifact_path) + # Workaround for https://github.com/tensorflow/tensorflow/issues/39167 + subdirs = [subdir.rstrip('/') for subdir in subdirs] + artifact.split_names = artifact_utils.encode_split_names(sorted(subdirs)) + component_class_args[name] = channel_utils.as_channel([artifact]) + + component_class_instance = component_class(**component_class_args) + + input_dict = channel_utils.unwrap_channel_dict(component_class_instance.inputs.get_all()) + output_dict = channel_utils.unwrap_channel_dict(component_class_instance.outputs.get_all()) + exec_properties = component_class_instance.exec_properties + + # Generating paths for output artifacts + for name, artifacts in output_dict.items(): + base_artifact_path = arguments.get('output_' + name + '_uri') or arguments.get(name + '_path') + if base_artifact_path: + # Are there still cases where output channel has multiple artifacts? + for idx, artifact in enumerate(artifacts): + subdir = str(idx + 1) if idx > 0 else '' + artifact.uri = os.path.join(base_artifact_path, subdir) # Ends with '/' + + print('component instance: ' + str(component_class_instance)) + + # Workaround for a TFX+Beam bug to make DataflowRunner work. + # Remove after the next release that has https://github.com/tensorflow/tfx/commit/ddb01c02426d59e8bd541e3fd3cbaaf68779b2df + import tfx + tfx.version.__version__ += 'dev' + + executor_context = base_executor.BaseExecutor.Context( + beam_pipeline_args=beam_pipeline_args, + tmp_dir=tempfile.gettempdir(), + unique_id='tfx_component', + ) + executor = component_class_instance.executor_spec.executor_class(executor_context) + executor.Do( + input_dict=input_dict, + output_dict=output_dict, + exec_properties=exec_properties, + ) + + return (output_anomalies_uri, ) + + import json + import argparse + _parser = argparse.ArgumentParser(prog='ExampleValidator', description='') + _parser.add_argument("--statistics-uri", dest="statistics_uri", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--schema-uri", dest="schema_uri", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--output-anomalies-uri", dest="output_anomalies_uri", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--beam-pipeline-args", dest="beam_pipeline_args", type=json.loads, required=False, default=argparse.SUPPRESS) + _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1) + _parsed_args = vars(_parser.parse_args()) + _output_files = _parsed_args.pop("_output_paths", []) + + _outputs = ExampleValidator(**_parsed_args) + + _output_serializers = [ + str, + + ] + + import os + for idx, output_file in enumerate(_output_files): + try: + os.makedirs(os.path.dirname(output_file)) + except OSError: + pass + with open(output_file, 'w') as f: + f.write(_output_serializers[idx](_outputs[idx])) + args: + - --statistics-uri + - {inputValue: statistics_uri} + - --schema-uri + - {inputValue: schema_uri} + - --output-anomalies-uri + - {inputValue: output_anomalies_uri} + - if: + cond: {isPresent: beam_pipeline_args} + then: + - --beam-pipeline-args + - {inputValue: beam_pipeline_args} + - '----output-paths' + - {outputPath: anomalies_uri} diff --git a/components/deprecated/tfx/README.md b/components/deprecated/tfx/README.md new file mode 100644 index 00000000000..24ebd681129 --- /dev/null +++ b/components/deprecated/tfx/README.md @@ -0,0 +1,25 @@ +## Versions of TFX 
components that can be used with KFP SDK
+
+Disclaimer: The components in this directory are unofficial and are maintained by the KFP team, not the TFX team.
+
+If you experience any issues with these components, please create a new issue in the Kubeflow Pipelines repo and assign it to Ark-kun.
+
+These components were created to let users run TFX components in their KFP pipelines and mix KFP and TFX components.
+
+If your pipeline uses only TFX components, please use the official [TFX SDK](https://www.tensorflow.org/tfx/tutorials/tfx/cloud-ai-platform-pipelines).
+
+See the [sample pipeline](_samples/TFX_pipeline.ipynb), which showcases most of the components.
+
+The components come in two flavors: file-based I/O and URI-based I/O. With file-based I/O, the system takes care of storing output data and making it available to downstream components.
+With URI-based I/O, only the URIs pointing to the data are passed between components, and the pipeline author is responsible for providing unique URIs for all output artifacts of the components in the pipeline (for example, a unique storage directory per pipeline run).
+
+### Google Cloud Dataflow
+
+The TFX components that use URI-based I/O accept the `beam_pipeline_args` parameter, which makes it possible to set the runner to `DataflowRunner` and execute the components on Google Cloud Dataflow.
+See the [sample TFX on Dataflow pipeline](_samples/TFX_Dataflow_pipeline.ipynb).
+
+### Aspects and limitations
+
+* These components use the official TFX container image.
+* These components run the executors and component classes of the official TFX components.
+* These components do not execute TFX [drivers](https://www.tensorflow.org/tfx/api_docs/python/tfx/components/base/base_driver), so they do not log metadata themselves (the metadata is logged by the Metadata Writer service instead). The properties of artifacts are currently not logged.
diff --git a/components/deprecated/tfx/SchemaGen/component.py b/components/deprecated/tfx/SchemaGen/component.py
new file mode 100644
index 00000000000..d5dbf1b2ad6
--- /dev/null
+++ b/components/deprecated/tfx/SchemaGen/component.py
@@ -0,0 +1,88 @@
+from kfp.components import InputPath, OutputPath
+
+
+def SchemaGen(
+    statistics_path: InputPath('ExampleStatistics'),
+    schema_path: OutputPath('Schema'),
+    infer_feature_shape: bool = None, # ? False
+):
+    """Constructs a SchemaGen component.
+
+    Args:
+      statistics: A Channel of `ExampleStatistics` type (required if spec is not
+        passed). This should contain at least a `train` split. Other splits are
+        currently ignored. _required_
+      infer_feature_shape: Boolean value indicating
+        whether or not to infer the shape of features. If the feature shape is
+        not inferred, downstream Tensorflow Transform component using the schema
+        will parse input as tf.SparseTensor.
+    Returns:
+      output: Output `Schema` channel for schema result.
+ """ + from tfx.components.schema_gen.component import SchemaGen as component_class + + #Generated code + import json + import os + import tensorflow + from google.protobuf import json_format, message + from tfx.types import Artifact, channel_utils, artifact_utils + + arguments = locals().copy() + + component_class_args = {} + + for name, execution_parameter in component_class.SPEC_CLASS.PARAMETERS.items(): + argument_value_obj = argument_value = arguments.get(name, None) + if argument_value is None: + continue + parameter_type = execution_parameter.type + if isinstance(parameter_type, type) and issubclass(parameter_type, message.Message): # Maybe FIX: execution_parameter.type can also be a tuple + argument_value_obj = parameter_type() + json_format.Parse(argument_value, argument_value_obj) + component_class_args[name] = argument_value_obj + + for name, channel_parameter in component_class.SPEC_CLASS.INPUTS.items(): + artifact_path = arguments[name + '_path'] + if artifact_path: + artifact = channel_parameter.type() + artifact.uri = artifact_path + '/' # ? + if channel_parameter.type.PROPERTIES and 'split_names' in channel_parameter.type.PROPERTIES: + # Recovering splits + subdirs = tensorflow.io.gfile.listdir(artifact_path) + artifact.split_names = artifact_utils.encode_split_names(sorted(subdirs)) + component_class_args[name] = channel_utils.as_channel([artifact]) + + component_class_instance = component_class(**component_class_args) + + input_dict = {name: channel.get() for name, channel in component_class_instance.inputs.get_all().items()} + output_dict = {name: channel.get() for name, channel in component_class_instance.outputs.get_all().items()} + exec_properties = component_class_instance.exec_properties + + # Generating paths for output artifacts + for name, artifacts in output_dict.items(): + base_artifact_path = arguments[name + '_path'] + # Are there still cases where output channel has multiple artifacts? + for idx, artifact in enumerate(artifacts): + subdir = str(idx + 1) if idx > 0 else '' + artifact.uri = os.path.join(base_artifact_path, subdir) # Ends with '/' + + print('component instance: ' + str(component_class_instance)) + + #executor = component_class.EXECUTOR_SPEC.executor_class() # Same + executor = component_class_instance.executor_spec.executor_class() + executor.Do( + input_dict=input_dict, + output_dict=output_dict, + exec_properties=exec_properties, + ) + #return (output_path,) + + +if __name__ == '__main__': + import kfp + kfp.components.func_to_container_op( + SchemaGen, + base_image='tensorflow/tfx:0.21.4', + output_component_file='component.yaml' + ) diff --git a/components/deprecated/tfx/SchemaGen/component.yaml b/components/deprecated/tfx/SchemaGen/component.yaml new file mode 100644 index 00000000000..b865c4ad84e --- /dev/null +++ b/components/deprecated/tfx/SchemaGen/component.yaml @@ -0,0 +1,144 @@ +name: SchemaGen +description: |- + Constructs a SchemaGen component. + + Args: + statistics: A Channel of `ExampleStatistics` type (required if spec is not + passed). This should contain at least a `train` split. Other splits are + currently ignored. _required_ + infer_feature_shape: Boolean value indicating + whether or not to infer the shape of features. If the feature shape is + not inferred, downstream Tensorflow Transform component using the schema + will parse input as tf.SparseTensor. + Returns: + output: Output `Schema` channel for schema result. 
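+# Note: this file is generated from SchemaGen/component.py (its __main__ block calls
+# kfp.components.func_to_container_op with output_component_file='component.yaml'),
+# so changes should be made in the .py file and the .yaml regenerated.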
+inputs: +- {name: statistics, type: ExampleStatistics} +- {name: infer_feature_shape, type: Boolean, optional: true} +outputs: +- {name: schema, type: Schema} +implementation: + container: + image: tensorflow/tfx:0.21.4 + command: + - python3 + - -u + - -c + - | + def _make_parent_dirs_and_return_path(file_path: str): + import os + os.makedirs(os.path.dirname(file_path), exist_ok=True) + return file_path + + def SchemaGen( + statistics_path , + schema_path , + infer_feature_shape = None, # ? False + ): + """Constructs a SchemaGen component. + + Args: + statistics: A Channel of `ExampleStatistics` type (required if spec is not + passed). This should contain at least a `train` split. Other splits are + currently ignored. _required_ + infer_feature_shape: Boolean value indicating + whether or not to infer the shape of features. If the feature shape is + not inferred, downstream Tensorflow Transform component using the schema + will parse input as tf.SparseTensor. + Returns: + output: Output `Schema` channel for schema result. + """ + from tfx.components.schema_gen.component import SchemaGen as component_class + + #Generated code + import json + import os + import tensorflow + from google.protobuf import json_format, message + from tfx.types import Artifact, channel_utils, artifact_utils + + arguments = locals().copy() + + component_class_args = {} + + for name, execution_parameter in component_class.SPEC_CLASS.PARAMETERS.items(): + argument_value_obj = argument_value = arguments.get(name, None) + if argument_value is None: + continue + parameter_type = execution_parameter.type + if isinstance(parameter_type, type) and issubclass(parameter_type, message.Message): # Maybe FIX: execution_parameter.type can also be a tuple + argument_value_obj = parameter_type() + json_format.Parse(argument_value, argument_value_obj) + component_class_args[name] = argument_value_obj + + for name, channel_parameter in component_class.SPEC_CLASS.INPUTS.items(): + artifact_path = arguments[name + '_path'] + if artifact_path: + artifact = channel_parameter.type() + artifact.uri = artifact_path + '/' # ? + if channel_parameter.type.PROPERTIES and 'split_names' in channel_parameter.type.PROPERTIES: + # Recovering splits + subdirs = tensorflow.io.gfile.listdir(artifact_path) + artifact.split_names = artifact_utils.encode_split_names(sorted(subdirs)) + component_class_args[name] = channel_utils.as_channel([artifact]) + + component_class_instance = component_class(**component_class_args) + + input_dict = {name: channel.get() for name, channel in component_class_instance.inputs.get_all().items()} + output_dict = {name: channel.get() for name, channel in component_class_instance.outputs.get_all().items()} + exec_properties = component_class_instance.exec_properties + + # Generating paths for output artifacts + for name, artifacts in output_dict.items(): + base_artifact_path = arguments[name + '_path'] + # Are there still cases where output channel has multiple artifacts? 
+ for idx, artifact in enumerate(artifacts): + subdir = str(idx + 1) if idx > 0 else '' + artifact.uri = os.path.join(base_artifact_path, subdir) # Ends with '/' + + print('component instance: ' + str(component_class_instance)) + + #executor = component_class.EXECUTOR_SPEC.executor_class() # Same + executor = component_class_instance.executor_spec.executor_class() + executor.Do( + input_dict=input_dict, + output_dict=output_dict, + exec_properties=exec_properties, + ) + + def _deserialize_bool(s) -> bool: + from distutils.util import strtobool + return strtobool(s) == 1 + + import argparse + _parser = argparse.ArgumentParser(prog='SchemaGen', description='Constructs a SchemaGen component.\n\n Args:\n statistics: A Channel of `ExampleStatistics` type (required if spec is not\n passed). This should contain at least a `train` split. Other splits are\n currently ignored. _required_\n infer_feature_shape: Boolean value indicating\n whether or not to infer the shape of features. If the feature shape is\n not inferred, downstream Tensorflow Transform component using the schema\n will parse input as tf.SparseTensor.\n Returns:\n output: Output `Schema` channel for schema result.') + _parser.add_argument("--statistics", dest="statistics_path", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--infer-feature-shape", dest="infer_feature_shape", type=_deserialize_bool, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--schema", dest="schema_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) + _parsed_args = vars(_parser.parse_args()) + _output_files = _parsed_args.pop("_output_paths", []) + + _outputs = SchemaGen(**_parsed_args) + + _output_serializers = [ + + ] + + import os + for idx, output_file in enumerate(_output_files): + try: + os.makedirs(os.path.dirname(output_file)) + except OSError: + pass + with open(output_file, 'w') as f: + f.write(_output_serializers[idx](_outputs[idx])) + args: + - --statistics + - {inputPath: statistics} + - if: + cond: {isPresent: infer_feature_shape} + then: + - --infer-feature-shape + - {inputValue: infer_feature_shape} + - --schema + - {outputPath: schema} diff --git a/components/deprecated/tfx/SchemaGen/with_URI_IO/component.py b/components/deprecated/tfx/SchemaGen/with_URI_IO/component.py new file mode 100644 index 00000000000..d5be86307a1 --- /dev/null +++ b/components/deprecated/tfx/SchemaGen/with_URI_IO/component.py @@ -0,0 +1,96 @@ +# flake8: noqa + +from typing import NamedTuple + +def SchemaGen( + statistics_uri: 'ExampleStatisticsUri', + output_schema_uri: 'SchemaUri', + infer_feature_shape: bool = None, + beam_pipeline_args: list = None, +) -> NamedTuple('Outputs', [ + ('schema_uri', 'SchemaUri'), +]): + from tfx.components import SchemaGen as component_class + + #Generated code + import json + import os + import tempfile + import tensorflow + from google.protobuf import json_format, message + from tfx.types import channel_utils, artifact_utils + from tfx.components.base import base_executor + + arguments = locals().copy() + + component_class_args = {} + + for name, execution_parameter in component_class.SPEC_CLASS.PARAMETERS.items(): + argument_value = arguments.get(name, None) + if argument_value is None: + continue + parameter_type = execution_parameter.type + if isinstance(parameter_type, type) and issubclass(parameter_type, message.Message): + argument_value_obj = parameter_type() + json_format.Parse(argument_value, argument_value_obj) + else: + argument_value_obj = 
argument_value + component_class_args[name] = argument_value_obj + + for name, channel_parameter in component_class.SPEC_CLASS.INPUTS.items(): + artifact_path = arguments.get(name + '_uri') or arguments.get(name + '_path') + if artifact_path: + artifact = channel_parameter.type() + artifact.uri = artifact_path.rstrip('/') + '/' # Some TFX components require that the artifact URIs end with a slash + if channel_parameter.type.PROPERTIES and 'split_names' in channel_parameter.type.PROPERTIES: + # Recovering splits + subdirs = tensorflow.io.gfile.listdir(artifact_path) + # Workaround for https://github.com/tensorflow/tensorflow/issues/39167 + subdirs = [subdir.rstrip('/') for subdir in subdirs] + artifact.split_names = artifact_utils.encode_split_names(sorted(subdirs)) + component_class_args[name] = channel_utils.as_channel([artifact]) + + component_class_instance = component_class(**component_class_args) + + input_dict = channel_utils.unwrap_channel_dict(component_class_instance.inputs.get_all()) + output_dict = channel_utils.unwrap_channel_dict(component_class_instance.outputs.get_all()) + exec_properties = component_class_instance.exec_properties + + # Generating paths for output artifacts + for name, artifacts in output_dict.items(): + base_artifact_path = arguments.get('output_' + name + '_uri') or arguments.get(name + '_path') + if base_artifact_path: + # Are there still cases where output channel has multiple artifacts? + for idx, artifact in enumerate(artifacts): + subdir = str(idx + 1) if idx > 0 else '' + artifact.uri = os.path.join(base_artifact_path, subdir) # Ends with '/' + + print('component instance: ' + str(component_class_instance)) + + # Workaround for a TFX+Beam bug to make DataflowRunner work. + # Remove after the next release that has https://github.com/tensorflow/tfx/commit/ddb01c02426d59e8bd541e3fd3cbaaf68779b2df + import tfx + tfx.version.__version__ += 'dev' + + executor_context = base_executor.BaseExecutor.Context( + beam_pipeline_args=beam_pipeline_args, + tmp_dir=tempfile.gettempdir(), + unique_id='tfx_component', + ) + executor = component_class_instance.executor_spec.executor_class(executor_context) + executor.Do( + input_dict=input_dict, + output_dict=output_dict, + exec_properties=exec_properties, + ) + + return (output_schema_uri, ) + + +if __name__ == '__main__': + import kfp + kfp.components.create_component_from_func( + SchemaGen, + base_image='tensorflow/tfx:0.21.4', + output_component_file='component.yaml' + ) diff --git a/components/deprecated/tfx/SchemaGen/with_URI_IO/component.yaml b/components/deprecated/tfx/SchemaGen/with_URI_IO/component.yaml new file mode 100644 index 00000000000..7eb623ab79a --- /dev/null +++ b/components/deprecated/tfx/SchemaGen/with_URI_IO/component.yaml @@ -0,0 +1,145 @@ +name: SchemaGen +inputs: +- {name: statistics_uri, type: ExampleStatisticsUri} +- {name: output_schema_uri, type: SchemaUri} +- {name: infer_feature_shape, type: Boolean, optional: true} +- {name: beam_pipeline_args, type: JsonArray, optional: true} +outputs: +- {name: schema_uri, type: SchemaUri} +implementation: + container: + image: tensorflow/tfx:0.21.4 + command: + - python3 + - -u + - -c + - | + def SchemaGen( + statistics_uri, + output_schema_uri, + infer_feature_shape = None, + beam_pipeline_args = None, + ): + from tfx.components import SchemaGen as component_class + + #Generated code + import json + import os + import tempfile + import tensorflow + from google.protobuf import json_format, message + from tfx.types import channel_utils, artifact_utils 
+ from tfx.components.base import base_executor + + arguments = locals().copy() + + component_class_args = {} + + for name, execution_parameter in component_class.SPEC_CLASS.PARAMETERS.items(): + argument_value = arguments.get(name, None) + if argument_value is None: + continue + parameter_type = execution_parameter.type + if isinstance(parameter_type, type) and issubclass(parameter_type, message.Message): + argument_value_obj = parameter_type() + json_format.Parse(argument_value, argument_value_obj) + else: + argument_value_obj = argument_value + component_class_args[name] = argument_value_obj + + for name, channel_parameter in component_class.SPEC_CLASS.INPUTS.items(): + artifact_path = arguments.get(name + '_uri') or arguments.get(name + '_path') + if artifact_path: + artifact = channel_parameter.type() + artifact.uri = artifact_path.rstrip('/') + '/' # Some TFX components require that the artifact URIs end with a slash + if channel_parameter.type.PROPERTIES and 'split_names' in channel_parameter.type.PROPERTIES: + # Recovering splits + subdirs = tensorflow.io.gfile.listdir(artifact_path) + # Workaround for https://github.com/tensorflow/tensorflow/issues/39167 + subdirs = [subdir.rstrip('/') for subdir in subdirs] + artifact.split_names = artifact_utils.encode_split_names(sorted(subdirs)) + component_class_args[name] = channel_utils.as_channel([artifact]) + + component_class_instance = component_class(**component_class_args) + + input_dict = channel_utils.unwrap_channel_dict(component_class_instance.inputs.get_all()) + output_dict = channel_utils.unwrap_channel_dict(component_class_instance.outputs.get_all()) + exec_properties = component_class_instance.exec_properties + + # Generating paths for output artifacts + for name, artifacts in output_dict.items(): + base_artifact_path = arguments.get('output_' + name + '_uri') or arguments.get(name + '_path') + if base_artifact_path: + # Are there still cases where output channel has multiple artifacts? + for idx, artifact in enumerate(artifacts): + subdir = str(idx + 1) if idx > 0 else '' + artifact.uri = os.path.join(base_artifact_path, subdir) # Ends with '/' + + print('component instance: ' + str(component_class_instance)) + + # Workaround for a TFX+Beam bug to make DataflowRunner work. 
+ # Remove after the next release that has https://github.com/tensorflow/tfx/commit/ddb01c02426d59e8bd541e3fd3cbaaf68779b2df + import tfx + tfx.version.__version__ += 'dev' + + executor_context = base_executor.BaseExecutor.Context( + beam_pipeline_args=beam_pipeline_args, + tmp_dir=tempfile.gettempdir(), + unique_id='tfx_component', + ) + executor = component_class_instance.executor_spec.executor_class(executor_context) + executor.Do( + input_dict=input_dict, + output_dict=output_dict, + exec_properties=exec_properties, + ) + + return (output_schema_uri, ) + + def _deserialize_bool(s) -> bool: + from distutils.util import strtobool + return strtobool(s) == 1 + + import json + import argparse + _parser = argparse.ArgumentParser(prog='SchemaGen', description='') + _parser.add_argument("--statistics-uri", dest="statistics_uri", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--output-schema-uri", dest="output_schema_uri", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--infer-feature-shape", dest="infer_feature_shape", type=_deserialize_bool, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--beam-pipeline-args", dest="beam_pipeline_args", type=json.loads, required=False, default=argparse.SUPPRESS) + _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1) + _parsed_args = vars(_parser.parse_args()) + _output_files = _parsed_args.pop("_output_paths", []) + + _outputs = SchemaGen(**_parsed_args) + + _output_serializers = [ + str, + + ] + + import os + for idx, output_file in enumerate(_output_files): + try: + os.makedirs(os.path.dirname(output_file)) + except OSError: + pass + with open(output_file, 'w') as f: + f.write(_output_serializers[idx](_outputs[idx])) + args: + - --statistics-uri + - {inputValue: statistics_uri} + - --output-schema-uri + - {inputValue: output_schema_uri} + - if: + cond: {isPresent: infer_feature_shape} + then: + - --infer-feature-shape + - {inputValue: infer_feature_shape} + - if: + cond: {isPresent: beam_pipeline_args} + then: + - --beam-pipeline-args + - {inputValue: beam_pipeline_args} + - '----output-paths' + - {outputPath: schema_uri} diff --git a/components/deprecated/tfx/StatisticsGen/component.py b/components/deprecated/tfx/StatisticsGen/component.py new file mode 100644 index 00000000000..7cc84ad7cf3 --- /dev/null +++ b/components/deprecated/tfx/StatisticsGen/component.py @@ -0,0 +1,98 @@ +from kfp.components import InputPath, OutputPath + + +def StatisticsGen( + examples_path: InputPath('Examples'), + #examples_path: 'ExamplesUri', + + statistics_path: OutputPath('ExampleStatistics'), + #statistics_path: 'ExampleStatisticsUri', + + stats_options: {'JsonObject': {'data_type': 'proto:tensorflow_data_validation.StatsOptions'}} = None, + + schema_path: InputPath('Schema') = None, + #schema_path: 'SchemaUri' = None, +): +#) -> NamedTuple('Outputs', [ +# ('statistics', 'ExampleStatisticsUri'), +#]): + """Construct a StatisticsGen component. + + Args: + examples: A Channel of `ExamplesPath` type, likely generated by the + [ExampleGen component](https://www.tensorflow.org/tfx/guide/examplegen). + This needs to contain two splits labeled `train` and `eval`. _required_ + schema: A `Schema` channel to use for automatically configuring the value + of stats options passed to TFDV. + + Returns: + statistics: `ExampleStatistics` channel for statistics of each split + provided in the input examples. 
+ """ + from tfx.components.statistics_gen.component import StatisticsGen as component_class + + #Generated code + import json + import os + import tensorflow + from google.protobuf import json_format, message + from tfx.types import Artifact, channel_utils, artifact_utils + + arguments = locals().copy() + + component_class_args = {} + + for name, execution_parameter in component_class.SPEC_CLASS.PARAMETERS.items(): + argument_value_obj = argument_value = arguments.get(name, None) + if argument_value is None: + continue + parameter_type = execution_parameter.type + if isinstance(parameter_type, type) and issubclass(parameter_type, message.Message): # Maybe FIX: execution_parameter.type can also be a tuple + argument_value_obj = parameter_type() + json_format.Parse(argument_value, argument_value_obj) + component_class_args[name] = argument_value_obj + + for name, channel_parameter in component_class.SPEC_CLASS.INPUTS.items(): + artifact_path = arguments[name + '_path'] + if artifact_path: + artifact = channel_parameter.type() + artifact.uri = artifact_path + '/' # ? + if channel_parameter.type.PROPERTIES and 'split_names' in channel_parameter.type.PROPERTIES: + # Recovering splits + subdirs = tensorflow.io.gfile.listdir(artifact_path) + artifact.split_names = artifact_utils.encode_split_names(sorted(subdirs)) + component_class_args[name] = channel_utils.as_channel([artifact]) + + component_class_instance = component_class(**component_class_args) + + input_dict = {name: channel.get() for name, channel in component_class_instance.inputs.get_all().items()} + output_dict = {name: channel.get() for name, channel in component_class_instance.outputs.get_all().items()} + exec_properties = component_class_instance.exec_properties + + # Generating paths for output artifacts + for name, artifacts in output_dict.items(): + base_artifact_path = arguments[name + '_path'] + # Are there still cases where output channel has multiple artifacts? + for idx, artifact in enumerate(artifacts): + subdir = str(idx + 1) if idx > 0 else '' + artifact.uri = os.path.join(base_artifact_path, subdir) # Ends with '/' + + print('component instance: ' + str(component_class_instance)) + + #executor = component_class.EXECUTOR_SPEC.executor_class() # Same + executor = component_class_instance.executor_spec.executor_class() + executor.Do( + input_dict=input_dict, + output_dict=output_dict, + exec_properties=exec_properties, + ) + #return (statistics_path,) + + +if __name__ == '__main__': + import kfp + kfp.components.func_to_container_op( + StatisticsGen, + base_image='tensorflow/tfx:0.21.4', + output_component_file='component.yaml' + ) diff --git a/components/deprecated/tfx/StatisticsGen/component.yaml b/components/deprecated/tfx/StatisticsGen/component.yaml new file mode 100644 index 00000000000..861b8c923f0 --- /dev/null +++ b/components/deprecated/tfx/StatisticsGen/component.yaml @@ -0,0 +1,160 @@ +name: StatisticsGen +description: |- + Construct a StatisticsGen component. + + Args: + examples: A Channel of `ExamplesPath` type, likely generated by the + [ExampleGen component](https://www.tensorflow.org/tfx/guide/examplegen). + This needs to contain two splits labeled `train` and `eval`. _required_ + schema: A `Schema` channel to use for automatically configuring the value + of stats options passed to TFDV. + + Returns: + statistics: `ExampleStatistics` channel for statistics of each split + provided in the input examples. 
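+# As the description above notes, the optional schema input is used to
+# automatically configure the stats options passed to TFDV.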
+inputs: +- {name: examples, type: Examples} +- name: stats_options + type: + JsonObject: {data_type: 'proto:tensorflow_data_validation.StatsOptions'} + optional: true +- {name: schema, type: Schema, optional: true} +outputs: +- {name: statistics, type: ExampleStatistics} +implementation: + container: + image: tensorflow/tfx:0.21.4 + command: + - python3 + - -u + - -c + - | + def _make_parent_dirs_and_return_path(file_path: str): + import os + os.makedirs(os.path.dirname(file_path), exist_ok=True) + return file_path + + def StatisticsGen( + examples_path , + #examples_path: 'ExamplesUri', + + statistics_path , + #statistics_path: 'ExampleStatisticsUri', + + stats_options = None, + + schema_path = None, + #schema_path: 'SchemaUri' = None, + ): + #) -> NamedTuple('Outputs', [ + # ('statistics', 'ExampleStatisticsUri'), + #]): + """Construct a StatisticsGen component. + + Args: + examples: A Channel of `ExamplesPath` type, likely generated by the + [ExampleGen component](https://www.tensorflow.org/tfx/guide/examplegen). + This needs to contain two splits labeled `train` and `eval`. _required_ + schema: A `Schema` channel to use for automatically configuring the value + of stats options passed to TFDV. + + Returns: + statistics: `ExampleStatistics` channel for statistics of each split + provided in the input examples. + """ + from tfx.components.statistics_gen.component import StatisticsGen as component_class + + #Generated code + import json + import os + import tensorflow + from google.protobuf import json_format, message + from tfx.types import Artifact, channel_utils, artifact_utils + + arguments = locals().copy() + + component_class_args = {} + + for name, execution_parameter in component_class.SPEC_CLASS.PARAMETERS.items(): + argument_value_obj = argument_value = arguments.get(name, None) + if argument_value is None: + continue + parameter_type = execution_parameter.type + if isinstance(parameter_type, type) and issubclass(parameter_type, message.Message): # Maybe FIX: execution_parameter.type can also be a tuple + argument_value_obj = parameter_type() + json_format.Parse(argument_value, argument_value_obj) + component_class_args[name] = argument_value_obj + + for name, channel_parameter in component_class.SPEC_CLASS.INPUTS.items(): + artifact_path = arguments[name + '_path'] + if artifact_path: + artifact = channel_parameter.type() + artifact.uri = artifact_path + '/' # ? + if channel_parameter.type.PROPERTIES and 'split_names' in channel_parameter.type.PROPERTIES: + # Recovering splits + subdirs = tensorflow.io.gfile.listdir(artifact_path) + artifact.split_names = artifact_utils.encode_split_names(sorted(subdirs)) + component_class_args[name] = channel_utils.as_channel([artifact]) + + component_class_instance = component_class(**component_class_args) + + input_dict = {name: channel.get() for name, channel in component_class_instance.inputs.get_all().items()} + output_dict = {name: channel.get() for name, channel in component_class_instance.outputs.get_all().items()} + exec_properties = component_class_instance.exec_properties + + # Generating paths for output artifacts + for name, artifacts in output_dict.items(): + base_artifact_path = arguments[name + '_path'] + # Are there still cases where output channel has multiple artifacts? 
+ for idx, artifact in enumerate(artifacts): + subdir = str(idx + 1) if idx > 0 else '' + artifact.uri = os.path.join(base_artifact_path, subdir) # Ends with '/' + + print('component instance: ' + str(component_class_instance)) + + #executor = component_class.EXECUTOR_SPEC.executor_class() # Same + executor = component_class_instance.executor_spec.executor_class() + executor.Do( + input_dict=input_dict, + output_dict=output_dict, + exec_properties=exec_properties, + ) + + import argparse + _parser = argparse.ArgumentParser(prog='StatisticsGen', description='Construct a StatisticsGen component.\n\n Args:\n examples: A Channel of `ExamplesPath` type, likely generated by the\n [ExampleGen component](https://www.tensorflow.org/tfx/guide/examplegen).\n This needs to contain two splits labeled `train` and `eval`. _required_\n schema: A `Schema` channel to use for automatically configuring the value\n of stats options passed to TFDV.\n\n Returns:\n statistics: `ExampleStatistics` channel for statistics of each split\n provided in the input examples.') + _parser.add_argument("--examples", dest="examples_path", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--stats-options", dest="stats_options", type=str, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--schema", dest="schema_path", type=str, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--statistics", dest="statistics_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) + _parsed_args = vars(_parser.parse_args()) + _output_files = _parsed_args.pop("_output_paths", []) + + _outputs = StatisticsGen(**_parsed_args) + + _output_serializers = [ + + ] + + import os + for idx, output_file in enumerate(_output_files): + try: + os.makedirs(os.path.dirname(output_file)) + except OSError: + pass + with open(output_file, 'w') as f: + f.write(_output_serializers[idx](_outputs[idx])) + args: + - --examples + - {inputPath: examples} + - if: + cond: {isPresent: stats_options} + then: + - --stats-options + - {inputValue: stats_options} + - if: + cond: {isPresent: schema} + then: + - --schema + - {inputPath: schema} + - --statistics + - {outputPath: statistics} diff --git a/components/deprecated/tfx/StatisticsGen/with_URI_IO/component.py b/components/deprecated/tfx/StatisticsGen/with_URI_IO/component.py new file mode 100644 index 00000000000..e78cb0cb902 --- /dev/null +++ b/components/deprecated/tfx/StatisticsGen/with_URI_IO/component.py @@ -0,0 +1,97 @@ +# flake8: noqa + +from typing import NamedTuple + +def StatisticsGen( + examples_uri: 'ExamplesUri', + output_statistics_uri: 'ExampleStatisticsUri', + schema_uri: 'SchemaUri' = None, + stats_options_json: str = None, + beam_pipeline_args: list = None, +) -> NamedTuple('Outputs', [ + ('statistics_uri', 'ExampleStatisticsUri'), +]): + from tfx.components import StatisticsGen as component_class + + #Generated code + import json + import os + import tempfile + import tensorflow + from google.protobuf import json_format, message + from tfx.types import channel_utils, artifact_utils + from tfx.components.base import base_executor + + arguments = locals().copy() + + component_class_args = {} + + for name, execution_parameter in component_class.SPEC_CLASS.PARAMETERS.items(): + argument_value = arguments.get(name, None) + if argument_value is None: + continue + parameter_type = execution_parameter.type + if isinstance(parameter_type, type) and issubclass(parameter_type, message.Message): + argument_value_obj = 
parameter_type() + json_format.Parse(argument_value, argument_value_obj) + else: + argument_value_obj = argument_value + component_class_args[name] = argument_value_obj + + for name, channel_parameter in component_class.SPEC_CLASS.INPUTS.items(): + artifact_path = arguments.get(name + '_uri') or arguments.get(name + '_path') + if artifact_path: + artifact = channel_parameter.type() + artifact.uri = artifact_path.rstrip('/') + '/' # Some TFX components require that the artifact URIs end with a slash + if channel_parameter.type.PROPERTIES and 'split_names' in channel_parameter.type.PROPERTIES: + # Recovering splits + subdirs = tensorflow.io.gfile.listdir(artifact_path) + # Workaround for https://github.com/tensorflow/tensorflow/issues/39167 + subdirs = [subdir.rstrip('/') for subdir in subdirs] + artifact.split_names = artifact_utils.encode_split_names(sorted(subdirs)) + component_class_args[name] = channel_utils.as_channel([artifact]) + + component_class_instance = component_class(**component_class_args) + + input_dict = channel_utils.unwrap_channel_dict(component_class_instance.inputs.get_all()) + output_dict = channel_utils.unwrap_channel_dict(component_class_instance.outputs.get_all()) + exec_properties = component_class_instance.exec_properties + + # Generating paths for output artifacts + for name, artifacts in output_dict.items(): + base_artifact_path = arguments.get('output_' + name + '_uri') or arguments.get(name + '_path') + if base_artifact_path: + # Are there still cases where output channel has multiple artifacts? + for idx, artifact in enumerate(artifacts): + subdir = str(idx + 1) if idx > 0 else '' + artifact.uri = os.path.join(base_artifact_path, subdir) # Ends with '/' + + print('component instance: ' + str(component_class_instance)) + + # Workaround for a TFX+Beam bug to make DataflowRunner work. 
+ # Remove after the next release that has https://github.com/tensorflow/tfx/commit/ddb01c02426d59e8bd541e3fd3cbaaf68779b2df + import tfx + tfx.version.__version__ += 'dev' + + executor_context = base_executor.BaseExecutor.Context( + beam_pipeline_args=beam_pipeline_args, + tmp_dir=tempfile.gettempdir(), + unique_id='tfx_component', + ) + executor = component_class_instance.executor_spec.executor_class(executor_context) + executor.Do( + input_dict=input_dict, + output_dict=output_dict, + exec_properties=exec_properties, + ) + + return (output_statistics_uri, ) + + +if __name__ == '__main__': + import kfp + kfp.components.create_component_from_func( + StatisticsGen, + base_image='tensorflow/tfx:0.21.4', + output_component_file='component.yaml' + ) diff --git a/components/deprecated/tfx/StatisticsGen/with_URI_IO/component.yaml b/components/deprecated/tfx/StatisticsGen/with_URI_IO/component.yaml new file mode 100644 index 00000000000..19413100d9f --- /dev/null +++ b/components/deprecated/tfx/StatisticsGen/with_URI_IO/component.yaml @@ -0,0 +1,149 @@ +name: StatisticsGen +inputs: +- {name: examples_uri, type: ExamplesUri} +- {name: output_statistics_uri, type: ExampleStatisticsUri} +- {name: schema_uri, type: SchemaUri, optional: true} +- {name: stats_options_json, type: String, optional: true} +- {name: beam_pipeline_args, type: JsonArray, optional: true} +outputs: +- {name: statistics_uri, type: ExampleStatisticsUri} +implementation: + container: + image: tensorflow/tfx:0.21.4 + command: + - python3 + - -u + - -c + - | + def StatisticsGen( + examples_uri, + output_statistics_uri, + schema_uri = None, + stats_options_json = None, + beam_pipeline_args = None, + ): + from tfx.components import StatisticsGen as component_class + + #Generated code + import json + import os + import tempfile + import tensorflow + from google.protobuf import json_format, message + from tfx.types import channel_utils, artifact_utils + from tfx.components.base import base_executor + + arguments = locals().copy() + + component_class_args = {} + + for name, execution_parameter in component_class.SPEC_CLASS.PARAMETERS.items(): + argument_value = arguments.get(name, None) + if argument_value is None: + continue + parameter_type = execution_parameter.type + if isinstance(parameter_type, type) and issubclass(parameter_type, message.Message): + argument_value_obj = parameter_type() + json_format.Parse(argument_value, argument_value_obj) + else: + argument_value_obj = argument_value + component_class_args[name] = argument_value_obj + + for name, channel_parameter in component_class.SPEC_CLASS.INPUTS.items(): + artifact_path = arguments.get(name + '_uri') or arguments.get(name + '_path') + if artifact_path: + artifact = channel_parameter.type() + artifact.uri = artifact_path.rstrip('/') + '/' # Some TFX components require that the artifact URIs end with a slash + if channel_parameter.type.PROPERTIES and 'split_names' in channel_parameter.type.PROPERTIES: + # Recovering splits + subdirs = tensorflow.io.gfile.listdir(artifact_path) + # Workaround for https://github.com/tensorflow/tensorflow/issues/39167 + subdirs = [subdir.rstrip('/') for subdir in subdirs] + artifact.split_names = artifact_utils.encode_split_names(sorted(subdirs)) + component_class_args[name] = channel_utils.as_channel([artifact]) + + component_class_instance = component_class(**component_class_args) + + input_dict = channel_utils.unwrap_channel_dict(component_class_instance.inputs.get_all()) + output_dict = 
channel_utils.unwrap_channel_dict(component_class_instance.outputs.get_all()) + exec_properties = component_class_instance.exec_properties + + # Generating paths for output artifacts + for name, artifacts in output_dict.items(): + base_artifact_path = arguments.get('output_' + name + '_uri') or arguments.get(name + '_path') + if base_artifact_path: + # Are there still cases where output channel has multiple artifacts? + for idx, artifact in enumerate(artifacts): + subdir = str(idx + 1) if idx > 0 else '' + artifact.uri = os.path.join(base_artifact_path, subdir) # Ends with '/' + + print('component instance: ' + str(component_class_instance)) + + # Workaround for a TFX+Beam bug to make DataflowRunner work. + # Remove after the next release that has https://github.com/tensorflow/tfx/commit/ddb01c02426d59e8bd541e3fd3cbaaf68779b2df + import tfx + tfx.version.__version__ += 'dev' + + executor_context = base_executor.BaseExecutor.Context( + beam_pipeline_args=beam_pipeline_args, + tmp_dir=tempfile.gettempdir(), + unique_id='tfx_component', + ) + executor = component_class_instance.executor_spec.executor_class(executor_context) + executor.Do( + input_dict=input_dict, + output_dict=output_dict, + exec_properties=exec_properties, + ) + + return (output_statistics_uri, ) + + import json + import argparse + _parser = argparse.ArgumentParser(prog='StatisticsGen', description='') + _parser.add_argument("--examples-uri", dest="examples_uri", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--output-statistics-uri", dest="output_statistics_uri", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--schema-uri", dest="schema_uri", type=str, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--stats-options-json", dest="stats_options_json", type=str, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--beam-pipeline-args", dest="beam_pipeline_args", type=json.loads, required=False, default=argparse.SUPPRESS) + _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1) + _parsed_args = vars(_parser.parse_args()) + _output_files = _parsed_args.pop("_output_paths", []) + + _outputs = StatisticsGen(**_parsed_args) + + _output_serializers = [ + str, + + ] + + import os + for idx, output_file in enumerate(_output_files): + try: + os.makedirs(os.path.dirname(output_file)) + except OSError: + pass + with open(output_file, 'w') as f: + f.write(_output_serializers[idx](_outputs[idx])) + args: + - --examples-uri + - {inputValue: examples_uri} + - --output-statistics-uri + - {inputValue: output_statistics_uri} + - if: + cond: {isPresent: schema_uri} + then: + - --schema-uri + - {inputValue: schema_uri} + - if: + cond: {isPresent: stats_options_json} + then: + - --stats-options-json + - {inputValue: stats_options_json} + - if: + cond: {isPresent: beam_pipeline_args} + then: + - --beam-pipeline-args + - {inputValue: beam_pipeline_args} + - '----output-paths' + - {outputPath: statistics_uri} diff --git a/components/deprecated/tfx/Trainer/component.py b/components/deprecated/tfx/Trainer/component.py new file mode 100644 index 00000000000..1cd399bac49 --- /dev/null +++ b/components/deprecated/tfx/Trainer/component.py @@ -0,0 +1,161 @@ +# flake8: noqa TODO + +from kfp.components import InputPath, OutputPath + + +def Trainer( + examples_path: InputPath('Examples'), + schema_path: InputPath('Schema'), + + model_path: OutputPath('Model'), + + train_args: {'JsonObject': {'data_type': 
'proto:tfx.components.trainer.TrainArgs'}},
+    eval_args: {'JsonObject': {'data_type': 'proto:tfx.components.trainer.EvalArgs'}},
+    module_file: str = None,
+    trainer_fn: str = None,
+    custom_config: dict = None,
+
+    transform_graph_path: InputPath('TransformGraph') = None,
+    base_model_path: InputPath('Model') = None,
+    hyperparameters_path: InputPath('HyperParameters') = None,
+):
+    """
+    A TFX component to train a TensorFlow model.
+
+    The Trainer component is used to train and evaluate a model using given
+    inputs and a user-supplied estimator. This component includes a custom
+    driver to optionally grab a previous model to warm-start from.
+
+    ## Providing an estimator
+    The TFX executor will use the estimator provided in the `module_file` file
+    to train the model. The Trainer executor will look specifically for the
+    `trainer_fn()` function within that file. Before training, the executor will
+    call that function expecting the following returned as a dictionary:
+
+    - estimator: The
+      [estimator](https://www.tensorflow.org/api_docs/python/tf/estimator/Estimator)
+      to be used by TensorFlow to train the model.
+    - train_spec: The
+      [configuration](https://www.tensorflow.org/api_docs/python/tf/estimator/TrainSpec)
+      to be used by the "train" part of the TensorFlow `train_and_evaluate()`
+      call.
+    - eval_spec: The
+      [configuration](https://www.tensorflow.org/api_docs/python/tf/estimator/EvalSpec)
+      to be used by the "eval" part of the TensorFlow `train_and_evaluate()` call.
+    - eval_input_receiver_fn: The
+      [configuration](https://www.tensorflow.org/tfx/model_analysis/get_started#modify_an_existing_model)
+      to be used
+      by the [ModelValidator](https://www.tensorflow.org/tfx/guide/modelval)
+      component when validating the model.
+
+    An example of `trainer_fn()` can be found in the [user-supplied
+    code](https://github.com/tensorflow/tfx/blob/master/tfx/examples/chicago_taxi_pipeline/taxi_utils.py)
+    of the TFX Chicago Taxi pipeline example.
+
+
+    Args:
+      examples: A Channel of 'Examples' type, serving as the source of
+        examples that are used in training (required). May be raw or
+        transformed.
+      transform_graph: An optional Channel of 'TransformGraph' type, serving as
+        the input transform graph if present.
+      schema: A Channel of 'SchemaPath' type, serving as the schema of training
+        and eval data.
+      module_file: A path to a python module file containing a UDF model
+        definition. The module_file must implement a function named
+        `trainer_fn` at its top level. The function must have the following
+        signature.
+
+        def trainer_fn(tf.contrib.training.HParams,
+                       tensorflow_metadata.proto.v0.schema_pb2) -> Dict:
+          ...
+
+        where the returned Dict has the following key-values.
+          'estimator': an instance of tf.estimator.Estimator
+          'train_spec': an instance of tf.estimator.TrainSpec
+          'eval_spec': an instance of tf.estimator.EvalSpec
+          'eval_input_receiver_fn': an instance of tfma.export.EvalInputReceiver
+
+        Exactly one of 'module_file' or 'trainer_fn' must be supplied.
+      trainer_fn: A python path to the UDF model definition function. See
+        'module_file' for the required signature of the UDF.
+        Exactly one of 'module_file' or 'trainer_fn' must be supplied.
+      train_args: A trainer_pb2.TrainArgs instance, containing args used for
+        training. Currently only num_steps is available.
+      eval_args: A trainer_pb2.EvalArgs instance, containing args used for eval.
+        Currently only num_steps is available.
+      custom_config: A dict which contains the training job parameters to be
+        passed to Google Cloud ML Engine. For the full set of parameters
+        supported by Google Cloud ML Engine, refer to
+        https://cloud.google.com/ml-engine/reference/rest/v1/projects.jobs#Job
+    Returns:
+      model: Optional 'Model' channel for the result of exported models.
+    Raises:
+      ValueError:
+        - When both or neither of 'module_file' and 'trainer_fn' is supplied.
+    """
+    from tfx.components.trainer.component import Trainer as component_class
+
+    #Generated code
+    import json
+    import os
+    import tensorflow
+    from google.protobuf import json_format, message
+    from tfx.types import Artifact, channel_utils, artifact_utils
+
+    arguments = locals().copy()
+
+    component_class_args = {}
+
+    for name, execution_parameter in component_class.SPEC_CLASS.PARAMETERS.items():
+        argument_value_obj = argument_value = arguments.get(name, None)
+        if argument_value is None:
+            continue
+        parameter_type = execution_parameter.type
+        if isinstance(parameter_type, type) and issubclass(parameter_type, message.Message): # Maybe FIX: execution_parameter.type can also be a tuple
+            argument_value_obj = parameter_type()
+            json_format.Parse(argument_value, argument_value_obj)
+        component_class_args[name] = argument_value_obj
+
+    for name, channel_parameter in component_class.SPEC_CLASS.INPUTS.items():
+        artifact_path = arguments[name + '_path']
+        if artifact_path:
+            artifact = channel_parameter.type()
+            artifact.uri = artifact_path + '/' # Some TFX components require that the artifact URIs end with a slash
+            if channel_parameter.type.PROPERTIES and 'split_names' in channel_parameter.type.PROPERTIES:
+                # Recovering splits
+                subdirs = tensorflow.io.gfile.listdir(artifact_path)
+                artifact.split_names = artifact_utils.encode_split_names(sorted(subdirs))
+            component_class_args[name] = channel_utils.as_channel([artifact])
+
+    component_class_instance = component_class(**component_class_args)
+
+    input_dict = {name: channel.get() for name, channel in component_class_instance.inputs.get_all().items()}
+    output_dict = {name: channel.get() for name, channel in component_class_instance.outputs.get_all().items()}
+    exec_properties = component_class_instance.exec_properties
+
+    # Generating paths for output artifacts
+    for name, artifacts in output_dict.items():
+        base_artifact_path = arguments[name + '_path']
+        # Are there still cases where output channel has multiple artifacts?
+        for idx, artifact in enumerate(artifacts):
+            subdir = str(idx + 1) if idx > 0 else ''
+            artifact.uri = os.path.join(base_artifact_path, subdir) # Ends with '/'
+
+    print('component instance: ' + str(component_class_instance))
+
+    #executor = component_class.EXECUTOR_SPEC.executor_class() # Same
+    executor = component_class_instance.executor_spec.executor_class()
+    executor.Do(
+        input_dict=input_dict,
+        output_dict=output_dict,
+        exec_properties=exec_properties,
+    )
+
+
+if __name__ == '__main__':
+    import kfp
+    kfp.components.func_to_container_op(
+        Trainer,
+        base_image='tensorflow/tfx:0.21.4',
+        output_component_file='component.yaml'
+    )
diff --git a/components/deprecated/tfx/Trainer/component.yaml b/components/deprecated/tfx/Trainer/component.yaml
new file mode 100644
index 00000000000..c8aad13307d
--- /dev/null
+++ b/components/deprecated/tfx/Trainer/component.yaml
@@ -0,0 +1,323 @@
+name: Trainer
+description: |-
+  A TFX component to train a TensorFlow model.
+
+  The Trainer component is used to train and evaluate a model using given
+  inputs and a user-supplied estimator. This component includes a custom
+  driver to optionally grab a previous model to warm-start from.
+
+  ## Providing an estimator
+  The TFX executor will use the estimator provided in the `module_file` file
+  to train the model. The Trainer executor will look specifically for the
+  `trainer_fn()` function within that file. Before training, the executor will
+  call that function expecting the following returned as a dictionary:
+
+  - estimator: The
+    [estimator](https://www.tensorflow.org/api_docs/python/tf/estimator/Estimator)
+    to be used by TensorFlow to train the model.
+  - train_spec: The
+    [configuration](https://www.tensorflow.org/api_docs/python/tf/estimator/TrainSpec)
+    to be used by the "train" part of the TensorFlow `train_and_evaluate()`
+    call.
+  - eval_spec: The
+    [configuration](https://www.tensorflow.org/api_docs/python/tf/estimator/EvalSpec)
+    to be used by the "eval" part of the TensorFlow `train_and_evaluate()` call.
+  - eval_input_receiver_fn: The
+    [configuration](https://www.tensorflow.org/tfx/model_analysis/get_started#modify_an_existing_model)
+    to be used
+    by the [ModelValidator](https://www.tensorflow.org/tfx/guide/modelval)
+    component when validating the model.
+
+  An example of `trainer_fn()` can be found in the [user-supplied
+  code](https://github.com/tensorflow/tfx/blob/master/tfx/examples/chicago_taxi_pipeline/taxi_utils.py)
+  of the TFX Chicago Taxi pipeline example.
+
+
+  Args:
+    examples: A Channel of 'Examples' type, serving as the source of
+      examples that are used in training (required). May be raw or
+      transformed.
+    transform_graph: An optional Channel of 'TransformGraph' type, serving as
+      the input transform graph if present.
+    schema: A Channel of 'SchemaPath' type, serving as the schema of training
+      and eval data.
+    module_file: A path to a python module file containing a UDF model
+      definition. The module_file must implement a function named `trainer_fn`
+      at its top level. The function must have the following signature.
+
+      def trainer_fn(tf.contrib.training.HParams,
+                     tensorflow_metadata.proto.v0.schema_pb2) -> Dict:
+        ...
+
+      where the returned Dict has the following key-values.
+        'estimator': an instance of tf.estimator.Estimator
+        'train_spec': an instance of tf.estimator.TrainSpec
+        'eval_spec': an instance of tf.estimator.EvalSpec
+        'eval_input_receiver_fn': an instance of tfma.export.EvalInputReceiver
+
+      Exactly one of 'module_file' or 'trainer_fn' must be supplied.
+    trainer_fn: A python path to the UDF model definition function. See
+      'module_file' for the required signature of the UDF.
+      Exactly one of 'module_file' or 'trainer_fn' must be supplied.
+    train_args: A trainer_pb2.TrainArgs instance, containing args used for
+      training. Currently only num_steps is available.
+    eval_args: A trainer_pb2.EvalArgs instance, containing args used for eval.
+      Currently only num_steps is available.
+    custom_config: A dict which contains the training job parameters to be
+      passed to Google Cloud ML Engine. For the full set of parameters
+      supported by Google Cloud ML Engine, refer to
+      https://cloud.google.com/ml-engine/reference/rest/v1/projects.jobs#Job
+  Returns:
+    model: Optional 'Model' channel for the result of exported models.
+  Raises:
+    ValueError:
+      - When both or neither of 'module_file' and 'trainer_fn' is supplied.
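+# Usage sketch (an illustrative assumption, not produced by the component
+# generator; assumes the KFP v1 SDK):
+#   import kfp.components as comp
+#   trainer_op = comp.load_component_from_file('component.yaml')
+# The resulting trainer_op factory accepts the inputs declared below and can
+# be called inside a kfp.dsl.pipeline function.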
+inputs: +- {name: examples, type: Examples} +- {name: schema, type: Schema} +- name: train_args + type: + JsonObject: {data_type: 'proto:tfx.components.trainer.TrainArgs'} +- name: eval_args + type: + JsonObject: {data_type: 'proto:tfx.components.trainer.EvalArgs'} +- {name: module_file, type: String, optional: true} +- {name: trainer_fn, type: String, optional: true} +- {name: custom_config, type: JsonObject, optional: true} +- {name: transform_graph, type: TransformGraph, optional: true} +- {name: base_model, type: Model, optional: true} +- {name: hyperparameters, type: HyperParameters, optional: true} +outputs: +- {name: model, type: Model} +implementation: + container: + image: tensorflow/tfx:0.21.4 + command: + - python3 + - -u + - -c + - | + def _make_parent_dirs_and_return_path(file_path: str): + import os + os.makedirs(os.path.dirname(file_path), exist_ok=True) + return file_path + + def Trainer( + examples_path , + schema_path , + + model_path , + + train_args , + eval_args , + module_file = None, + trainer_fn = None, + custom_config = None, + + transform_graph_path = None, + base_model_path = None, + hyperparameters_path = None, + ): + """ + A TFX component to train a TensorFlow model. + + The Trainer component is used to train and eval a model using given inputs and + a user-supplied estimator. This component includes a custom driver to + optionally grab previous model to warm start from. + + ## Providing an estimator + The TFX executor will use the estimator provided in the `module_file` file + to train the model. The Trainer executor will look specifically for the + `trainer_fn()` function within that file. Before training, the executor will + call that function expecting the following returned as a dictionary: + + - estimator: The + [estimator](https://www.tensorflow.org/api_docs/python/tf/estimator/Estimator) + to be used by TensorFlow to train the model. + - train_spec: The + [configuration](https://www.tensorflow.org/api_docs/python/tf/estimator/TrainSpec) + to be used by the "train" part of the TensorFlow `train_and_evaluate()` + call. + - eval_spec: The + [configuration](https://www.tensorflow.org/api_docs/python/tf/estimator/EvalSpec) + to be used by the "eval" part of the TensorFlow `train_and_evaluate()` call. + - eval_input_receiver_fn: The + [configuration](https://www.tensorflow.org/tfx/model_analysis/get_started#modify_an_existing_model) + to be used + by the [ModelValidator](https://www.tensorflow.org/tfx/guide/modelval) + component when validating the model. + + An example of `trainer_fn()` can be found in the [user-supplied + code]((https://github.com/tensorflow/tfx/blob/master/tfx/examples/chicago_taxi_pipeline/taxi_utils.py)) + of the TFX Chicago Taxi pipeline example. + + Args: + examples: A Channel of 'Examples' type, serving as the source of + examples that are used in training (required). May be raw or + transformed. + transform_graph: An optional Channel of 'TransformGraph' type, serving as + the input transform graph if present. + schema: A Channel of 'SchemaPath' type, serving as the schema of training + and eval data. + module_file: A path to python module file containing UDF model definition. + The module_file must implement a function named `trainer_fn` at its + top level. The function must have the following signature. + + def trainer_fn(tf.contrib.training.HParams, + tensorflow_metadata.proto.v0.schema_pb2) -> Dict: + ... + + where the returned Dict has the following key-values. 
+ 'estimator': an instance of tf.estimator.Estimator + 'train_spec': an instance of tf.estimator.TrainSpec + 'eval_spec': an instance of tf.estimator.EvalSpec + 'eval_input_receiver_fn': an instance of tfma.export.EvalInputReceiver + + Exactly one of 'module_file' or 'trainer_fn' must be supplied. + trainer_fn: A python path to UDF model definition function. See + 'module_file' for the required signature of the UDF. + Exactly one of 'module_file' or 'trainer_fn' must be supplied. + train_args: A trainer_pb2.TrainArgs instance, containing args used for + training. Current only num_steps is available. + eval_args: A trainer_pb2.EvalArgs instance, containing args used for eval. + Current only num_steps is available. + custom_config: A dict which contains the training job parameters to be + passed to Google Cloud ML Engine. For the full set of parameters + supported by Google Cloud ML Engine, refer to + https://cloud.google.com/ml-engine/reference/rest/v1/projects.jobs#Job + Returns: + model: Optional 'Model' channel for result of exported models. + Raises: + ValueError: + - When both or neither of 'module_file' and 'trainer_fn' is supplied. + """ + from tfx.components.trainer.component import Trainer as component_class + + #Generated code + import json + import os + import tensorflow + from google.protobuf import json_format, message + from tfx.types import Artifact, channel_utils, artifact_utils + + arguments = locals().copy() + + component_class_args = {} + + for name, execution_parameter in component_class.SPEC_CLASS.PARAMETERS.items(): + argument_value_obj = argument_value = arguments.get(name, None) + if argument_value is None: + continue + parameter_type = execution_parameter.type + if isinstance(parameter_type, type) and issubclass(parameter_type, message.Message): # Maybe FIX: execution_parameter.type can also be a tuple + argument_value_obj = parameter_type() + json_format.Parse(argument_value, argument_value_obj) + component_class_args[name] = argument_value_obj + + for name, channel_parameter in component_class.SPEC_CLASS.INPUTS.items(): + artifact_path = arguments[name + '_path'] + if artifact_path: + artifact = channel_parameter.type() + artifact.uri = artifact_path + '/' # ? + if channel_parameter.type.PROPERTIES and 'split_names' in channel_parameter.type.PROPERTIES: + # Recovering splits + subdirs = tensorflow.io.gfile.listdir(artifact_path) + artifact.split_names = artifact_utils.encode_split_names(sorted(subdirs)) + component_class_args[name] = channel_utils.as_channel([artifact]) + + component_class_instance = component_class(**component_class_args) + + input_dict = {name: channel.get() for name, channel in component_class_instance.inputs.get_all().items()} + output_dict = {name: channel.get() for name, channel in component_class_instance.outputs.get_all().items()} + exec_properties = component_class_instance.exec_properties + + # Generating paths for output artifacts + for name, artifacts in output_dict.items(): + base_artifact_path = arguments[name + '_path'] + # Are there still cases where output channel has multiple artifacts? 
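+          # For the single 'model' output handled below this means (hypothetical
+          # path, for illustration): model_path='/tmp/outputs/model/data' gives
+          # artifact.uri='/tmp/outputs/model/data/', and only an unexpected
+          # second artifact would go into the numbered subdirectory '2'.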
+ for idx, artifact in enumerate(artifacts): + subdir = str(idx + 1) if idx > 0 else '' + artifact.uri = os.path.join(base_artifact_path, subdir) # Ends with '/' + + print('component instance: ' + str(component_class_instance)) + + #executor = component_class.EXECUTOR_SPEC.executor_class() # Same + executor = component_class_instance.executor_spec.executor_class() + executor.Do( + input_dict=input_dict, + output_dict=output_dict, + exec_properties=exec_properties, + ) + + import json + import argparse + _parser = argparse.ArgumentParser(prog='Trainer', description='A TFX component to train a TensorFlow model.\n\n The Trainer component is used to train and eval a model using given inputs and\n a user-supplied estimator. This component includes a custom driver to\n optionally grab previous model to warm start from.\n\n ## Providing an estimator\n The TFX executor will use the estimator provided in the `module_file` file\n to train the model. The Trainer executor will look specifically for the\n `trainer_fn()` function within that file. Before training, the executor will\n call that function expecting the following returned as a dictionary:\n\n - estimator: The\n [estimator](https://www.tensorflow.org/api_docs/python/tf/estimator/Estimator)\n to be used by TensorFlow to train the model.\n - train_spec: The\n [configuration](https://www.tensorflow.org/api_docs/python/tf/estimator/TrainSpec)\n to be used by the "train" part of the TensorFlow `train_and_evaluate()`\n call.\n - eval_spec: The\n [configuration](https://www.tensorflow.org/api_docs/python/tf/estimator/EvalSpec)\n to be used by the "eval" part of the TensorFlow `train_and_evaluate()` call.\n - eval_input_receiver_fn: The\n [configuration](https://www.tensorflow.org/tfx/model_analysis/get_started#modify_an_existing_model)\n to be used\n by the [ModelValidator](https://www.tensorflow.org/tfx/guide/modelval)\n component when validating the model.\n\n An example of `trainer_fn()` can be found in the [user-supplied\n code]((https://github.com/tensorflow/tfx/blob/master/tfx/examples/chicago_taxi_pipeline/taxi_utils.py))\n of the TFX Chicago Taxi pipeline example.\n\n\n Args:\n examples: A Channel of \'Examples\' type, serving as the source of\n examples that are used in training (required). May be raw or\n transformed.\n transform_graph: An optional Channel of \'TransformGraph\' type, serving as\n the input transform graph if present.\n schema: A Channel of \'SchemaPath\' type, serving as the schema of training\n and eval data.\n module_file: A path to python module file containing UDF model definition.\n The module_file must implement a function named `trainer_fn` at its\n top level. The function must have the following signature.\n\n def trainer_fn(tf.contrib.training.HParams,\n tensorflow_metadata.proto.v0.schema_pb2) -> Dict:\n ...\n\n where the returned Dict has the following key-values.\n \'estimator\': an instance of tf.estimator.Estimator\n \'train_spec\': an instance of tf.estimator.TrainSpec\n \'eval_spec\': an instance of tf.estimator.EvalSpec\n \'eval_input_receiver_fn\': an instance of tfma.export.EvalInputReceiver\n\n Exactly one of \'module_file\' or \'trainer_fn\' must be supplied.\n trainer_fn: A python path to UDF model definition function. See\n \'module_file\' for the required signature of the UDF.\n Exactly one of \'module_file\' or \'trainer_fn\' must be supplied.\n train_args: A trainer_pb2.TrainArgs instance, containing args used for\n training. 
Current only num_steps is available.\n eval_args: A trainer_pb2.EvalArgs instance, containing args used for eval.\n Current only num_steps is available.\n custom_config: A dict which contains the training job parameters to be\n passed to Google Cloud ML Engine. For the full set of parameters\n supported by Google Cloud ML Engine, refer to\n https://cloud.google.com/ml-engine/reference/rest/v1/projects.jobs#Job\n Returns:\n model: Optional \'Model\' channel for result of exported models.\n Raises:\n ValueError:\n - When both or neither of \'module_file\' and \'trainer_fn\' is supplied.') + _parser.add_argument("--examples", dest="examples_path", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--schema", dest="schema_path", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--train-args", dest="train_args", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--eval-args", dest="eval_args", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--module-file", dest="module_file", type=str, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--trainer-fn", dest="trainer_fn", type=str, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--custom-config", dest="custom_config", type=json.loads, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--transform-graph", dest="transform_graph_path", type=str, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--base-model", dest="base_model_path", type=str, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--hyperparameters", dest="hyperparameters_path", type=str, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--model", dest="model_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) + _parsed_args = vars(_parser.parse_args()) + _output_files = _parsed_args.pop("_output_paths", []) + + _outputs = Trainer(**_parsed_args) + + _output_serializers = [ + + ] + + import os + for idx, output_file in enumerate(_output_files): + try: + os.makedirs(os.path.dirname(output_file)) + except OSError: + pass + with open(output_file, 'w') as f: + f.write(_output_serializers[idx](_outputs[idx])) + args: + - --examples + - {inputPath: examples} + - --schema + - {inputPath: schema} + - --train-args + - {inputValue: train_args} + - --eval-args + - {inputValue: eval_args} + - if: + cond: {isPresent: module_file} + then: + - --module-file + - {inputValue: module_file} + - if: + cond: {isPresent: trainer_fn} + then: + - --trainer-fn + - {inputValue: trainer_fn} + - if: + cond: {isPresent: custom_config} + then: + - --custom-config + - {inputValue: custom_config} + - if: + cond: {isPresent: transform_graph} + then: + - --transform-graph + - {inputPath: transform_graph} + - if: + cond: {isPresent: base_model} + then: + - --base-model + - {inputPath: base_model} + - if: + cond: {isPresent: hyperparameters} + then: + - --hyperparameters + - {inputPath: hyperparameters} + - --model + - {outputPath: model} diff --git a/components/deprecated/tfx/Trainer/with_URI_IO/component.py b/components/deprecated/tfx/Trainer/with_URI_IO/component.py new file mode 100644 index 00000000000..4dd7c0cfbc9 --- /dev/null +++ b/components/deprecated/tfx/Trainer/with_URI_IO/component.py @@ -0,0 +1,105 @@ +# flake8: noqa + +from typing import NamedTuple + +def Trainer( + examples_uri: 'ExamplesUri', + schema_uri: 'SchemaUri', + output_model_uri: 'ModelUri', + train_args: 
{'JsonObject': {'data_type': 'proto:tfx.components.trainer.TrainArgs'}}, + eval_args: {'JsonObject': {'data_type': 'proto:tfx.components.trainer.EvalArgs'}}, + transform_graph_uri: 'TransformGraphUri' = None, + base_model_uri: 'ModelUri' = None, + hyperparameters_uri: 'HyperParametersUri' = None, + module_file: str = None, + run_fn: str = None, + trainer_fn: str = None, + custom_config: dict = None, + beam_pipeline_args: list = None, +) -> NamedTuple('Outputs', [ + ('model_uri', 'ModelUri'), +]): + from tfx.components import Trainer as component_class + + #Generated code + import json + import os + import tempfile + import tensorflow + from google.protobuf import json_format, message + from tfx.types import channel_utils, artifact_utils + from tfx.components.base import base_executor + + arguments = locals().copy() + + component_class_args = {} + + for name, execution_parameter in component_class.SPEC_CLASS.PARAMETERS.items(): + argument_value = arguments.get(name, None) + if argument_value is None: + continue + parameter_type = execution_parameter.type + if isinstance(parameter_type, type) and issubclass(parameter_type, message.Message): + argument_value_obj = parameter_type() + json_format.Parse(argument_value, argument_value_obj) + else: + argument_value_obj = argument_value + component_class_args[name] = argument_value_obj + + for name, channel_parameter in component_class.SPEC_CLASS.INPUTS.items(): + artifact_path = arguments.get(name + '_uri') or arguments.get(name + '_path') + if artifact_path: + artifact = channel_parameter.type() + artifact.uri = artifact_path.rstrip('/') + '/' # Some TFX components require that the artifact URIs end with a slash + if channel_parameter.type.PROPERTIES and 'split_names' in channel_parameter.type.PROPERTIES: + # Recovering splits + subdirs = tensorflow.io.gfile.listdir(artifact_path) + # Workaround for https://github.com/tensorflow/tensorflow/issues/39167 + subdirs = [subdir.rstrip('/') for subdir in subdirs] + artifact.split_names = artifact_utils.encode_split_names(sorted(subdirs)) + component_class_args[name] = channel_utils.as_channel([artifact]) + + component_class_instance = component_class(**component_class_args) + + input_dict = channel_utils.unwrap_channel_dict(component_class_instance.inputs.get_all()) + output_dict = channel_utils.unwrap_channel_dict(component_class_instance.outputs.get_all()) + exec_properties = component_class_instance.exec_properties + + # Generating paths for output artifacts + for name, artifacts in output_dict.items(): + base_artifact_path = arguments.get('output_' + name + '_uri') or arguments.get(name + '_path') + if base_artifact_path: + # Are there still cases where output channel has multiple artifacts? + for idx, artifact in enumerate(artifacts): + subdir = str(idx + 1) if idx > 0 else '' + artifact.uri = os.path.join(base_artifact_path, subdir) # Ends with '/' + + print('component instance: ' + str(component_class_instance)) + + # Workaround for a TFX+Beam bug to make DataflowRunner work. 
+ # Remove after the next release that has https://github.com/tensorflow/tfx/commit/ddb01c02426d59e8bd541e3fd3cbaaf68779b2df + import tfx + tfx.version.__version__ += 'dev' + + executor_context = base_executor.BaseExecutor.Context( + beam_pipeline_args=beam_pipeline_args, + tmp_dir=tempfile.gettempdir(), + unique_id='tfx_component', + ) + executor = component_class_instance.executor_spec.executor_class(executor_context) + executor.Do( + input_dict=input_dict, + output_dict=output_dict, + exec_properties=exec_properties, + ) + + return (output_model_uri, ) + + +if __name__ == '__main__': + import kfp + kfp.components.create_component_from_func( + Trainer, + base_image='tensorflow/tfx:0.21.4', + output_component_file='component.yaml' + ) diff --git a/components/deprecated/tfx/Trainer/with_URI_IO/component.yaml b/components/deprecated/tfx/Trainer/with_URI_IO/component.yaml new file mode 100644 index 00000000000..57e79a5ef3f --- /dev/null +++ b/components/deprecated/tfx/Trainer/with_URI_IO/component.yaml @@ -0,0 +1,208 @@ +name: Trainer +inputs: +- {name: examples_uri, type: ExamplesUri} +- {name: schema_uri, type: SchemaUri} +- {name: output_model_uri, type: ModelUri} +- name: train_args + type: + JsonObject: {data_type: 'proto:tfx.components.trainer.TrainArgs'} +- name: eval_args + type: + JsonObject: {data_type: 'proto:tfx.components.trainer.EvalArgs'} +- {name: transform_graph_uri, type: TransformGraphUri, optional: true} +- {name: base_model_uri, type: ModelUri, optional: true} +- {name: hyperparameters_uri, type: HyperParametersUri, optional: true} +- {name: module_file, type: String, optional: true} +- {name: run_fn, type: String, optional: true} +- {name: trainer_fn, type: String, optional: true} +- {name: custom_config, type: JsonObject, optional: true} +- {name: beam_pipeline_args, type: JsonArray, optional: true} +outputs: +- {name: model_uri, type: ModelUri} +implementation: + container: + image: tensorflow/tfx:0.21.4 + command: + - python3 + - -u + - -c + - | + def Trainer( + examples_uri, + schema_uri, + output_model_uri, + train_args, + eval_args, + transform_graph_uri = None, + base_model_uri = None, + hyperparameters_uri = None, + module_file = None, + run_fn = None, + trainer_fn = None, + custom_config = None, + beam_pipeline_args = None, + ): + from tfx.components import Trainer as component_class + + #Generated code + import json + import os + import tempfile + import tensorflow + from google.protobuf import json_format, message + from tfx.types import channel_utils, artifact_utils + from tfx.components.base import base_executor + + arguments = locals().copy() + + component_class_args = {} + + for name, execution_parameter in component_class.SPEC_CLASS.PARAMETERS.items(): + argument_value = arguments.get(name, None) + if argument_value is None: + continue + parameter_type = execution_parameter.type + if isinstance(parameter_type, type) and issubclass(parameter_type, message.Message): + argument_value_obj = parameter_type() + json_format.Parse(argument_value, argument_value_obj) + else: + argument_value_obj = argument_value + component_class_args[name] = argument_value_obj + + for name, channel_parameter in component_class.SPEC_CLASS.INPUTS.items(): + artifact_path = arguments.get(name + '_uri') or arguments.get(name + '_path') + if artifact_path: + artifact = channel_parameter.type() + artifact.uri = artifact_path.rstrip('/') + '/' # Some TFX components require that the artifact URIs end with a slash + if channel_parameter.type.PROPERTIES and 'split_names' in 
channel_parameter.type.PROPERTIES: + # Recovering splits + subdirs = tensorflow.io.gfile.listdir(artifact_path) + # Workaround for https://github.com/tensorflow/tensorflow/issues/39167 + subdirs = [subdir.rstrip('/') for subdir in subdirs] + artifact.split_names = artifact_utils.encode_split_names(sorted(subdirs)) + component_class_args[name] = channel_utils.as_channel([artifact]) + + component_class_instance = component_class(**component_class_args) + + input_dict = channel_utils.unwrap_channel_dict(component_class_instance.inputs.get_all()) + output_dict = channel_utils.unwrap_channel_dict(component_class_instance.outputs.get_all()) + exec_properties = component_class_instance.exec_properties + + # Generating paths for output artifacts + for name, artifacts in output_dict.items(): + base_artifact_path = arguments.get('output_' + name + '_uri') or arguments.get(name + '_path') + if base_artifact_path: + # Are there still cases where output channel has multiple artifacts? + for idx, artifact in enumerate(artifacts): + subdir = str(idx + 1) if idx > 0 else '' + artifact.uri = os.path.join(base_artifact_path, subdir) # Ends with '/' + + print('component instance: ' + str(component_class_instance)) + + # Workaround for a TFX+Beam bug to make DataflowRunner work. + # Remove after the next release that has https://github.com/tensorflow/tfx/commit/ddb01c02426d59e8bd541e3fd3cbaaf68779b2df + import tfx + tfx.version.__version__ += 'dev' + + executor_context = base_executor.BaseExecutor.Context( + beam_pipeline_args=beam_pipeline_args, + tmp_dir=tempfile.gettempdir(), + unique_id='tfx_component', + ) + executor = component_class_instance.executor_spec.executor_class(executor_context) + executor.Do( + input_dict=input_dict, + output_dict=output_dict, + exec_properties=exec_properties, + ) + + return (output_model_uri, ) + + import json + import argparse + _parser = argparse.ArgumentParser(prog='Trainer', description='') + _parser.add_argument("--examples-uri", dest="examples_uri", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--schema-uri", dest="schema_uri", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--output-model-uri", dest="output_model_uri", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--train-args", dest="train_args", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--eval-args", dest="eval_args", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--transform-graph-uri", dest="transform_graph_uri", type=str, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--base-model-uri", dest="base_model_uri", type=str, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--hyperparameters-uri", dest="hyperparameters_uri", type=str, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--module-file", dest="module_file", type=str, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--run-fn", dest="run_fn", type=str, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--trainer-fn", dest="trainer_fn", type=str, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--custom-config", dest="custom_config", type=json.loads, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--beam-pipeline-args", dest="beam_pipeline_args", type=json.loads, required=False, default=argparse.SUPPRESS) + _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1) 
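+      # Example of the command line this parser accepts (the file name and all
+      # argument values are hypothetical, for illustration only):
+      #   python3 -u component_program.py \
+      #     --examples-uri gs://my-bucket/examples \
+      #     --schema-uri gs://my-bucket/schema \
+      #     --output-model-uri gs://my-bucket/model \
+      #     --train-args '{"num_steps": 1000}' \
+      #     --eval-args '{"num_steps": 100}' \
+      #     ----output-paths /tmp/outputs/model_uri/data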
+      _parsed_args = vars(_parser.parse_args())
+      _output_files = _parsed_args.pop("_output_paths", [])
+
+      _outputs = Trainer(**_parsed_args)
+
+      _output_serializers = [
+          str,
+
+      ]
+
+      import os
+      for idx, output_file in enumerate(_output_files):
+          try:
+              os.makedirs(os.path.dirname(output_file))
+          except OSError:
+              pass
+          with open(output_file, 'w') as f:
+              f.write(_output_serializers[idx](_outputs[idx]))
+    args:
+    - --examples-uri
+    - {inputValue: examples_uri}
+    - --schema-uri
+    - {inputValue: schema_uri}
+    - --output-model-uri
+    - {inputValue: output_model_uri}
+    - --train-args
+    - {inputValue: train_args}
+    - --eval-args
+    - {inputValue: eval_args}
+    - if:
+        cond: {isPresent: transform_graph_uri}
+        then:
+        - --transform-graph-uri
+        - {inputValue: transform_graph_uri}
+    - if:
+        cond: {isPresent: base_model_uri}
+        then:
+        - --base-model-uri
+        - {inputValue: base_model_uri}
+    - if:
+        cond: {isPresent: hyperparameters_uri}
+        then:
+        - --hyperparameters-uri
+        - {inputValue: hyperparameters_uri}
+    - if:
+        cond: {isPresent: module_file}
+        then:
+        - --module-file
+        - {inputValue: module_file}
+    - if:
+        cond: {isPresent: run_fn}
+        then:
+        - --run-fn
+        - {inputValue: run_fn}
+    - if:
+        cond: {isPresent: trainer_fn}
+        then:
+        - --trainer-fn
+        - {inputValue: trainer_fn}
+    - if:
+        cond: {isPresent: custom_config}
+        then:
+        - --custom-config
+        - {inputValue: custom_config}
+    - if:
+        cond: {isPresent: beam_pipeline_args}
+        then:
+        - --beam-pipeline-args
+        - {inputValue: beam_pipeline_args}
+    - '----output-paths'
+    - {outputPath: model_uri}
diff --git a/components/deprecated/tfx/Transform/component.py b/components/deprecated/tfx/Transform/component.py
new file mode 100644
index 00000000000..f10551eab60
--- /dev/null
+++ b/components/deprecated/tfx/Transform/component.py
@@ -0,0 +1,132 @@
+# flake8: noqa TODO
+
+from kfp.components import InputPath, OutputPath
+
+
+def Transform(
+    examples_path: InputPath('Examples'),
+    schema_path: InputPath('Schema'),
+
+    transform_graph_path: OutputPath('TransformGraph'),
+    transformed_examples_path: OutputPath('Examples'),
+
+    module_file: str = None,
+    preprocessing_fn: str = None,
+    custom_config: dict = None,
+):
+    """A TFX component to transform the input examples.
+
+    The Transform component wraps TensorFlow Transform (tf.Transform) to
+    preprocess data in a TFX pipeline. This component will load the
+    preprocessing_fn from the input module file, preprocess both 'train' and
+    'eval' splits of the input examples, generate the `tf.Transform` output,
+    and save both the transform function and the transformed examples to the
+    locations requested by the orchestrator.
+
+    ## Providing a preprocessing function
+    The TFX executor will use the preprocessing function provided in the
+    `module_file` file to transform the examples. The Transform executor will
+    look specifically for the `preprocessing_fn()` function within that file.
+
+    An example of `preprocessing_fn()` can be found in the [user-supplied
+    code](https://github.com/tensorflow/tfx/blob/master/tfx/examples/chicago_taxi_pipeline/taxi_utils.py)
+    of the TFX Chicago Taxi pipeline example.
+
+    Args:
+      examples: A Channel of 'Examples' type (required). This should
+        contain the two splits 'train' and 'eval'.
+      schema: A Channel of 'SchemaPath' type. This should contain a single
+        schema artifact.
+      module_file: The file path to a python module file, from which the
+        'preprocessing_fn' function will be loaded. The function must have the
+        following signature.
+
+        def preprocessing_fn(inputs: Dict[Text, Any]) -> Dict[Text, Any]:
+          ...
+
+        where the values of input and returned Dict are either tf.Tensor or
+        tf.SparseTensor. Exactly one of 'module_file' or 'preprocessing_fn'
+        must be supplied.
+      preprocessing_fn: The path to a python function that implements a
+        'preprocessing_fn'. See 'module_file' for the expected signature of the
+        function. Exactly one of 'module_file' or 'preprocessing_fn' must
+        be supplied.
+
+    Returns:
+      transform_graph: Optional output 'TransformPath' channel for the output of
+        'tf.Transform', which includes an exported Tensorflow graph suitable for
+        both training and serving;
+      transformed_examples: Optional output 'ExamplesPath' channel for
+        materialized transformed examples, which includes both 'train' and
+        'eval' splits.
+
+    Raises:
+      ValueError: When both or neither of 'module_file' and 'preprocessing_fn'
+        is supplied.
+    """
+    from tfx.components.transform.component import Transform
+    component_class = Transform
+
+    #Generated code
+    import json
+    import os
+    import tensorflow
+    from google.protobuf import json_format, message
+    from tfx.types import Artifact, channel_utils, artifact_utils
+
+    arguments = locals().copy()
+
+    component_class_args = {}
+
+    for name, execution_parameter in component_class.SPEC_CLASS.PARAMETERS.items():
+        argument_value_obj = argument_value = arguments.get(name, None)
+        if argument_value is None:
+            continue
+        parameter_type = execution_parameter.type
+        if isinstance(parameter_type, type) and issubclass(parameter_type, message.Message): # Maybe FIX: execution_parameter.type can also be a tuple
+            argument_value_obj = parameter_type()
+            json_format.Parse(argument_value, argument_value_obj)
+        component_class_args[name] = argument_value_obj
+
+    for name, channel_parameter in component_class.SPEC_CLASS.INPUTS.items():
+        artifact_path = arguments[name + '_path']
+        if artifact_path:
+            artifact = channel_parameter.type()
+            artifact.uri = artifact_path + '/' # Some TFX components require that the artifact URIs end with a slash
+            if channel_parameter.type.PROPERTIES and 'split_names' in channel_parameter.type.PROPERTIES:
+                # Recovering splits
+                subdirs = tensorflow.io.gfile.listdir(artifact_path)
+                artifact.split_names = artifact_utils.encode_split_names(sorted(subdirs))
+            component_class_args[name] = channel_utils.as_channel([artifact])
+
+    component_class_instance = component_class(**component_class_args)
+
+    input_dict = {name: channel.get() for name, channel in component_class_instance.inputs.get_all().items()}
+    output_dict = {name: channel.get() for name, channel in component_class_instance.outputs.get_all().items()}
+    exec_properties = component_class_instance.exec_properties
+
+    # Generating paths for output artifacts
+    for name, artifacts in output_dict.items():
+        base_artifact_path = arguments[name + '_path']
+        # Are there still cases where output channel has multiple artifacts?
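+        # Note: for this component the surrounding loop visits both declared
+        # outputs, 'transform_graph' and 'transformed_examples'; their base
+        # paths come from transform_graph_path and transformed_examples_path
+        # through the arguments[name + '_path'] lookup above.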
+        for idx, artifact in enumerate(artifacts):
+            subdir = str(idx + 1) if idx > 0 else ''
+            artifact.uri = os.path.join(base_artifact_path, subdir) # Ends with '/'
+
+    print('component instance: ' + str(component_class_instance))
+
+    #executor = component_class.EXECUTOR_SPEC.executor_class() # Same
+    executor = component_class_instance.executor_spec.executor_class()
+    executor.Do(
+        input_dict=input_dict,
+        output_dict=output_dict,
+        exec_properties=exec_properties,
+    )
+
+
+if __name__ == '__main__':
+    import kfp
+    kfp.components.func_to_container_op(
+        Transform,
+        base_image='tensorflow/tfx:0.21.4',
+        output_component_file='component.yaml'
+    )
diff --git a/components/deprecated/tfx/Transform/component.yaml b/components/deprecated/tfx/Transform/component.yaml
new file mode 100644
index 00000000000..024987d3d49
--- /dev/null
+++ b/components/deprecated/tfx/Transform/component.yaml
@@ -0,0 +1,242 @@
+name: Transform
+description: |-
+  A TFX component to transform the input examples.
+
+  The Transform component wraps TensorFlow Transform (tf.Transform) to
+  preprocess data in a TFX pipeline. This component will load the
+  preprocessing_fn from the input module file, preprocess both 'train' and
+  'eval' splits of the input examples, generate the `tf.Transform` output,
+  and save both the transform function and the transformed examples to the
+  locations requested by the orchestrator.
+
+  ## Providing a preprocessing function
+  The TFX executor will use the preprocessing function provided in the
+  `module_file` file to transform the examples. The Transform executor will
+  look specifically for the `preprocessing_fn()` function within that file.
+
+  An example of `preprocessing_fn()` can be found in the [user-supplied
+  code](https://github.com/tensorflow/tfx/blob/master/tfx/examples/chicago_taxi_pipeline/taxi_utils.py)
+  of the TFX Chicago Taxi pipeline example.
+
+  Args:
+    examples: A Channel of 'Examples' type (required). This should
+      contain the two splits 'train' and 'eval'.
+    schema: A Channel of 'SchemaPath' type. This should contain a single
+      schema artifact.
+    module_file: The file path to a python module file, from which the
+      'preprocessing_fn' function will be loaded. The function must have the
+      following signature.
+
+      def preprocessing_fn(inputs: Dict[Text, Any]) -> Dict[Text, Any]:
+        ...
+
+      where the values of input and returned Dict are either tf.Tensor or
+      tf.SparseTensor. Exactly one of 'module_file' or 'preprocessing_fn'
+      must be supplied.
+    preprocessing_fn: The path to a python function that implements a
+      'preprocessing_fn'. See 'module_file' for the expected signature of the
+      function. Exactly one of 'module_file' or 'preprocessing_fn' must
+      be supplied.
+
+  Returns:
+    transform_graph: Optional output 'TransformPath' channel for the output of
+      'tf.Transform', which includes an exported Tensorflow graph suitable for
+      both training and serving;
+    transformed_examples: Optional output 'ExamplesPath' channel for
+      materialized transformed examples, which includes both 'train' and
+      'eval' splits.
+
+  Raises:
+    ValueError: When both or neither of 'module_file' and 'preprocessing_fn'
+      is supplied.
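+# Example argument values (hypothetical, for illustration): either
+# module_file='gs://my-bucket/taxi_utils.py' or
+# preprocessing_fn='taxi_utils.preprocessing_fn'; exactly one of the two must
+# be supplied, as the description above notes.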
+inputs: +- {name: examples, type: Examples} +- {name: schema, type: Schema} +- {name: module_file, type: String, optional: true} +- {name: preprocessing_fn, type: String, optional: true} +- {name: custom_config, type: JsonObject, optional: true} +outputs: +- {name: transform_graph, type: TransformGraph} +- {name: transformed_examples, type: Examples} +implementation: + container: + image: tensorflow/tfx:0.21.4 + command: + - python3 + - -u + - -c + - | + def _make_parent_dirs_and_return_path(file_path: str): + import os + os.makedirs(os.path.dirname(file_path), exist_ok=True) + return file_path + + def Transform( + examples_path , + schema_path , + + transform_graph_path , + transformed_examples_path , + + module_file = None, + preprocessing_fn = None, + custom_config = None, + ): + """A TFX component to transform the input examples. + + The Transform component wraps TensorFlow Transform (tf.Transform) to + preprocess data in a TFX pipeline. This component will load the + preprocessing_fn from input module file, preprocess both 'train' and 'eval' + splits of input examples, generate the `tf.Transform` output, and save both + transform function and transformed examples to orchestrator desired locations. + + ## Providing a preprocessing function + The TFX executor will use the estimator provided in the `module_file` file + to train the model. The Transform executor will look specifically for the + `preprocessing_fn()` function within that file. + + An example of `preprocessing_fn()` can be found in the [user-supplied + code]((https://github.com/tensorflow/tfx/blob/master/tfx/examples/chicago_taxi_pipeline/taxi_utils.py)) + of the TFX Chicago Taxi pipeline example. + + Args: + examples: A Channel of 'Examples' type (required). This should + contain the two splits 'train' and 'eval'. + schema: A Channel of 'SchemaPath' type. This should contain a single + schema artifact. + module_file: The file path to a python module file, from which the + 'preprocessing_fn' function will be loaded. The function must have the + following signature. + + def preprocessing_fn(inputs: Dict[Text, Any]) -> Dict[Text, Any]: + ... + + where the values of input and returned Dict are either tf.Tensor or + tf.SparseTensor. Exactly one of 'module_file' or 'preprocessing_fn' + must be supplied. + preprocessing_fn: The path to python function that implements a + 'preprocessing_fn'. See 'module_file' for expected signature of the + function. Exactly one of 'module_file' or 'preprocessing_fn' must + be supplied. + + Returns: + transform_graph: Optional output 'TransformPath' channel for output of + 'tf.Transform', which includes an exported Tensorflow graph suitable for + both training and serving; + transformed_examples: Optional output 'ExamplesPath' channel for + materialized transformed examples, which includes both 'train' and + 'eval' splits. + + Raises: + ValueError: When both or neither of 'module_file' and 'preprocessing_fn' + is supplied. 
+ """ + from tfx.components.transform.component import Transform + component_class = Transform + + #Generated code + import json + import os + import tensorflow + from google.protobuf import json_format, message + from tfx.types import Artifact, channel_utils, artifact_utils + + arguments = locals().copy() + + component_class_args = {} + + for name, execution_parameter in component_class.SPEC_CLASS.PARAMETERS.items(): + argument_value_obj = argument_value = arguments.get(name, None) + if argument_value is None: + continue + parameter_type = execution_parameter.type + if isinstance(parameter_type, type) and issubclass(parameter_type, message.Message): # Maybe FIX: execution_parameter.type can also be a tuple + argument_value_obj = parameter_type() + json_format.Parse(argument_value, argument_value_obj) + component_class_args[name] = argument_value_obj + + for name, channel_parameter in component_class.SPEC_CLASS.INPUTS.items(): + artifact_path = arguments[name + '_path'] + if artifact_path: + artifact = channel_parameter.type() + artifact.uri = artifact_path + '/' # ? + if channel_parameter.type.PROPERTIES and 'split_names' in channel_parameter.type.PROPERTIES: + # Recovering splits + subdirs = tensorflow.io.gfile.listdir(artifact_path) + artifact.split_names = artifact_utils.encode_split_names(sorted(subdirs)) + component_class_args[name] = channel_utils.as_channel([artifact]) + + component_class_instance = component_class(**component_class_args) + + input_dict = {name: channel.get() for name, channel in component_class_instance.inputs.get_all().items()} + output_dict = {name: channel.get() for name, channel in component_class_instance.outputs.get_all().items()} + exec_properties = component_class_instance.exec_properties + + # Generating paths for output artifacts + for name, artifacts in output_dict.items(): + base_artifact_path = arguments[name + '_path'] + # Are there still cases where output channel has multiple artifacts? + for idx, artifact in enumerate(artifacts): + subdir = str(idx + 1) if idx > 0 else '' + artifact.uri = os.path.join(base_artifact_path, subdir) # Ends with '/' + + print('component instance: ' + str(component_class_instance)) + + #executor = component_class.EXECUTOR_SPEC.executor_class() # Same + executor = component_class_instance.executor_spec.executor_class() + executor.Do( + input_dict=input_dict, + output_dict=output_dict, + exec_properties=exec_properties, + ) + + import json + import argparse + _parser = argparse.ArgumentParser(prog='Transform', description="A TFX component to transform the input examples.\n\n The Transform component wraps TensorFlow Transform (tf.Transform) to\n preprocess data in a TFX pipeline. This component will load the\n preprocessing_fn from input module file, preprocess both 'train' and 'eval'\n splits of input examples, generate the `tf.Transform` output, and save both\n transform function and transformed examples to orchestrator desired locations.\n\n ## Providing a preprocessing function\n The TFX executor will use the estimator provided in the `module_file` file\n to train the model. The Transform executor will look specifically for the\n `preprocessing_fn()` function within that file.\n\n An example of `preprocessing_fn()` can be found in the [user-supplied\n code]((https://github.com/tensorflow/tfx/blob/master/tfx/examples/chicago_taxi_pipeline/taxi_utils.py))\n of the TFX Chicago Taxi pipeline example.\n\n Args:\n examples: A Channel of 'Examples' type (required). 
This should\n contain the two splits 'train' and 'eval'.\n schema: A Channel of 'SchemaPath' type. This should contain a single\n schema artifact.\n module_file: The file path to a python module file, from which the\n 'preprocessing_fn' function will be loaded. The function must have the\n following signature.\n\n def preprocessing_fn(inputs: Dict[Text, Any]) -> Dict[Text, Any]:\n ...\n\n where the values of input and returned Dict are either tf.Tensor or\n tf.SparseTensor. Exactly one of 'module_file' or 'preprocessing_fn'\n must be supplied.\n preprocessing_fn: The path to python function that implements a\n 'preprocessing_fn'. See 'module_file' for expected signature of the\n function. Exactly one of 'module_file' or 'preprocessing_fn' must\n be supplied.\n\n Returns:\n transform_graph: Optional output 'TransformPath' channel for output of\n 'tf.Transform', which includes an exported Tensorflow graph suitable for\n both training and serving;\n transformed_examples: Optional output 'ExamplesPath' channel for\n materialized transformed examples, which includes both 'train' and\n 'eval' splits.\n\n Raises:\n ValueError: When both or neither of 'module_file' and 'preprocessing_fn'\n is supplied.") + _parser.add_argument("--examples", dest="examples_path", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--schema", dest="schema_path", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--module-file", dest="module_file", type=str, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--preprocessing-fn", dest="preprocessing_fn", type=str, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--custom-config", dest="custom_config", type=json.loads, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--transform-graph", dest="transform_graph_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--transformed-examples", dest="transformed_examples_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) + _parsed_args = vars(_parser.parse_args()) + _output_files = _parsed_args.pop("_output_paths", []) + + _outputs = Transform(**_parsed_args) + + _output_serializers = [ + + ] + + import os + for idx, output_file in enumerate(_output_files): + try: + os.makedirs(os.path.dirname(output_file)) + except OSError: + pass + with open(output_file, 'w') as f: + f.write(_output_serializers[idx](_outputs[idx])) + args: + - --examples + - {inputPath: examples} + - --schema + - {inputPath: schema} + - if: + cond: {isPresent: module_file} + then: + - --module-file + - {inputValue: module_file} + - if: + cond: {isPresent: preprocessing_fn} + then: + - --preprocessing-fn + - {inputValue: preprocessing_fn} + - if: + cond: {isPresent: custom_config} + then: + - --custom-config + - {inputValue: custom_config} + - --transform-graph + - {outputPath: transform_graph} + - --transformed-examples + - {outputPath: transformed_examples} diff --git a/components/deprecated/tfx/Transform/with_URI_IO/component.py b/components/deprecated/tfx/Transform/with_URI_IO/component.py new file mode 100644 index 00000000000..9927c4cd844 --- /dev/null +++ b/components/deprecated/tfx/Transform/with_URI_IO/component.py @@ -0,0 +1,101 @@ +# flake8: noqa + +from typing import NamedTuple + +def Transform( + examples_uri: 'ExamplesUri', + schema_uri: 'SchemaUri', + output_transform_graph_uri: 'TransformGraphUri', + output_transformed_examples_uri: 'ExamplesUri', + 
module_file: str = None, + preprocessing_fn: str = None, + custom_config: dict = None, + beam_pipeline_args: list = None, +) -> NamedTuple('Outputs', [ + ('transform_graph_uri', 'TransformGraphUri'), + ('transformed_examples_uri', 'ExamplesUri'), +]): + from tfx.components import Transform as component_class + + #Generated code + import json + import os + import tempfile + import tensorflow + from google.protobuf import json_format, message + from tfx.types import channel_utils, artifact_utils + from tfx.components.base import base_executor + + arguments = locals().copy() + + component_class_args = {} + + for name, execution_parameter in component_class.SPEC_CLASS.PARAMETERS.items(): + argument_value = arguments.get(name, None) + if argument_value is None: + continue + parameter_type = execution_parameter.type + if isinstance(parameter_type, type) and issubclass(parameter_type, message.Message): + argument_value_obj = parameter_type() + json_format.Parse(argument_value, argument_value_obj) + else: + argument_value_obj = argument_value + component_class_args[name] = argument_value_obj + + for name, channel_parameter in component_class.SPEC_CLASS.INPUTS.items(): + artifact_path = arguments.get(name + '_uri') or arguments.get(name + '_path') + if artifact_path: + artifact = channel_parameter.type() + artifact.uri = artifact_path.rstrip('/') + '/' # Some TFX components require that the artifact URIs end with a slash + if channel_parameter.type.PROPERTIES and 'split_names' in channel_parameter.type.PROPERTIES: + # Recovering splits + subdirs = tensorflow.io.gfile.listdir(artifact_path) + # Workaround for https://github.com/tensorflow/tensorflow/issues/39167 + subdirs = [subdir.rstrip('/') for subdir in subdirs] + artifact.split_names = artifact_utils.encode_split_names(sorted(subdirs)) + component_class_args[name] = channel_utils.as_channel([artifact]) + + component_class_instance = component_class(**component_class_args) + + input_dict = channel_utils.unwrap_channel_dict(component_class_instance.inputs.get_all()) + output_dict = channel_utils.unwrap_channel_dict(component_class_instance.outputs.get_all()) + exec_properties = component_class_instance.exec_properties + + # Generating paths for output artifacts + for name, artifacts in output_dict.items(): + base_artifact_path = arguments.get('output_' + name + '_uri') or arguments.get(name + '_path') + if base_artifact_path: + # Are there still cases where output channel has multiple artifacts? + for idx, artifact in enumerate(artifacts): + subdir = str(idx + 1) if idx > 0 else '' + artifact.uri = os.path.join(base_artifact_path, subdir) # Ends with '/' + + print('component instance: ' + str(component_class_instance)) + + # Workaround for a TFX+Beam bug to make DataflowRunner work. 
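+ # (Editor's note: appending 'dev' to the version string, as done below, makes
+ # TFX treat the installation as an unreleased build, which changes how it
+ # stages its dependencies for remote Beam runners; see the linked commit for
+ # the actual upstream fix.)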
+ # Remove after the next release that has https://github.com/tensorflow/tfx/commit/ddb01c02426d59e8bd541e3fd3cbaaf68779b2df + import tfx + tfx.version.__version__ += 'dev' + + executor_context = base_executor.BaseExecutor.Context( + beam_pipeline_args=beam_pipeline_args, + tmp_dir=tempfile.gettempdir(), + unique_id='tfx_component', + ) + executor = component_class_instance.executor_spec.executor_class(executor_context) + executor.Do( + input_dict=input_dict, + output_dict=output_dict, + exec_properties=exec_properties, + ) + + return (output_transform_graph_uri, output_transformed_examples_uri, ) + + +if __name__ == '__main__': + import kfp + kfp.components.create_component_from_func( + Transform, + base_image='tensorflow/tfx:0.21.4', + output_component_file='component.yaml' + ) diff --git a/components/deprecated/tfx/Transform/with_URI_IO/component.yaml b/components/deprecated/tfx/Transform/with_URI_IO/component.yaml new file mode 100644 index 00000000000..5a85caf7411 --- /dev/null +++ b/components/deprecated/tfx/Transform/with_URI_IO/component.yaml @@ -0,0 +1,170 @@ +name: Transform +inputs: +- {name: examples_uri, type: ExamplesUri} +- {name: schema_uri, type: SchemaUri} +- {name: output_transform_graph_uri, type: TransformGraphUri} +- {name: output_transformed_examples_uri, type: ExamplesUri} +- {name: module_file, type: String, optional: true} +- {name: preprocessing_fn, type: String, optional: true} +- {name: custom_config, type: JsonObject, optional: true} +- {name: beam_pipeline_args, type: JsonArray, optional: true} +outputs: +- {name: transform_graph_uri, type: TransformGraphUri} +- {name: transformed_examples_uri, type: ExamplesUri} +implementation: + container: + image: tensorflow/tfx:0.21.4 + command: + - python3 + - -u + - -c + - | + def Transform( + examples_uri, + schema_uri, + output_transform_graph_uri, + output_transformed_examples_uri, + module_file = None, + preprocessing_fn = None, + custom_config = None, + beam_pipeline_args = None, + ): + from tfx.components import Transform as component_class + + #Generated code + import json + import os + import tempfile + import tensorflow + from google.protobuf import json_format, message + from tfx.types import channel_utils, artifact_utils + from tfx.components.base import base_executor + + arguments = locals().copy() + + component_class_args = {} + + for name, execution_parameter in component_class.SPEC_CLASS.PARAMETERS.items(): + argument_value = arguments.get(name, None) + if argument_value is None: + continue + parameter_type = execution_parameter.type + if isinstance(parameter_type, type) and issubclass(parameter_type, message.Message): + argument_value_obj = parameter_type() + json_format.Parse(argument_value, argument_value_obj) + else: + argument_value_obj = argument_value + component_class_args[name] = argument_value_obj + + for name, channel_parameter in component_class.SPEC_CLASS.INPUTS.items(): + artifact_path = arguments.get(name + '_uri') or arguments.get(name + '_path') + if artifact_path: + artifact = channel_parameter.type() + artifact.uri = artifact_path.rstrip('/') + '/' # Some TFX components require that the artifact URIs end with a slash + if channel_parameter.type.PROPERTIES and 'split_names' in channel_parameter.type.PROPERTIES: + # Recovering splits + subdirs = tensorflow.io.gfile.listdir(artifact_path) + # Workaround for https://github.com/tensorflow/tensorflow/issues/39167 + subdirs = [subdir.rstrip('/') for subdir in subdirs] + artifact.split_names = artifact_utils.encode_split_names(sorted(subdirs)) 
+ component_class_args[name] = channel_utils.as_channel([artifact]) + + component_class_instance = component_class(**component_class_args) + + input_dict = channel_utils.unwrap_channel_dict(component_class_instance.inputs.get_all()) + output_dict = channel_utils.unwrap_channel_dict(component_class_instance.outputs.get_all()) + exec_properties = component_class_instance.exec_properties + + # Generating paths for output artifacts + for name, artifacts in output_dict.items(): + base_artifact_path = arguments.get('output_' + name + '_uri') or arguments.get(name + '_path') + if base_artifact_path: + # Are there still cases where output channel has multiple artifacts? + for idx, artifact in enumerate(artifacts): + subdir = str(idx + 1) if idx > 0 else '' + artifact.uri = os.path.join(base_artifact_path, subdir) # Ends with '/' + + print('component instance: ' + str(component_class_instance)) + + # Workaround for a TFX+Beam bug to make DataflowRunner work. + # Remove after the next release that has https://github.com/tensorflow/tfx/commit/ddb01c02426d59e8bd541e3fd3cbaaf68779b2df + import tfx + tfx.version.__version__ += 'dev' + + executor_context = base_executor.BaseExecutor.Context( + beam_pipeline_args=beam_pipeline_args, + tmp_dir=tempfile.gettempdir(), + unique_id='tfx_component', + ) + executor = component_class_instance.executor_spec.executor_class(executor_context) + executor.Do( + input_dict=input_dict, + output_dict=output_dict, + exec_properties=exec_properties, + ) + + return (output_transform_graph_uri, output_transformed_examples_uri, ) + + import json + import argparse + _parser = argparse.ArgumentParser(prog='Transform', description='') + _parser.add_argument("--examples-uri", dest="examples_uri", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--schema-uri", dest="schema_uri", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--output-transform-graph-uri", dest="output_transform_graph_uri", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--output-transformed-examples-uri", dest="output_transformed_examples_uri", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--module-file", dest="module_file", type=str, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--preprocessing-fn", dest="preprocessing_fn", type=str, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--custom-config", dest="custom_config", type=json.loads, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--beam-pipeline-args", dest="beam_pipeline_args", type=json.loads, required=False, default=argparse.SUPPRESS) + _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=2) + _parsed_args = vars(_parser.parse_args()) + _output_files = _parsed_args.pop("_output_paths", []) + + _outputs = Transform(**_parsed_args) + + _output_serializers = [ + str, + str, + + ] + + import os + for idx, output_file in enumerate(_output_files): + try: + os.makedirs(os.path.dirname(output_file)) + except OSError: + pass + with open(output_file, 'w') as f: + f.write(_output_serializers[idx](_outputs[idx])) + args: + - --examples-uri + - {inputValue: examples_uri} + - --schema-uri + - {inputValue: schema_uri} + - --output-transform-graph-uri + - {inputValue: output_transform_graph_uri} + - --output-transformed-examples-uri + - {inputValue: output_transformed_examples_uri} + - if: + cond: {isPresent: module_file} + then: + - --module-file + - {inputValue: module_file} 
+ - if:
+ cond: {isPresent: preprocessing_fn}
+ then:
+ - --preprocessing-fn
+ - {inputValue: preprocessing_fn}
+ - if:
+ cond: {isPresent: custom_config}
+ then:
+ - --custom-config
+ - {inputValue: custom_config}
+ - if:
+ cond: {isPresent: beam_pipeline_args}
+ then:
+ - --beam-pipeline-args
+ - {inputValue: beam_pipeline_args}
+ - '----output-paths'
+ - {outputPath: transform_graph_uri}
+ - {outputPath: transformed_examples_uri}
diff --git a/components/deprecated/tfx/_samples/TFX_Dataflow_pipeline.ipynb b/components/deprecated/tfx/_samples/TFX_Dataflow_pipeline.ipynb
new file mode 100644
index 00000000000..417b15be210
--- /dev/null
+++ b/components/deprecated/tfx/_samples/TFX_Dataflow_pipeline.ipynb
@@ -0,0 +1,201 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### TFX Components\n",
+ "\n",
+ "This notebook shows how to create a pipeline that uses TFX components:\n",
+ "\n",
+ "* CsvExampleGen\n",
+ "* StatisticsGen\n",
+ "* SchemaGen\n",
+ "* ExampleValidator\n",
+ "* Transform\n",
+ "* Trainer\n",
+ "* Evaluator"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Put your KFP cluster endpoint URL here if working from GCP notebooks (or local notebooks). ('https://xxxxx.notebooks.googleusercontent.com/')\n",
+ "kfp_endpoint='https://XXXXX.notebooks.googleusercontent.com/'\n",
+ "\n",
+ "# Replace with your GCS bucket, project ID and GCP region\n",
+ "root_output_uri = ''\n",
+ "project_id = ''\n",
+ "gcp_region = ''\n",
+ "\n",
+ "beam_pipeline_args = [\n",
+ " '--runner=DataflowRunner',\n",
+ " '--experiments=shuffle_mode=auto',\n",
+ " '--project=' + project_id,\n",
+ " '--temp_location=' + root_output_uri + '/tmp',\n",
+ " '--region=' + gcp_region,\n",
+ " '--disk_size_gb=50',\n",
+ "]\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "input_data_uri = 'gs://ml-pipeline-playground/tensorflow-tfx-repo/tfx/components/testdata/external/csv'\n",
+ "\n",
+ "# Only S3/GCS is supported for now.\n",
+ "module_file = 'gs://ml-pipeline-playground/tensorflow-tfx-repo/v0.21.4/tfx/examples/chicago_taxi_pipeline/taxi_utils.py'"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import kfp"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import json\n",
+ "from kfp.components import load_component_from_url\n",
+ "\n",
+ "CsvExampleGen_op = load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/0cc4bbd4/components/tfx/ExampleGen/CsvExampleGen/with_URI_IO/component.yaml')\n",
+ "StatisticsGen_op = load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/0cc4bbd4/components/tfx/StatisticsGen/with_URI_IO/component.yaml')\n",
+ "SchemaGen_op = load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/0cc4bbd4/components/tfx/SchemaGen/with_URI_IO/component.yaml')\n",
+ "ExampleValidator_op = load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/0cc4bbd4/components/tfx/ExampleValidator/with_URI_IO/component.yaml')\n",
+ "Transform_op = load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/0cc4bbd4/components/tfx/Transform/with_URI_IO/component.yaml')\n",
+ "Trainer_op = 
load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/0cc4bbd4/components/tfx/Trainer/with_URI_IO/component.yaml')\n",
+ "Evaluator_op = load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/0cc4bbd4/components/tfx/Evaluator/with_URI_IO/component.yaml')\n",
+ "\n",
+ "def tfx_pipeline(\n",
+ " input_data_uri,\n",
+ " root_output_uri,\n",
+ "):\n",
+ " generated_output_uri = root_output_uri + kfp.dsl.EXECUTION_ID_PLACEHOLDER\n",
+ "\n",
+ " examples_task = CsvExampleGen_op(\n",
+ " input_uri=input_data_uri,\n",
+ " input_config=json.dumps({\n",
+ " \"splits\": [\n",
+ " {'name': 'data', 'pattern': '*.csv'},\n",
+ " ]\n",
+ " }),\n",
+ " output_config=json.dumps({\n",
+ " \"splitConfig\": {\n",
+ " \"splits\": [\n",
+ " {'name': 'train', 'hash_buckets': 2},\n",
+ " {'name': 'eval', 'hash_buckets': 1},\n",
+ " ]\n",
+ " }\n",
+ " }),\n",
+ " beam_pipeline_args=beam_pipeline_args,\n",
+ "\n",
+ " output_examples_uri=generated_output_uri,\n",
+ " )\n",
+ " \n",
+ " statistics_task = StatisticsGen_op(\n",
+ " examples_uri=examples_task.outputs['examples_uri'],\n",
+ " beam_pipeline_args=beam_pipeline_args,\n",
+ "\n",
+ " output_statistics_uri=generated_output_uri,\n",
+ " )\n",
+ " \n",
+ " schema_task = SchemaGen_op(\n",
+ " statistics_uri=statistics_task.outputs['statistics_uri'],\n",
+ " beam_pipeline_args=beam_pipeline_args,\n",
+ "\n",
+ " output_schema_uri=generated_output_uri,\n",
+ " )\n",
+ "\n",
+ " # Performs anomaly detection based on statistics and data schema.\n",
+ " validator_task = ExampleValidator_op(\n",
+ " statistics_uri=statistics_task.outputs['statistics_uri'],\n",
+ " schema_uri=schema_task.outputs['schema_uri'],\n",
+ " beam_pipeline_args=beam_pipeline_args,\n",
+ "\n",
+ " output_anomalies_uri=generated_output_uri,\n",
+ " )\n",
+ "\n",
+ " # Performs transformations and feature engineering in training and serving.\n",
+ " transform_task = Transform_op(\n",
+ " examples_uri=examples_task.outputs['examples_uri'],\n",
+ " schema_uri=schema_task.outputs['schema_uri'],\n",
+ " module_file=module_file,\n",
+ " beam_pipeline_args=beam_pipeline_args,\n",
+ "\n",
+ " output_transform_graph_uri=generated_output_uri + '/transform_graph',\n",
+ " output_transformed_examples_uri=generated_output_uri + '/transformed_examples',\n",
+ " )\n",
+ "\n",
+ " trainer_task = Trainer_op(\n",
+ " module_file=module_file,\n",
+ " examples_uri=transform_task.outputs['transformed_examples_uri'],\n",
+ " schema_uri=schema_task.outputs['schema_uri'],\n",
+ " transform_graph_uri=transform_task.outputs['transform_graph_uri'],\n",
+ " train_args=json.dumps({'num_steps': 10000}),\n",
+ " eval_args=json.dumps({'num_steps': 5000}),\n",
+ " beam_pipeline_args=beam_pipeline_args,\n",
+ "\n",
+ " output_model_uri=generated_output_uri,\n",
+ " )\n",
+ "\n",
+ " # Uses TFMA to compute evaluation statistics over features of a model.\n",
+ " model_analyzer = Evaluator_op(\n",
+ " examples_uri=examples_task.outputs['examples_uri'],\n",
+ " model_uri=trainer_task.outputs['model_uri'],\n",
+ " feature_slicing_spec=json.dumps({\n",
+ " 'specs': [\n",
+ " {'column_for_slicing': ['trip_start_hour']},\n",
+ " ],\n",
+ " }),\n",
+ " beam_pipeline_args=beam_pipeline_args,\n",
+ "\n",
+ " output_evaluation_uri=generated_output_uri + '/evaluation',\n",
+ " output_blessing_uri=generated_output_uri + '/blessing',\n",
+ " )\n",
+ "\n",
+ "\n",
+ "kfp.Client(host=kfp_endpoint).create_run_from_pipeline_func(\n",
+ " tfx_pipeline,\n",
+ " arguments=dict(\n",
+ " 
input_data_uri=input_data_uri,\n",
+ " root_output_uri=root_output_uri,\n",
+ " ),\n",
+ ")"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.5.3"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 4
+}
diff --git a/components/deprecated/tfx/_samples/TFX_pipeline.ipynb b/components/deprecated/tfx/_samples/TFX_pipeline.ipynb
new file mode 100644
index 00000000000..0398285ec5e
--- /dev/null
+++ b/components/deprecated/tfx/_samples/TFX_pipeline.ipynb
@@ -0,0 +1,166 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### TFX Components\n",
+ "\n",
+ "This notebook shows how to create a pipeline that uses TFX components:\n",
+ "\n",
+ "* CsvExampleGen\n",
+ "* StatisticsGen\n",
+ "* SchemaGen\n",
+ "* ExampleValidator\n",
+ "* Transform\n",
+ "* Trainer\n",
+ "* Evaluator"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Put your KFP cluster endpoint URL here if working from GCP notebooks (or local notebooks). ('https://xxxxx.notebooks.googleusercontent.com/')\n",
+ "kfp_endpoint='https://XXXXX.notebooks.googleusercontent.com/'"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "input_data_uri = 'gs://ml-pipeline-playground/tensorflow-tfx-repo/tfx/components/testdata/external/csv'\n",
+ "\n",
+ "# Only S3/GCS is supported for now.\n",
+ "module_file = 'gs://ml-pipeline-playground/tensorflow-tfx-repo/v0.21.4/tfx/examples/chicago_taxi_pipeline/taxi_utils.py'"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import kfp"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import json\n",
+ "from kfp.components import load_component_from_url\n",
+ "\n",
+ "download_from_gcs_op = load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/d013b8535666641ca5a5be6ce67e69e044bbf076/components/google-cloud/storage/download/component.yaml')\n",
+ "\n",
+ "CsvExampleGen_op = load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/8c545b62/components/tfx/ExampleGen/CsvExampleGen/component.yaml')\n",
+ "StatisticsGen_op = load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/8c545b62/components/tfx/StatisticsGen/component.yaml')\n",
+ "SchemaGen_op = load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/8c545b62/components/tfx/SchemaGen/component.yaml')\n",
+ "ExampleValidator_op = load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/8c545b62/components/tfx/ExampleValidator/component.yaml')\n",
+ "Transform_op = load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/8c545b62/components/tfx/Transform/component.yaml')\n",
+ "Trainer_op = load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/8c545b62/components/tfx/Trainer/component.yaml')\n",
+ "Evaluator_op = load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/8c545b62/components/tfx/Evaluator/component.yaml')\n",
+ "\n",
+ "def tfx_pipeline(\n",
+ " input_data_uri,\n",
+ "):\n",
+ " download_task = download_from_gcs_op(\n",
+ " input_data_uri,\n",
+ " )\n",
+ "\n",
+ " examples_task = CsvExampleGen_op(\n",
+ " input=download_task.output,\n",
+ " input_config=json.dumps({\n",
+ " \"splits\": [\n",
+ " {'name': 'data', 'pattern': '*.csv'},\n",
+ " ]\n",
+ " }),\n",
+ " output_config=json.dumps({\n",
+ " \"splitConfig\": {\n",
+ " \"splits\": [\n",
+ " {'name': 'train', 'hash_buckets': 2},\n",
+ " {'name': 'eval', 'hash_buckets': 1},\n",
+ " ]\n",
+ " }\n",
+ " }),\n",
+ " )\n",
+ " \n",
+ " statistics_task = StatisticsGen_op(\n",
+ " examples=examples_task.outputs['examples'],\n",
+ " )\n",
+ " \n",
+ " schema_task = SchemaGen_op(\n",
+ " statistics=statistics_task.outputs['statistics'],\n",
+ " )\n",
+ "\n",
+ " # Performs anomaly detection based on statistics and data schema.\n",
+ " validator_task = ExampleValidator_op(\n",
+ " statistics=statistics_task.outputs['statistics'],\n",
+ " schema=schema_task.outputs['schema'],\n",
+ " )\n",
+ "\n",
+ " # Performs transformations and feature engineering in training and serving.\n",
+ " transform_task = Transform_op(\n",
+ " examples=examples_task.outputs['examples'],\n",
+ " schema=schema_task.outputs['schema'],\n",
+ " module_file=module_file,\n",
+ " )\n",
+ "\n",
+ " trainer_task = Trainer_op(\n",
+ " module_file=module_file,\n",
+ " examples=transform_task.outputs['transformed_examples'],\n",
+ " schema=schema_task.outputs['schema'],\n",
+ " transform_graph=transform_task.outputs['transform_graph'],\n",
+ " train_args=json.dumps({'num_steps': 10000}),\n",
+ " eval_args=json.dumps({'num_steps': 5000}),\n",
+ " )\n",
+ "\n",
+ " # Uses TFMA to compute evaluation statistics over features of a model.\n",
+ " model_analyzer = Evaluator_op(\n",
+ " examples=examples_task.outputs['examples'],\n",
+ " model=trainer_task.outputs['model'],\n",
+ " feature_slicing_spec=json.dumps({\n",
+ " 'specs': [\n",
+ " {'column_for_slicing': ['trip_start_hour']},\n",
+ " ],\n",
+ " }),\n",
+ " )\n",
+ "\n",
+ "\n",
+ "kfp.Client(host=kfp_endpoint).create_run_from_pipeline_func(\n",
+ " tfx_pipeline,\n",
+ " arguments=dict(\n",
+ " input_data_uri=input_data_uri,\n",
+ " ),\n",
+ ")"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.5.3"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 4
+}
\ No newline at end of file
diff --git a/components/diagnostics/diagnose_me/component.py b/components/diagnostics/diagnose_me/component.py
new file mode 100644
index 00000000000..04f8baa7acb
--- /dev/null
+++ b/components/diagnostics/diagnose_me/component.py
@@ -0,0 +1,231 @@
+# Copyright 2020 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
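+
+# Editor's sketch (hypothetical endpoint and argument values): the component
+# built from run_diagnose_me below is typically placed at the start of a
+# pipeline, for example:
+#
+#   import kfp
+#   diagnose_me_op = kfp.components.load_component_from_file('component.yaml')
+#
+#   def pipeline():
+#       diagnose_me_op(
+#           bucket='gs://my-bucket',
+#           execution_mode='HALT_ON_ERROR',
+#           project_id='my-project',
+#           target_apis='storage-api.googleapis.com,ml.googleapis.com',
+#       )
+#
+#   kfp.Client(host='https://XXXXX.notebooks.googleusercontent.com/').create_run_from_pipeline_func(
+#       pipeline, arguments={})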
+
+from typing import Any, List, NamedTuple, Optional
+
+
+def run_diagnose_me(
+ bucket: str,
+ execution_mode: str,
+ project_id: str,
+ target_apis: str,
+ quota_check: list = None,
+) -> NamedTuple('Outputs', [('bucket', str), ('project_id', str)]):
+ """ Performs environment verification specific to this pipeline.
+
+ Args:
+ bucket:
+ string name of the bucket to be checked. Must be of the format
+ gs://bucket_root/any/path/here/is/ignored where any path beyond root
+ is ignored.
+ execution_mode:
+ If set to HALT_ON_ERROR will cause any error to raise an exception.
+ This is intended to stop the data processing of a pipeline. Can be set
+ to False to only report Errors/Warnings.
+ project_id:
+ GCP project ID which is assumed to be the project under which
+ current pod is executing.
+ target_apis:
+ String consisting of a comma separated list of apis to be verified.
+ quota_check:
+ List of entries describing how much quota is required. Each entry
+ has three fields: region, metric and quota_needed. All
+ string-typed.
+ Raises:
+ RuntimeError: If configuration is not set up properly and
+ HALT_ON_ERROR flag is set.
+ """
+
+ # Installing pip3 and kfp, since the base image 'google/cloud-sdk:279.0.0'
+ # does not come with pip3 pre-installed.
+ import subprocess
+ subprocess.run([
+ 'curl', 'https://bootstrap.pypa.io/get-pip.py', '-o', 'get-pip.py'
+ ],
+ capture_output=True)
+ subprocess.run(['apt-get', 'install', 'python3-distutils', '--yes'],
+ capture_output=True)
+ subprocess.run(['python3', 'get-pip.py'], capture_output=True)
+ subprocess.run(['python3', '-m', 'pip', 'install', 'kfp>=0.1.31', '--quiet'],
+ capture_output=True)
+
+ import sys
+ from kfp.cli.diagnose_me import gcp
+
+ config_error_observed = False
+
+ quota_list = gcp.get_gcp_configuration(
+ gcp.Commands.GET_QUOTAS, human_readable=False
+ )
+
+ if quota_list.has_error:
+ print('Failed to retrieve project quota with error %s\n' % (quota_list.stderr))
+ config_error_observed = True
+ else:
+ # Check quota.
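+ # (Editor's illustration, hypothetical values:) each entry of quota_list.json_output
+ # looks like {'name': 'us-central1', 'quotas': [{'metric': 'CPUS', 'limit': 72.0,
+ # 'usage': 2.0}, ...]}, so quota_dict below maps each region to the remaining
+ # quota per metric, e.g. {'us-central1': {'CPUS': 70.0, ...}}.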
+ quota_dict = {} # Mapping from region to dict[metric, available]
+ for region_quota in quota_list.json_output:
+ quota_dict[region_quota['name']] = {}
+ for quota in region_quota['quotas']:
+ quota_dict[region_quota['name']][quota['metric']
+ ] = quota['limit'] - quota['usage']
+
+ quota_check = quota_check or [] # Default to an empty list of checks.
+ for single_check in quota_check:
+ if single_check['region'] not in quota_dict:
+ print(
+ 'Regional quota for %s does not exist in current project.\n' %
+ (single_check['region'])
+ )
+ config_error_observed = True
+ else:
+ if quota_dict[single_check['region']][single_check['metric']
+ ] < single_check['quota_needed']:
+ print(
+ 'Insufficient quota observed for %s at %s: %s is needed but only %s is available.\n'
+ % (
+ single_check['metric'], single_check['region'],
+ str(single_check['quota_needed']
+ ), str(quota_dict[single_check['region']][single_check['metric']])
+ )
+ )
+ config_error_observed = True
+
+ # Get the project ID
+ # from project configuration
+ project_config = gcp.get_gcp_configuration(
+ gcp.Commands.GET_GCLOUD_DEFAULT, human_readable=False
+ )
+ auth_project_id = None # Stays None when the configuration is unreadable.
+ if not project_config.has_error:
+ auth_project_id = project_config.parsed_output['core']['project']
+ print(
+ 'GCP credentials are configured with access to project: %s ...\n' %
+ (project_id)
+ )
+ print('Following account(s) are active under this pipeline:\n')
+ subprocess.run(['gcloud', 'auth', 'list', '--format', 'json'])
+ print('\n')
+ else:
+ print(
+ 'Project configuration is not accessible with error %s\n' %
+ (project_config.stderr),
+ file=sys.stderr
+ )
+ config_error_observed = True
+
+ if auth_project_id and auth_project_id != project_id:
+ print(
+ 'User provided project ID %s does not match the configuration %s\n' %
+ (project_id, auth_project_id),
+ file=sys.stderr
+ )
+ config_error_observed = True
+
+ # Get project buckets
+ get_project_bucket_results = gcp.get_gcp_configuration(
+ gcp.Commands.GET_STORAGE_BUCKETS, human_readable=False
+ )
+
+ if get_project_bucket_results.has_error:
+ print(
+ 'Could not retrieve project buckets with error: %s' %
+ (get_project_bucket_results.stderr),
+ file=sys.stderr
+ )
+ config_error_observed = True
+
+ # Get the root of the user provided bucket i.e. gs://root.
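+ # (Editor's illustration:) 'gs://my-bucket/some/path'.split('/') gives
+ # ['gs:', '', 'my-bucket', 'some', 'path'], so joining the first three
+ # elements with '/' recovers 'gs://my-bucket'.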
+ bucket_root = '/'.join(bucket.split('/')[0:3])
+
+ print(
+ 'Checking to see if the provided GCS bucket\n %s\nis accessible ...\n' %
+ (bucket)
+ )
+
+ if bucket_root in get_project_bucket_results.json_output:
+ print(
+ 'Provided bucket \n %s\nis accessible within the project\n %s\n' %
+ (bucket, project_id)
+ )
+
+ else:
+ print(
+ 'Could not find the bucket %s in project %s.\n' % (bucket, project_id) +
+ 'Please verify that you have provided the correct GCS bucket name.\n' +
+ 'Only the following buckets are visible in this project:\n%s' %
+ (get_project_bucket_results.parsed_output),
+ file=sys.stderr
+ )
+ config_error_observed = True
+
+ # Verify APIs that are required are enabled
+ api_config_results = gcp.get_gcp_configuration(gcp.Commands.GET_APIS)
+
+ api_status = {}
+
+ if api_config_results.has_error:
+ print(
+ 'Could not retrieve API status with error: %s' %
+ (api_config_results.stderr),
+ file=sys.stderr
+ )
+ config_error_observed = True
+
+ print('Checking APIs status ...')
+ for item in api_config_results.parsed_output:
+ api_status[item['config']['name']] = item['state']
+ # printing the results in stdout for logging purposes
+ print('%s %s' % (item['config']['name'], item['state']))
+
+ # Check if target apis are enabled
+ api_check_results = True
+ for api in target_apis.replace(' ', '').split(','):
+ if 'ENABLED' != api_status.get(api, 'DISABLED'):
+ api_check_results = False
+ print(
+ 'API \"%s\" is not accessible or not enabled. To enable this API, go to '
+ % (api) +
+ 'https://console.cloud.google.com/apis/library/%s?project=%s' %
+ (api, project_id),
+ file=sys.stderr
+ )
+ config_error_observed = True
+
+ if 'HALT_ON_ERROR' in execution_mode and config_error_observed:
+ raise RuntimeError(
+ 'There was an error in your environment configuration.\n' +
+ 'Note that resolving such issues generally requires a deep knowledge of Kubernetes.\n'
+ +
+ '\n' +
+ 'We highly recommend that you recreate the cluster and check "Allow access ..." \n'
+ +
+ 'checkbox during cluster creation to have the cluster configured automatically.\n'
+ +
+ 'For more information on this and other troubleshooting instructions refer to\n'
+ + 'our troubleshooting guide.\n' + '\n' +
+ 'If you have intentionally modified the cluster configuration, you may\n'
+ +
+ 'bypass this error by removing the execution_mode HALT_ON_ERROR flag.\n'
+ )
+
+ return (bucket, project_id) # Order matches the declared Outputs NamedTuple.
+
+
+if __name__ == '__main__':
+ import kfp.components as comp
+
+ comp.func_to_container_op(
+ run_diagnose_me,
+ base_image='google/cloud-sdk:279.0.0',
+ output_component_file='component.yaml',
+ )
diff --git a/components/diagnostics/diagnose_me/component.yaml b/components/diagnostics/diagnose_me/component.yaml
new file mode 100644
index 00000000000..48538ab91fe
--- /dev/null
+++ b/components/diagnostics/diagnose_me/component.yaml
@@ -0,0 +1,311 @@
+name: Run diagnose me
+description: |-
+ Performs environment verification specific to this pipeline.
+
+ Args:
+ bucket:
+ string name of the bucket to be checked. Must be of the format
+ gs://bucket_root/any/path/here/is/ignored where any path beyond root
+ is ignored.
+ execution_mode:
+ If set to HALT_ON_ERROR will cause any error to raise an exception.
+ This is intended to stop the data processing of a pipeline. Can be set
+ to False to only report Errors/Warnings.
+ project_id:
+ GCP project ID which is assumed to be the project under which
+ current pod is executing.
+ target_apis:
+ String consisting of a comma separated list of apis to be verified.
+ quota_check:
+ List of entries describing how much quota is required. Each entry
+ has three fields: region, metric and quota_needed. All
+ string-typed.
+ Raises:
+ RuntimeError: If configuration is not set up properly and
+ HALT_ON_ERROR flag is set.
+inputs:
+- name: bucket
+ type: String
+- name: execution_mode
+ type: String
+- name: project_id
+ type: String
+- name: target_apis
+ type: String
+- name: quota_check
+ type: JsonArray
+ optional: true
+outputs:
+- name: bucket
+ type: String
+- name: project_id
+ type: String
+implementation:
+ container:
+ image: google/cloud-sdk:279.0.0
+ command:
+ - python3
+ - -u
+ - -c
+ - |
+ from typing import NamedTuple
+
+ def run_diagnose_me(
+ bucket: str,
+ execution_mode: str,
+ project_id: str,
+ target_apis: str,
+ quota_check: list = None,
+ ) -> NamedTuple('Outputs', [('bucket', str), ('project_id', str)]):
+ """ Performs environment verification specific to this pipeline.
+
+ Args:
+ bucket:
+ string name of the bucket to be checked. Must be of the format
+ gs://bucket_root/any/path/here/is/ignored where any path beyond root
+ is ignored.
+ execution_mode:
+ If set to HALT_ON_ERROR will cause any error to raise an exception.
+ This is intended to stop the data processing of a pipeline. Can be set
+ to False to only report Errors/Warnings.
+ project_id:
+ GCP project ID which is assumed to be the project under which
+ current pod is executing.
+ target_apis:
+ String consisting of a comma separated list of apis to be verified.
+ quota_check:
+ List of entries describing how much quota is required. Each entry
+ has three fields: region, metric and quota_needed. All
+ string-typed.
+ Raises:
+ RuntimeError: If configuration is not set up properly and
+ HALT_ON_ERROR flag is set.
+ """
+
+ # Installing pip3 and kfp, since the base image 'google/cloud-sdk:279.0.0'
+ # does not come with pip3 pre-installed.
+ import subprocess
+ subprocess.run([
+ 'curl', 'https://bootstrap.pypa.io/get-pip.py', '-o', 'get-pip.py'
+ ],
+ capture_output=True)
+ subprocess.run(['apt-get', 'install', 'python3-distutils', '--yes'],
+ capture_output=True)
+ subprocess.run(['python3', 'get-pip.py'], capture_output=True)
+ subprocess.run(['python3', '-m', 'pip', 'install', 'kfp>=0.1.31', '--quiet'],
+ capture_output=True)
+
+ import sys
+ from kfp.cli.diagnose_me import gcp
+
+ config_error_observed = False
+
+ quota_list = gcp.get_gcp_configuration(
+ gcp.Commands.GET_QUOTAS, human_readable=False
+ )
+
+ if quota_list.has_error:
+ print('Failed to retrieve project quota with error %s\n' % (quota_list.stderr))
+ config_error_observed = True
+ else:
+ # Check quota.
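+ # (Editor's illustration, hypothetical values:) each entry of quota_list.json_output
+ # looks like {'name': 'us-central1', 'quotas': [{'metric': 'CPUS', 'limit': 72.0,
+ # 'usage': 2.0}, ...]}, so quota_dict below maps each region to the remaining
+ # quota per metric, e.g. {'us-central1': {'CPUS': 70.0, ...}}.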
+ quota_dict = {} # Mapping from region to dict[metric, available]
+ for region_quota in quota_list.json_output:
+ quota_dict[region_quota['name']] = {}
+ for quota in region_quota['quotas']:
+ quota_dict[region_quota['name']][quota['metric']
+ ] = quota['limit'] - quota['usage']
+
+ quota_check = quota_check or [] # Default to an empty list of checks.
+ for single_check in quota_check:
+ if single_check['region'] not in quota_dict:
+ print(
+ 'Regional quota for %s does not exist in current project.\n' %
+ (single_check['region'])
+ )
+ config_error_observed = True
+ else:
+ if quota_dict[single_check['region']][single_check['metric']
+ ] < single_check['quota_needed']:
+ print(
+ 'Insufficient quota observed for %s at %s: %s is needed but only %s is available.\n'
+ % (
+ single_check['metric'], single_check['region'],
+ str(single_check['quota_needed']
+ ), str(quota_dict[single_check['region']][single_check['metric']])
+ )
+ )
+ config_error_observed = True
+
+ # Get the project ID
+ # from project configuration
+ project_config = gcp.get_gcp_configuration(
+ gcp.Commands.GET_GCLOUD_DEFAULT, human_readable=False
+ )
+ auth_project_id = None # Stays None when the configuration is unreadable.
+ if not project_config.has_error:
+ auth_project_id = project_config.parsed_output['core']['project']
+ print(
+ 'GCP credentials are configured with access to project: %s ...\n' %
+ (project_id)
+ )
+ print('Following account(s) are active under this pipeline:\n')
+ subprocess.run(['gcloud', 'auth', 'list', '--format', 'json'])
+ print('\n')
+ else:
+ print(
+ 'Project configuration is not accessible with error %s\n' %
+ (project_config.stderr),
+ file=sys.stderr
+ )
+ config_error_observed = True
+
+ if auth_project_id and auth_project_id != project_id:
+ print(
+ 'User provided project ID %s does not match the configuration %s\n' %
+ (project_id, auth_project_id),
+ file=sys.stderr
+ )
+ config_error_observed = True
+
+ # Get project buckets
+ get_project_bucket_results = gcp.get_gcp_configuration(
+ gcp.Commands.GET_STORAGE_BUCKETS, human_readable=False
+ )
+
+ if get_project_bucket_results.has_error:
+ print(
+ 'Could not retrieve project buckets with error: %s' %
+ (get_project_bucket_results.stderr),
+ file=sys.stderr
+ )
+ config_error_observed = True
+
+ # Get the root of the user provided bucket i.e. gs://root.
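+ # (Editor's illustration:) 'gs://my-bucket/some/path'.split('/') gives
+ # ['gs:', '', 'my-bucket', 'some', 'path'], so joining the first three
+ # elements with '/' recovers 'gs://my-bucket'.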
+ bucket_root = '/'.join(bucket.split('/')[0:3])
+
+ print(
+ 'Checking to see if the provided GCS bucket\n %s\nis accessible ...\n' %
+ (bucket)
+ )
+
+ if bucket_root in get_project_bucket_results.json_output:
+ print(
+ 'Provided bucket \n %s\nis accessible within the project\n %s\n' %
+ (bucket, project_id)
+ )
+
+ else:
+ print(
+ 'Could not find the bucket %s in project %s.\n' % (bucket, project_id) +
+ 'Please verify that you have provided the correct GCS bucket name.\n' +
+ 'Only the following buckets are visible in this project:\n%s' %
+ (get_project_bucket_results.parsed_output),
+ file=sys.stderr
+ )
+ config_error_observed = True
+
+ # Verify APIs that are required are enabled
+ api_config_results = gcp.get_gcp_configuration(gcp.Commands.GET_APIS)
+
+ api_status = {}
+
+ if api_config_results.has_error:
+ print(
+ 'Could not retrieve API status with error: %s' %
+ (api_config_results.stderr),
+ file=sys.stderr
+ )
+ config_error_observed = True
+
+ print('Checking APIs status ...')
+ for item in api_config_results.parsed_output:
+ api_status[item['config']['name']] = item['state']
+ # printing the results in stdout for logging purposes
+ print('%s %s' % (item['config']['name'], item['state']))
+
+ # Check if target apis are enabled
+ api_check_results = True
+ for api in target_apis.replace(' ', '').split(','):
+ if 'ENABLED' != api_status.get(api, 'DISABLED'):
+ api_check_results = False
+ print(
+ 'API \"%s\" is not accessible or not enabled. To enable this API, go to '
+ % (api) +
+ 'https://console.cloud.google.com/apis/library/%s?project=%s' %
+ (api, project_id),
+ file=sys.stderr
+ )
+ config_error_observed = True
+
+ if 'HALT_ON_ERROR' in execution_mode and config_error_observed:
+ raise RuntimeError(
+ 'There was an error in your environment configuration.\n' +
+ 'Note that resolving such issues generally requires a deep knowledge of Kubernetes.\n'
+ +
+ '\n' +
+ 'We highly recommend that you recreate the cluster and check "Allow access ..." \n'
+ +
+ 'checkbox during cluster creation to have the cluster configured automatically.\n'
+ +
+ 'For more information on this and other troubleshooting instructions refer to\n'
+ + 'our troubleshooting guide.\n' + '\n' +
+ 'If you have intentionally modified the cluster configuration, you may\n'
+ +
+ 'bypass this error by removing the execution_mode HALT_ON_ERROR flag.\n'
+ )
+
+ return (bucket, project_id) # Order matches the declared Outputs NamedTuple.
+
+ def _serialize_str(str_value: str) -> str:
+ if not isinstance(str_value, str):
+ raise TypeError('Value "{}" has type "{}" instead of str.'.format(str(str_value), str(type(str_value))))
+ return str_value
+
+ import json
+ import argparse
+ _parser = argparse.ArgumentParser(prog='Run diagnose me', description='Performs environment verification specific to this pipeline.\n\n Args:\n bucket:\n string name of the bucket to be checked. Must be of the format\n gs://bucket_root/any/path/here/is/ignored where any path beyond root\n is ignored.\n execution_mode:\n If set to HALT_ON_ERROR will cause any error to raise an exception.\n This is intended to stop the data processing of a pipeline. Can be set\n to False to only report Errors/Warnings.\n project_id:\n GCP project ID which is assumed to be the project under which\n current pod is executing.\n target_apis:\n String consisting of a comma separated list of apis to be verified.\n quota_check:\n List of entries describing how much quota is required. Each entry\n has three fields: region, metric and quota_needed. 
All\n string-typed.\n Raises:\n RuntimeError: If configuration is not set up properly and\n HALT_ON_ERROR flag is set.')
+ _parser.add_argument("--bucket", dest="bucket", type=str, required=True, default=argparse.SUPPRESS)
+ _parser.add_argument("--execution-mode", dest="execution_mode", type=str, required=True, default=argparse.SUPPRESS)
+ _parser.add_argument("--project-id", dest="project_id", type=str, required=True, default=argparse.SUPPRESS)
+ _parser.add_argument("--target-apis", dest="target_apis", type=str, required=True, default=argparse.SUPPRESS)
+ _parser.add_argument("--quota-check", dest="quota_check", type=json.loads, required=False, default=argparse.SUPPRESS)
+ _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=2)
+ _parsed_args = vars(_parser.parse_args())
+ _output_files = _parsed_args.pop("_output_paths", [])
+
+ _outputs = run_diagnose_me(**_parsed_args)
+
+ if not hasattr(_outputs, '__getitem__') or isinstance(_outputs, str):
+ _outputs = [_outputs]
+
+ _output_serializers = [
+ _serialize_str,
+ _serialize_str,
+
+ ]
+
+ import os
+ for idx, output_file in enumerate(_output_files):
+ try:
+ os.makedirs(os.path.dirname(output_file))
+ except OSError:
+ pass
+ with open(output_file, 'w') as f:
+ f.write(_output_serializers[idx](_outputs[idx]))
+ args:
+ - --bucket
+ - inputValue: bucket
+ - --execution-mode
+ - inputValue: execution_mode
+ - --project-id
+ - inputValue: project_id
+ - --target-apis
+ - inputValue: target_apis
+ - if:
+ cond:
+ isPresent: quota_check
+ then:
+ - --quota-check
+ - inputValue: quota_check
+ - '----output-paths'
+ - outputPath: bucket
+ - outputPath: project_id
diff --git a/components/filesystem/get_file/component.yaml b/components/filesystem/get_file/component.yaml
new file mode 100644
index 00000000000..f524751fc48
--- /dev/null
+++ b/components/filesystem/get_file/component.yaml
@@ -0,0 +1,20 @@
+name: Get file
+description: Get file from directory.
+inputs:
+- {name: Directory, type: Directory}
+- {name: Subpath, type: String}
+outputs:
+- {name: File}
+implementation:
+ container:
+ image: alpine
+ command:
+ - sh
+ - -ex
+ - -c
+ - |
+ mkdir -p "$(dirname "$2")"
+ cp -r "$0/$1" "$2"
+ - inputPath: Directory
+ - inputValue: Subpath
+ - outputPath: File
diff --git a/components/filesystem/get_subdirectory/component.yaml b/components/filesystem/get_subdirectory/component.yaml
new file mode 100644
index 00000000000..3f5fb34b862
--- /dev/null
+++ b/components/filesystem/get_subdirectory/component.yaml
@@ -0,0 +1,20 @@
+name: Get subdirectory
+description: Get subdirectory from directory.
+inputs:
+- {name: Directory, type: Directory}
+- {name: Subpath, type: String}
+outputs:
+- {name: Subdir, type: Directory}
+implementation:
+ container:
+ image: alpine
+ command:
+ - sh
+ - -ex
+ - -c
+ - |
+ mkdir -p "$(dirname "$2")"
+ cp -r "$0/$1" "$2"
+ - inputPath: Directory
+ - inputValue: Subpath
+ - outputPath: Subdir
diff --git a/components/filesystem/list_items/component.yaml b/components/filesystem/list_items/component.yaml
new file mode 100644
index 00000000000..7bb41b37a13
--- /dev/null
+++ b/components/filesystem/list_items/component.yaml
@@ -0,0 +1,19 @@
+name: List items
+description: Recursively list directory contents.
+inputs: +- {name: Directory, type: Directory} +outputs: +- {name: Items} +implementation: + container: + image: alpine + command: + - sh + - -ex + - -c + - | + mkdir -p "$(dirname "$1")" + #ls --almost-all --recursive "$0" > "$1" + ls -A -R "$0" > "$1" + - inputPath: Directory + - outputPath: Items diff --git a/components/gcp/automl/create_dataset_for_tables/component.py b/components/gcp/automl/create_dataset_for_tables/component.py index 644fd647509..9239e780b15 100644 --- a/components/gcp/automl/create_dataset_for_tables/component.py +++ b/components/gcp/automl/create_dataset_for_tables/component.py @@ -24,13 +24,9 @@ def automl_create_dataset_for_tables( retry=None, #=google.api_core.gapic_v1.method.DEFAULT, timeout: float = None, #=google.api_core.gapic_v1.method.DEFAULT, metadata: dict = None, -) -> NamedTuple('Outputs', [('dataset_path', str), ('create_time', str), ('dataset_id', str)]): +) -> NamedTuple('Outputs', [('dataset_path', str), ('create_time', str), ('dataset_id', str), ('dataset_url', 'URI')]): '''automl_create_dataset_for_tables creates an empty Dataset for AutoML tables ''' - import sys - import subprocess - subprocess.run([sys.executable, '-m', 'pip', 'install', 'google-cloud-automl==0.4.0', '--quiet', '--no-warn-script-location'], env={'PIP_DISABLE_PIP_VERSION_CHECK': '1'}, check=True) - import google from google.cloud import automl client = automl.AutoMlClient() @@ -50,9 +46,19 @@ def automl_create_dataset_for_tables( ) print(dataset) dataset_id = dataset.name.rsplit('/', 1)[-1] - return (dataset.name, dataset.create_time, dataset_id) + dataset_url = 'https://console.cloud.google.com/automl-tables/locations/{region}/datasets/{dataset_id}/schemav2?project={project_id}'.format( + project_id=gcp_project_id, + region=gcp_region, + dataset_id=dataset_id, + ) + return (dataset.name, dataset.create_time, dataset_id, dataset_url) if __name__ == '__main__': import kfp - kfp.components.func_to_container_op(automl_create_dataset_for_tables, output_component_file='component.yaml', base_image='python:3.7') + kfp.components.func_to_container_op( + automl_create_dataset_for_tables, + output_component_file='component.yaml', + base_image='python:3.7', + packages_to_install=['google-cloud-automl==0.4.0'] + ) diff --git a/components/gcp/automl/create_dataset_for_tables/component.yaml b/components/gcp/automl/create_dataset_for_tables/component.yaml index 4999a022c68..74257db9fdd 100644 --- a/components/gcp/automl/create_dataset_for_tables/component.yaml +++ b/components/gcp/automl/create_dataset_for_tables/component.yaml @@ -1,71 +1,46 @@ -description: | - automl_create_dataset_for_tables creates an empty Dataset for AutoML tables +name: Automl create dataset for tables +description: automl_create_dataset_for_tables creates an empty Dataset for AutoML + tables +inputs: +- {name: gcp_project_id, type: String} +- {name: gcp_region, type: String} +- {name: display_name, type: String} +- {name: description, type: String, optional: true} +- {name: tables_dataset_metadata, type: JsonObject, default: '{}', optional: true} +- {name: retry, optional: true} +- {name: timeout, type: Float, optional: true} +- {name: metadata, type: JsonObject, optional: true} +outputs: +- {name: dataset_path, type: String} +- {name: create_time, type: String} +- {name: dataset_id, type: String} +- {name: dataset_url, type: URI} implementation: container: - args: - - --gcp-project-id - - inputValue: gcp_project_id - - --gcp-region - - inputValue: gcp_region - - --display-name - - inputValue: display_name - - if: - 
cond: - isPresent: description - then: - - --description - - inputValue: description - - if: - cond: - isPresent: tables_dataset_metadata - then: - - --tables-dataset-metadata - - inputValue: tables_dataset_metadata - - if: - cond: - isPresent: retry - then: - - --retry - - inputValue: retry - - if: - cond: - isPresent: timeout - then: - - --timeout - - inputValue: timeout - - if: - cond: - isPresent: metadata - then: - - --metadata - - inputValue: metadata - - '----output-paths' - - outputPath: dataset_path - - outputPath: create_time - - outputPath: dataset_id + image: python:3.7 command: + - sh + - -c + - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location + 'google-cloud-automl==0.4.0' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip + install --quiet --no-warn-script-location 'google-cloud-automl==0.4.0' --user) + && "$0" "$@" - python3 - -u - -c - | - from typing import NamedTuple - def automl_create_dataset_for_tables( - gcp_project_id: str, - gcp_region: str, - display_name: str, - description: str = None, - tables_dataset_metadata: dict = {}, + gcp_project_id , + gcp_region , + display_name , + description = None, + tables_dataset_metadata = {}, retry=None, #=google.api_core.gapic_v1.method.DEFAULT, - timeout: float = None, #=google.api_core.gapic_v1.method.DEFAULT, - metadata: dict = None, - ) -> NamedTuple('Outputs', [('dataset_path', str), ('create_time', str), ('dataset_id', str)]): + timeout = None, #=google.api_core.gapic_v1.method.DEFAULT, + metadata = None, + ) : '''automl_create_dataset_for_tables creates an empty Dataset for AutoML tables ''' - import sys - import subprocess - subprocess.run([sys.executable, '-m', 'pip', 'install', 'google-cloud-automl==0.4.0', '--quiet', '--no-warn-script-location'], env={'PIP_DISABLE_PIP_VERSION_CHECK': '1'}, check=True) - import google from google.cloud import automl client = automl.AutoMlClient() @@ -85,28 +60,42 @@ implementation: ) print(dataset) dataset_id = dataset.name.rsplit('/', 1)[-1] - return (dataset.name, dataset.create_time, dataset_id) + dataset_url = 'https://console.cloud.google.com/automl-tables/locations/{region}/datasets/{dataset_id}/schemav2?project={project_id}'.format( + project_id=gcp_project_id, + region=gcp_region, + dataset_id=dataset_id, + ) + return (dataset.name, dataset.create_time, dataset_id, dataset_url) import json + def _serialize_str(str_value: str) -> str: + if not isinstance(str_value, str): + raise TypeError('Value "{}" has type "{}" instead of str.'.format(str(str_value), str(type(str_value)))) + return str_value + import argparse - _missing_arg = object() - _parser = argparse.ArgumentParser(prog='Automl create dataset for tables', description='automl_create_dataset_for_tables creates an empty Dataset for AutoML tables\n') - _parser.add_argument("--gcp-project-id", dest="gcp_project_id", type=str, required=True, default=_missing_arg) - _parser.add_argument("--gcp-region", dest="gcp_region", type=str, required=True, default=_missing_arg) - _parser.add_argument("--display-name", dest="display_name", type=str, required=True, default=_missing_arg) - _parser.add_argument("--description", dest="description", type=str, required=False, default=_missing_arg) - _parser.add_argument("--tables-dataset-metadata", dest="tables_dataset_metadata", type=json.loads, required=False, default=_missing_arg) - _parser.add_argument("--retry", dest="retry", type=str, required=False, default=_missing_arg) - _parser.add_argument("--timeout", dest="timeout", type=float, 
required=False, default=_missing_arg) - _parser.add_argument("--metadata", dest="metadata", type=json.loads, required=False, default=_missing_arg) - _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=3) - _parsed_args = {k: v for k, v in vars(_parser.parse_args()).items() if v is not _missing_arg} + _parser = argparse.ArgumentParser(prog='Automl create dataset for tables', description='automl_create_dataset_for_tables creates an empty Dataset for AutoML tables') + _parser.add_argument("--gcp-project-id", dest="gcp_project_id", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--gcp-region", dest="gcp_region", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--display-name", dest="display_name", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--description", dest="description", type=str, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--tables-dataset-metadata", dest="tables_dataset_metadata", type=json.loads, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--retry", dest="retry", type=str, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--timeout", dest="timeout", type=float, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--metadata", dest="metadata", type=json.loads, required=False, default=argparse.SUPPRESS) + _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=4) + _parsed_args = vars(_parser.parse_args()) _output_files = _parsed_args.pop("_output_paths", []) _outputs = automl_create_dataset_for_tables(**_parsed_args) - if not hasattr(_outputs, '__getitem__') or isinstance(_outputs, str): - _outputs = [_outputs] + _output_serializers = [ + _serialize_str, + _serialize_str, + _serialize_str, + str, + + ] import os for idx, output_file in enumerate(_output_files): @@ -115,35 +104,41 @@ implementation: except OSError: pass with open(output_file, 'w') as f: - f.write(str(_outputs[idx])) - image: python:3.7 -inputs: -- name: gcp_project_id - type: String -- name: gcp_region - type: String -- name: display_name - type: String -- name: description - optional: true - type: String -- default: '{}' - name: tables_dataset_metadata - optional: true - type: JsonObject -- name: retry - optional: true -- name: timeout - optional: true - type: Float -- name: metadata - optional: true - type: JsonObject -name: Automl create dataset for tables -outputs: -- name: dataset_path - type: String -- name: create_time - type: String -- name: dataset_id - type: String + f.write(_output_serializers[idx](_outputs[idx])) + args: + - --gcp-project-id + - {inputValue: gcp_project_id} + - --gcp-region + - {inputValue: gcp_region} + - --display-name + - {inputValue: display_name} + - if: + cond: {isPresent: description} + then: + - --description + - {inputValue: description} + - if: + cond: {isPresent: tables_dataset_metadata} + then: + - --tables-dataset-metadata + - {inputValue: tables_dataset_metadata} + - if: + cond: {isPresent: retry} + then: + - --retry + - {inputValue: retry} + - if: + cond: {isPresent: timeout} + then: + - --timeout + - {inputValue: timeout} + - if: + cond: {isPresent: metadata} + then: + - --metadata + - {inputValue: metadata} + - '----output-paths' + - {outputPath: dataset_path} + - {outputPath: create_time} + - {outputPath: dataset_id} + - {outputPath: dataset_url} diff --git a/components/gcp/automl/create_model_for_tables/component.py 
b/components/gcp/automl/create_model_for_tables/component.py index 17bc681e7b6..205a4a064c0 100644 --- a/components/gcp/automl/create_model_for_tables/component.py +++ b/components/gcp/automl/create_model_for_tables/component.py @@ -24,24 +24,20 @@ def automl_create_model_for_tables( input_feature_column_paths: list = None, optimization_objective: str = 'MAXIMIZE_AU_PRC', train_budget_milli_node_hours: int = 1000, -) -> NamedTuple('Outputs', [('model_path', str), ('model_id', str)]): - import sys - import subprocess - subprocess.run([sys.executable, '-m', 'pip', 'install', 'google-cloud-automl==0.4.0', '--quiet', '--no-warn-script-location'], env={'PIP_DISABLE_PIP_VERSION_CHECK': '1'}, check=True) - +) -> NamedTuple('Outputs', [('model_path', str), ('model_id', str), ('model_page_url', 'URI'),]): from google.cloud import automl client = automl.AutoMlClient() location_path = client.location_path(gcp_project_id, gcp_region) model_dict = { 'display_name': display_name, - 'dataset_id': dataset_id, + 'dataset_id': dataset_id, 'tables_model_metadata': { 'target_column_spec': automl.types.ColumnSpec(name=target_column_path), 'input_feature_column_specs': [automl.types.ColumnSpec(name=path) for path in input_feature_column_paths] if input_feature_column_paths else None, 'optimization_objective': optimization_objective, 'train_budget_milli_node_hours': train_budget_milli_node_hours, - }, + }, } create_model_response = client.create_model(location_path, model_dict) @@ -50,9 +46,21 @@ def automl_create_model_for_tables( print(result) model_name = result.name model_id = model_name.rsplit('/', 1)[-1] - return (model_name, model_id) + model_url = 'https://console.cloud.google.com/automl-tables/locations/{region}/datasets/{dataset_id};modelId={model_id};task=basic/train?project={project_id}'.format( + project_id=gcp_project_id, + region=gcp_region, + dataset_id=dataset_id, + model_id=model_id, + ) + + return (model_name, model_id, model_url) if __name__ == '__main__': import kfp - kfp.components.func_to_container_op(automl_create_model_for_tables, output_component_file='component.yaml', base_image='python:3.7') + kfp.components.func_to_container_op( + automl_create_model_for_tables, + output_component_file='component.yaml', + base_image='python:3.7', + packages_to_install=['google-cloud-automl==0.4.0'] + ) diff --git a/components/gcp/automl/create_model_for_tables/component.yaml b/components/gcp/automl/create_model_for_tables/component.yaml index 3bcc0fde2c9..8b909eb1ee8 100644 --- a/components/gcp/automl/create_model_for_tables/component.yaml +++ b/components/gcp/automl/create_model_for_tables/component.yaml @@ -1,62 +1,41 @@ +name: Automl create model for tables +inputs: +- {name: gcp_project_id, type: String} +- {name: gcp_region, type: String} +- {name: display_name, type: String} +- {name: dataset_id, type: String} +- {name: target_column_path, type: String, optional: true} +- {name: input_feature_column_paths, type: JsonArray, optional: true} +- {name: optimization_objective, type: String, default: MAXIMIZE_AU_PRC, optional: true} +- {name: train_budget_milli_node_hours, type: Integer, default: '1000', optional: true} +outputs: +- {name: model_path, type: String} +- {name: model_id, type: String} +- {name: model_page_url, type: URI} implementation: container: - args: - - --gcp-project-id - - inputValue: gcp_project_id - - --gcp-region - - inputValue: gcp_region - - --display-name - - inputValue: display_name - - --dataset-id - - inputValue: dataset_id - - if: - cond: - isPresent: target_column_path 
- then: - - --target-column-path - - inputValue: target_column_path - - if: - cond: - isPresent: input_feature_column_paths - then: - - --input-feature-column-paths - - inputValue: input_feature_column_paths - - if: - cond: - isPresent: optimization_objective - then: - - --optimization-objective - - inputValue: optimization_objective - - if: - cond: - isPresent: train_budget_milli_node_hours - then: - - --train-budget-milli-node-hours - - inputValue: train_budget_milli_node_hours - - '----output-paths' - - outputPath: model_path - - outputPath: model_id + image: python:3.7 command: + - sh + - -c + - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location + 'google-cloud-automl==0.4.0' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip + install --quiet --no-warn-script-location 'google-cloud-automl==0.4.0' --user) + && "$0" "$@" - python3 - -u - -c - | - from typing import NamedTuple - def automl_create_model_for_tables( - gcp_project_id: str, - gcp_region: str, - display_name: str, - dataset_id: str, - target_column_path: str = None, - input_feature_column_paths: list = None, - optimization_objective: str = 'MAXIMIZE_AU_PRC', - train_budget_milli_node_hours: int = 1000, - ) -> NamedTuple('Outputs', [('model_path', str), ('model_id', str)]): - import sys - import subprocess - subprocess.run([sys.executable, '-m', 'pip', 'install', 'google-cloud-automl==0.4.0', '--quiet', '--no-warn-script-location'], env={'PIP_DISABLE_PIP_VERSION_CHECK': '1'}, check=True) - + gcp_project_id , + gcp_region , + display_name , + dataset_id , + target_column_path = None, + input_feature_column_paths = None, + optimization_objective = 'MAXIMIZE_AU_PRC', + train_budget_milli_node_hours = 1000, + ) : from google.cloud import automl client = automl.AutoMlClient() @@ -78,28 +57,43 @@ implementation: print(result) model_name = result.name model_id = model_name.rsplit('/', 1)[-1] - return (model_name, model_id) + model_url = 'https://console.cloud.google.com/automl-tables/locations/{region}/datasets/{dataset_id};modelId={model_id};task=basic/train?project={project_id}'.format( + project_id=gcp_project_id, + region=gcp_region, + dataset_id=dataset_id, + model_id=model_id, + ) + + return (model_name, model_id, model_url) + + def _serialize_str(str_value: str) -> str: + if not isinstance(str_value, str): + raise TypeError('Value "{}" has type "{}" instead of str.'.format(str(str_value), str(type(str_value)))) + return str_value import json import argparse - _missing_arg = object() _parser = argparse.ArgumentParser(prog='Automl create model for tables', description='') - _parser.add_argument("--gcp-project-id", dest="gcp_project_id", type=str, required=True, default=_missing_arg) - _parser.add_argument("--gcp-region", dest="gcp_region", type=str, required=True, default=_missing_arg) - _parser.add_argument("--display-name", dest="display_name", type=str, required=True, default=_missing_arg) - _parser.add_argument("--dataset-id", dest="dataset_id", type=str, required=True, default=_missing_arg) - _parser.add_argument("--target-column-path", dest="target_column_path", type=str, required=False, default=_missing_arg) - _parser.add_argument("--input-feature-column-paths", dest="input_feature_column_paths", type=json.loads, required=False, default=_missing_arg) - _parser.add_argument("--optimization-objective", dest="optimization_objective", type=str, required=False, default=_missing_arg) - _parser.add_argument("--train-budget-milli-node-hours", dest="train_budget_milli_node_hours", type=int, 
required=False, default=_missing_arg) - _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=2) - _parsed_args = {k: v for k, v in vars(_parser.parse_args()).items() if v is not _missing_arg} + _parser.add_argument("--gcp-project-id", dest="gcp_project_id", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--gcp-region", dest="gcp_region", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--display-name", dest="display_name", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--dataset-id", dest="dataset_id", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--target-column-path", dest="target_column_path", type=str, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--input-feature-column-paths", dest="input_feature_column_paths", type=json.loads, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--optimization-objective", dest="optimization_objective", type=str, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--train-budget-milli-node-hours", dest="train_budget_milli_node_hours", type=int, required=False, default=argparse.SUPPRESS) + _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=3) + _parsed_args = vars(_parser.parse_args()) _output_files = _parsed_args.pop("_output_paths", []) _outputs = automl_create_model_for_tables(**_parsed_args) - if not hasattr(_outputs, '__getitem__') or isinstance(_outputs, str): - _outputs = [_outputs] + _output_serializers = [ + _serialize_str, + _serialize_str, + str, + + ] import os for idx, output_file in enumerate(_output_files): @@ -108,34 +102,37 @@ implementation: except OSError: pass with open(output_file, 'w') as f: - f.write(str(_outputs[idx])) - image: python:3.7 -inputs: -- name: gcp_project_id - type: String -- name: gcp_region - type: String -- name: display_name - type: String -- name: dataset_id - type: String -- name: target_column_path - optional: true - type: String -- name: input_feature_column_paths - optional: true - type: JsonArray -- default: MAXIMIZE_AU_PRC - name: optimization_objective - optional: true - type: String -- default: '1000' - name: train_budget_milli_node_hours - optional: true - type: Integer -name: Automl create model for tables -outputs: -- name: model_path - type: String -- name: model_id - type: String + f.write(_output_serializers[idx](_outputs[idx])) + args: + - --gcp-project-id + - {inputValue: gcp_project_id} + - --gcp-region + - {inputValue: gcp_region} + - --display-name + - {inputValue: display_name} + - --dataset-id + - {inputValue: dataset_id} + - if: + cond: {isPresent: target_column_path} + then: + - --target-column-path + - {inputValue: target_column_path} + - if: + cond: {isPresent: input_feature_column_paths} + then: + - --input-feature-column-paths + - {inputValue: input_feature_column_paths} + - if: + cond: {isPresent: optimization_objective} + then: + - --optimization-objective + - {inputValue: optimization_objective} + - if: + cond: {isPresent: train_budget_milli_node_hours} + then: + - --train-budget-milli-node-hours + - {inputValue: train_budget_milli_node_hours} + - '----output-paths' + - {outputPath: model_path} + - {outputPath: model_id} + - {outputPath: model_page_url} diff --git a/components/gcp/automl/export_data_to_gcs/component.py b/components/gcp/automl/export_data_to_gcs/component.py new file mode 100644 index 00000000000..499129f7635 --- /dev/null +++ 
b/components/gcp/automl/export_data_to_gcs/component.py @@ -0,0 +1,53 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import NamedTuple + + +def automl_export_data_to_gcs( + dataset_path: str, + gcs_output_uri_prefix: str = None, + #retry=None, #=google.api_core.gapic_v1.method.DEFAULT, + timeout: float = None, #=google.api_core.gapic_v1.method.DEFAULT, + metadata: dict = {}, +) -> NamedTuple('Outputs', [('gcs_output_uri_prefix', str)]): + """Exports dataset data to GCS.""" + import sys + import subprocess + subprocess.run([sys.executable, "-m", "pip", "install", "google-cloud-automl==0.4.0", "--quiet", "--no-warn-script-location"], env={"PIP_DISABLE_PIP_VERSION_CHECK": "1"}, check=True) + + import google + from google.cloud import automl + client = automl.AutoMlClient() + + output_config = {"gcs_destination": {"output_uri_prefix": gcs_output_uri_prefix}} + + response = client.export_data( + name=dataset_path, + output_config=output_config, + #retry=retry or google.api_core.gapic_v1.method.DEFAULT + timeout=timeout or google.api_core.gapic_v1.method.DEFAULT, + metadata=metadata, + ) + print('Operation started:') + print(response.operation) + result = response.result() + metadata = response.metadata + print('Operation finished:') + print(metadata) + return (gcs_output_uri_prefix, ) + +if __name__ == '__main__': + import kfp + kfp.components.func_to_container_op(automl_export_data_to_gcs, output_component_file='component.yaml', base_image='python:3.7') diff --git a/components/gcp/automl/export_data_to_gcs/component.yaml b/components/gcp/automl/export_data_to_gcs/component.yaml new file mode 100644 index 00000000000..2a7778eb0d7 --- /dev/null +++ b/components/gcp/automl/export_data_to_gcs/component.yaml @@ -0,0 +1,113 @@ +name: Automl export data to gcs +description: | + Exports dataset data to GCS. 
+inputs: +- name: dataset_path + type: String +- name: gcs_output_uri_prefix + optional: true + type: String +- name: timeout + optional: true + type: Float +- default: '{}' + name: metadata + optional: true + type: JsonObject +outputs: +- name: gcs_output_uri_prefix + type: String +implementation: + container: + image: python:3.7 + command: + - python3 + - -u + - -c + - | + from typing import NamedTuple + + def automl_export_data_to_gcs( + dataset_path: str, + gcs_output_uri_prefix: str = None, + #retry=None, #=google.api_core.gapic_v1.method.DEFAULT, + timeout: float = None, #=google.api_core.gapic_v1.method.DEFAULT, + metadata: dict = {}, + ) -> NamedTuple('Outputs', [('gcs_output_uri_prefix', str)]): + """Exports dataset data to GCS.""" + import sys + import subprocess + subprocess.run([sys.executable, "-m", "pip", "install", "google-cloud-automl==0.4.0", "--quiet", "--no-warn-script-location"], env={"PIP_DISABLE_PIP_VERSION_CHECK": "1"}, check=True) + + import google + from google.cloud import automl + client = automl.AutoMlClient() + + output_config = {"gcs_destination": {"output_uri_prefix": gcs_output_uri_prefix}} + + response = client.export_data( + name=dataset_path, + output_config=output_config, + #retry=retry or google.api_core.gapic_v1.method.DEFAULT + timeout=timeout or google.api_core.gapic_v1.method.DEFAULT, + metadata=metadata, + ) + print('Operation started:') + print(response.operation) + result = response.result() + metadata = response.metadata + print('Operation finished:') + print(metadata) + return (gcs_output_uri_prefix, ) + + import json + import argparse + _parser = argparse.ArgumentParser(prog='Automl export data to gcs', description='Exports dataset data to GCS.\n') + _parser.add_argument("--dataset-path", dest="dataset_path", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--gcs-output-uri-prefix", dest="gcs_output_uri_prefix", type=str, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--timeout", dest="timeout", type=float, required=False, default=argparse.SUPPRESS) + _parser.add_argument("--metadata", dest="metadata", type=json.loads, required=False, default=argparse.SUPPRESS) + _parser.add_argument("----output-paths", dest="_output_paths", type=str, nargs=1) + _parsed_args = vars(_parser.parse_args()) + _output_files = _parsed_args.pop("_output_paths", []) + + _outputs = automl_export_data_to_gcs(**_parsed_args) + + if not hasattr(_outputs, '__getitem__') or isinstance(_outputs, str): + _outputs = [_outputs] + + _output_serializers = [ + str + ] + + import os + for idx, output_file in enumerate(_output_files): + try: + os.makedirs(os.path.dirname(output_file)) + except OSError: + pass + with open(output_file, 'w') as f: + f.write(_output_serializers[idx](_outputs[idx])) + args: + - --dataset-path + - inputValue: dataset_path + - if: + cond: + isPresent: gcs_output_uri_prefix + then: + - --gcs-output-uri-prefix + - inputValue: gcs_output_uri_prefix + - if: + cond: + isPresent: timeout + then: + - --timeout + - inputValue: timeout + - if: + cond: + isPresent: metadata + then: + - --metadata + - inputValue: metadata + - '----output-paths' + - outputPath: gcs_output_uri_prefix diff --git a/components/gcp/automl/import_data_from_bigquery/component.yaml b/components/gcp/automl/import_data_from_bigquery/component.yaml index 1a5a996ed13..2d291c755b6 100644 --- a/components/gcp/automl/import_data_from_bigquery/component.yaml +++ b/components/gcp/automl/import_data_from_bigquery/component.yaml @@ -1,30 +1,21 @@ +name: 
Automl import data from bigquery +inputs: +- name: dataset_path +- name: input_uri + type: String +- name: retry + optional: true +- name: timeout + optional: true +- name: metadata + type: JsonObject + optional: true +outputs: +- name: dataset_path + type: String implementation: container: - args: - - --dataset-path - - inputValue: dataset_path - - --input-uri - - inputValue: input_uri - - if: - cond: - isPresent: retry - then: - - --retry - - inputValue: retry - - if: - cond: - isPresent: timeout - then: - - --timeout - - inputValue: timeout - - if: - cond: - isPresent: metadata - then: - - --metadata - - inputValue: metadata - - '----output-paths' - - outputPath: dataset_path + image: python:3.7 command: - python3 - -u @@ -90,19 +81,28 @@ implementation: pass with open(output_file, 'w') as f: f.write(str(_outputs[idx])) - image: python:3.7 -inputs: -- name: dataset_path -- name: input_uri - type: String -- name: retry - optional: true -- name: timeout - optional: true -- name: metadata - optional: true - type: JsonObject -name: Automl import data from bigquery -outputs: -- name: dataset_path - type: String + args: + - --dataset-path + - inputValue: dataset_path + - --input-uri + - inputValue: input_uri + - if: + cond: + isPresent: retry + then: + - --retry + - inputValue: retry + - if: + cond: + isPresent: timeout + then: + - --timeout + - inputValue: timeout + - if: + cond: + isPresent: metadata + then: + - --metadata + - inputValue: metadata + - '----output-paths' + - outputPath: dataset_path diff --git a/components/gcp/automl/import_data_from_gcs/component.yaml b/components/gcp/automl/import_data_from_gcs/component.yaml index b5cc16c39e0..95da1038565 100644 --- a/components/gcp/automl/import_data_from_gcs/component.yaml +++ b/components/gcp/automl/import_data_from_gcs/component.yaml @@ -1,30 +1,22 @@ +name: Automl import data from gcs +inputs: +- name: dataset_path + type: String +- name: input_uris + type: JsonArray +- name: retry + optional: true +- name: timeout + optional: true +- name: metadata + type: JsonObject + optional: true +outputs: +- name: dataset_path + type: String implementation: container: - args: - - --dataset-path - - inputValue: dataset_path - - --input-uris - - inputValue: input_uris - - if: - cond: - isPresent: retry - then: - - --retry - - inputValue: retry - - if: - cond: - isPresent: timeout - then: - - --timeout - - inputValue: timeout - - if: - cond: - isPresent: metadata - then: - - --metadata - - inputValue: metadata - - '----output-paths' - - outputPath: dataset_path + image: python:3.7 command: - python3 - -u @@ -90,20 +82,28 @@ implementation: pass with open(output_file, 'w') as f: f.write(str(_outputs[idx])) - image: python:3.7 -inputs: -- name: dataset_path - type: String -- name: input_uris - type: JsonArray -- name: retry - optional: true -- name: timeout - optional: true -- name: metadata - optional: true - type: JsonObject -name: Automl import data from gcs -outputs: -- name: dataset_path - type: String + args: + - --dataset-path + - inputValue: dataset_path + - --input-uris + - inputValue: input_uris + - if: + cond: + isPresent: retry + then: + - --retry + - inputValue: retry + - if: + cond: + isPresent: timeout + then: + - --timeout + - inputValue: timeout + - if: + cond: + isPresent: metadata + then: + - --metadata + - inputValue: metadata + - '----output-paths' + - outputPath: dataset_path diff --git a/components/gcp/automl/prediction_service_batch_predict/component.yaml b/components/gcp/automl/prediction_service_batch_predict/component.yaml 
index 0299f34c4b9..6f73375988d 100644 --- a/components/gcp/automl/prediction_service_batch_predict/component.yaml +++ b/components/gcp/automl/prediction_service_batch_predict/component.yaml @@ -1,59 +1,35 @@ +name: Automl prediction service batch predict +inputs: +- name: model_path +- name: gcs_input_uris + type: String + optional: true +- name: gcs_output_uri_prefix + type: String + optional: true +- name: bq_input_uri + type: String + optional: true +- name: bq_output_uri + type: String + optional: true +- name: params + optional: true +- name: retry + optional: true +- name: timeout + optional: true +- name: metadata + type: JsonObject + optional: true +outputs: +- name: gcs_output_directory + type: String +- name: bigquery_output_dataset + type: String implementation: container: - args: - - --model-path - - inputValue: model_path - - if: - cond: - isPresent: gcs_input_uris - then: - - --gcs-input-uris - - inputValue: gcs_input_uris - - if: - cond: - isPresent: gcs_output_uri_prefix - then: - - --gcs-output-uri-prefix - - inputValue: gcs_output_uri_prefix - - if: - cond: - isPresent: bq_input_uri - then: - - --bq-input-uri - - inputValue: bq_input_uri - - if: - cond: - isPresent: bq_output_uri - then: - - --bq-output-uri - - inputValue: bq_output_uri - - if: - cond: - isPresent: params - then: - - --params - - inputValue: params - - if: - cond: - isPresent: retry - then: - - --retry - - inputValue: retry - - if: - cond: - isPresent: timeout - then: - - --timeout - - inputValue: timeout - - if: - cond: - isPresent: metadata - then: - - --metadata - - inputValue: metadata - - '----output-paths' - - outputPath: gcs_output_directory - - outputPath: bigquery_output_dataset + image: python:3.7 command: - python3 - -u @@ -139,33 +115,57 @@ implementation: pass with open(output_file, 'w') as f: f.write(str(_outputs[idx])) - image: python:3.7 -inputs: -- name: model_path -- name: gcs_input_uris - optional: true - type: String -- name: gcs_output_uri_prefix - optional: true - type: String -- name: bq_input_uri - optional: true - type: String -- name: bq_output_uri - optional: true - type: String -- name: params - optional: true -- name: retry - optional: true -- name: timeout - optional: true -- name: metadata - optional: true - type: JsonObject -name: Automl prediction service batch predict -outputs: -- name: gcs_output_directory - type: String -- name: bigquery_output_dataset - type: String + args: + - --model-path + - inputValue: model_path + - if: + cond: + isPresent: gcs_input_uris + then: + - --gcs-input-uris + - inputValue: gcs_input_uris + - if: + cond: + isPresent: gcs_output_uri_prefix + then: + - --gcs-output-uri-prefix + - inputValue: gcs_output_uri_prefix + - if: + cond: + isPresent: bq_input_uri + then: + - --bq-input-uri + - inputValue: bq_input_uri + - if: + cond: + isPresent: bq_output_uri + then: + - --bq-output-uri + - inputValue: bq_output_uri + - if: + cond: + isPresent: params + then: + - --params + - inputValue: params + - if: + cond: + isPresent: retry + then: + - --retry + - inputValue: retry + - if: + cond: + isPresent: timeout + then: + - --timeout + - inputValue: timeout + - if: + cond: + isPresent: metadata + then: + - --metadata + - inputValue: metadata + - '----output-paths' + - outputPath: gcs_output_directory + - outputPath: bigquery_output_dataset diff --git a/components/gcp/automl/split_dataset_table_column_names/component.py b/components/gcp/automl/split_dataset_table_column_names/component.py index 0f19842a03c..9ed5ebf8ea9 100644 --- 
a/components/gcp/automl/split_dataset_table_column_names/component.py +++ b/components/gcp/automl/split_dataset_table_column_names/component.py @@ -40,7 +40,7 @@ def automl_split_dataset_table_column_names( target_column_spec = [s for s in column_specs if s.display_name == target_column_name][0] feature_column_specs = [s for s in column_specs if s.display_name != target_column_name] feature_column_names = [s.name for s in feature_column_specs] - + import json return (target_column_spec.name, json.dumps(feature_column_names)) diff --git a/components/gcp/automl/split_dataset_table_column_names/component.yaml b/components/gcp/automl/split_dataset_table_column_names/component.yaml index 27e9fee18e9..a882292ec0f 100644 --- a/components/gcp/automl/split_dataset_table_column_names/component.yaml +++ b/components/gcp/automl/split_dataset_table_column_names/component.yaml @@ -1,19 +1,21 @@ +name: Automl split dataset table column names +inputs: +- name: dataset_path + type: String +- name: target_column_name + type: String +- name: table_index + type: Integer + default: '0' + optional: true +outputs: +- name: target_column_path + type: String +- name: feature_column_paths + type: JsonArray implementation: container: - args: - - --dataset-path - - inputValue: dataset_path - - --target-column-name - - inputValue: target_column_name - - if: - cond: - isPresent: table_index - then: - - --table-index - - inputValue: table_index - - '----output-paths' - - outputPath: target_column_path - - outputPath: feature_column_paths + image: python:3.7 command: - python3 - -u @@ -73,19 +75,17 @@ implementation: pass with open(output_file, 'w') as f: f.write(str(_outputs[idx])) - image: python:3.7 -inputs: -- name: dataset_path - type: String -- name: target_column_name - type: String -- default: '0' - name: table_index - optional: true - type: Integer -name: Automl split dataset table column names -outputs: -- name: target_column_path - type: String -- name: feature_column_paths - type: JsonArray + args: + - --dataset-path + - inputValue: dataset_path + - --target-column-name + - inputValue: target_column_name + - if: + cond: + isPresent: table_index + then: + - --table-index + - inputValue: table_index + - '----output-paths' + - outputPath: target_column_path + - outputPath: feature_column_paths diff --git a/components/gcp/bigquery/query/README.md b/components/gcp/bigquery/query/README.md index 4e1f738b2fb..e5802eae14f 100644 --- a/components/gcp/bigquery/query/README.md +++ b/components/gcp/bigquery/query/README.md @@ -52,11 +52,7 @@ output_gcs_path | The path to the Cloud Storage bucket containing the query outp To use the component, the following requirements must be met: * The BigQuery API is enabled. -* The component is running under a secret [Kubeflow user service account](https://www.kubeflow.org/docs/started/getting-started-gke/#gcp-service-accounts) in a Kubeflow Pipeline cluster. For example: - - ``` - bigquery_query_op(...).apply(gcp.use_gcp_secret('user-gcp-sa')) - ``` +* The component can authenticate to GCP. Refer to [Authenticating Pipelines to GCP](https://www.kubeflow.org/docs/gke/authentication-pipelines/) for details. * The Kubeflow user service account is a member of the `roles/bigquery.admin` role of the project. * The Kubeflow user service account is a member of the `roles/storage.objectCreator `role of the Cloud Storage output bucket. 
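With cluster-level authentication configured, no per-step secret is applied to the op. As a rough sketch (not part of the upstream README), a pipeline using the component might look like the following; it reuses the pinned component URL from this change and assumes the component's remaining inputs keep their defaults:

```python
import kfp.components as comp
import kfp.dsl as dsl

bigquery_query_op = comp.load_component_from_url(
    'https://raw.githubusercontent.com/kubeflow/pipelines/01a23ae8672d3b18e88adf3036071496aca3552d/components/gcp/bigquery/query/component.yaml')

@dsl.pipeline(name='Bigquery query sketch')
def pipeline(query, project_id, output_gcs_path):
    # No .apply(gcp.use_gcp_secret('user-gcp-sa')) call is needed; the step
    # obtains credentials from the cluster's GCP authentication setup.
    bigquery_query_op(
        query=query,
        project_id=project_id,
        output_gcs_path=output_gcs_path)
```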
@@ -89,7 +85,7 @@ KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar import kfp.components as comp bigquery_query_op = comp.load_component_from_url( - 'https://raw.githubusercontent.com/kubeflow/pipelines/e598176c02f45371336ccaa819409e8ec83743df/components/gcp/bigquery/query/component.yaml') + 'https://raw.githubusercontent.com/kubeflow/pipelines/01a23ae8672d3b18e88adf3036071496aca3552d/components/gcp/bigquery/query/component.yaml') help(bigquery_query_op) ``` @@ -125,7 +121,6 @@ OUTPUT_PATH = '{}/bigquery/query/questions.csv'.format(GCS_WORKING_DIR) ```python import kfp.dsl as dsl -import kfp.gcp as gcp import json @dsl.pipeline( name='Bigquery query pipeline', @@ -147,7 +142,7 @@ def pipeline( table_id=table_id, output_gcs_path=output_gcs_path, dataset_location=dataset_location, - job_config=job_config).apply(gcp.use_gcp_secret('user-gcp-sa')) + job_config=job_config) ``` #### Compile the pipeline diff --git a/components/gcp/bigquery/query/component.yaml b/components/gcp/bigquery/query/component.yaml index da9cd3f334e..a6c3a346294 100644 --- a/components/gcp/bigquery/query/component.yaml +++ b/components/gcp/bigquery/query/component.yaml @@ -59,8 +59,9 @@ outputs: type: UI metadata implementation: container: - image: gcr.io/ml-pipeline/ml-pipeline-gcp:57d9f7f1cfd458e945d297957621716062d89a49 + image: gcr.io/ml-pipeline/ml-pipeline-gcp:ad9bd5648dd0453005225779f25d8cebebc7ca00 args: [ + --ui_metadata_path, {outputPath: MLPipeline UI metadata}, kfp_component.google.bigquery, query, --query, {inputValue: query}, --project_id, {inputValue: project_id}, @@ -68,10 +69,9 @@ implementation: --table_id, {inputValue: table_id}, --dataset_location, {inputValue: dataset_location}, --output_gcs_path, {inputValue: output_gcs_path}, - --job_config, {inputValue: job_config} + --job_config, {inputValue: job_config}, ] env: KFP_POD_NAME: "{{pod.name}}" fileOutputs: output_gcs_path: /tmp/kfp/output/bigquery/query-output-path.txt - MLPipeline UI metadata: /mlpipeline-ui-metadata.json diff --git a/components/gcp/bigquery/query/sample.ipynb b/components/gcp/bigquery/query/sample.ipynb index 02a12e7be45..b1ad4c793fa 100644 --- a/components/gcp/bigquery/query/sample.ipynb +++ b/components/gcp/bigquery/query/sample.ipynb @@ -57,11 +57,7 @@ "To use the component, the following requirements must be met:\n", "\n", "* The BigQuery API is enabled.\n", - "* The component is running under a secret [Kubeflow user service account](https://www.kubeflow.org/docs/started/getting-started-gke/#gcp-service-accounts) in a Kubeflow Pipeline cluster. For example:\n", - "\n", - " ```\n", - " bigquery_query_op(...).apply(gcp.use_gcp_secret('user-gcp-sa'))\n", - " ```\n", + "* The component can authenticate to use GCP APIs. 
Refer to [Authenticating Pipelines to GCP](https://www.kubeflow.org/docs/gke/authentication-pipelines/) for details.\n", "* The Kubeflow user service account is a member of the `roles/bigquery.admin` role of the project.\n", "* The Kubeflow user service account is a member of the `roles/storage.objectCreator `role of the Cloud Storage output bucket.\n", "\n", @@ -108,7 +104,7 @@ "import kfp.components as comp\n", "\n", "bigquery_query_op = comp.load_component_from_url(\n", - " 'https://raw.githubusercontent.com/kubeflow/pipelines/e598176c02f45371336ccaa819409e8ec83743df/components/gcp/bigquery/query/component.yaml')\n", + " 'https://raw.githubusercontent.com/kubeflow/pipelines/01a23ae8672d3b18e88adf3036071496aca3552d/components/gcp/bigquery/query/component.yaml')\n", "help(bigquery_query_op)" ] }, @@ -179,7 +175,6 @@ "outputs": [], "source": [ "import kfp.dsl as dsl\n", - "import kfp.gcp as gcp\n", "import json\n", "@dsl.pipeline(\n", " name='Bigquery query pipeline',\n", @@ -201,7 +196,7 @@ " table_id=table_id, \n", " output_gcs_path=output_gcs_path, \n", " dataset_location=dataset_location, \n", - " job_config=job_config).apply(gcp.use_gcp_secret('user-gcp-sa'))" + " job_config=job_config)" ] }, { @@ -301,4 +296,4 @@ }, "nbformat": 4, "nbformat_minor": 2 -} +} \ No newline at end of file diff --git a/components/gcp/container/component_sdk/python/kfp_component/google/bigquery/_query.py b/components/gcp/container/component_sdk/python/kfp_component/google/bigquery/_query.py index bf147d7b37c..c94c81a8ea2 100644 --- a/components/gcp/container/component_sdk/python/kfp_component/google/bigquery/_query.py +++ b/components/gcp/container/component_sdk/python/kfp_component/google/bigquery/_query.py @@ -48,6 +48,8 @@ def query(query, project_id, dataset_id=None, table_id=None, job_config = bigquery.QueryJobConfig() job_config.create_disposition = bigquery.job.CreateDisposition.CREATE_IF_NEEDED job_config.write_disposition = bigquery.job.WriteDisposition.WRITE_TRUNCATE + else: + job_config = bigquery.QueryJobConfig.from_api_repr(job_config) job_id = None def cancel(): if job_id: @@ -118,10 +120,10 @@ def _dump_outputs(job, output_path, table_ref): gcp_common.dump_file(KFP_OUTPUT_PATH + 'bigquery/query-job.json', json.dumps(job.to_api_repr())) if not output_path: - output_path = '' + output_path = '-' # Replace with empty string when we upgrade to Argo version which has the fix: https://github.com/argoproj/argo/pull/1653 gcp_common.dump_file(KFP_OUTPUT_PATH + 'bigquery/query-output-path.txt', output_path) - (dataset_id, table_id) = (table_ref.dataset_id, table_ref.table_id) if table_ref else ('', '') + (dataset_id, table_id) = (table_ref.dataset_id, table_ref.table_id) if table_ref else ('-', '-') gcp_common.dump_file(KFP_OUTPUT_PATH + 'bigquery/query-dataset-id.txt', dataset_id) gcp_common.dump_file(KFP_OUTPUT_PATH + 'bigquery/query-table-id.txt', diff --git a/components/gcp/container/component_sdk/python/kfp_component/google/common/_utils.py b/components/gcp/container/component_sdk/python/kfp_component/google/common/_utils.py index fe58855a4bd..cf00fa9c363 100644 --- a/components/gcp/container/component_sdk/python/kfp_component/google/common/_utils.py +++ b/components/gcp/container/component_sdk/python/kfp_component/google/common/_utils.py @@ -102,16 +102,15 @@ def wait_operation_done(get_operation, wait_interval): Returns: The completed operation. 
""" - operation = None while True: operation = get_operation() operation_name = operation.get('name') done = operation.get('done', False) - if done: - break - logging.info('Operation {} is not done. Wait for {}s.'.format( - operation_name, wait_interval)) - time.sleep(wait_interval) + if not done: + logging.info('Operation {} is not done. Wait for {}s.'.format( + operation_name, wait_interval)) + time.sleep(wait_interval) + continue error = operation.get('error', None) if error: raise RuntimeError('Failed to complete operation {}: {} {}'.format( @@ -119,5 +118,5 @@ def wait_operation_done(get_operation, wait_interval): error.get('code', 'Unknown code'), error.get('message', 'Unknown message'), )) - return operation + return operation diff --git a/components/gcp/container/component_sdk/python/kfp_component/google/ml_engine/_common_ops.py b/components/gcp/container/component_sdk/python/kfp_component/google/ml_engine/_common_ops.py index fd3949fddbd..d60a30e173a 100644 --- a/components/gcp/container/component_sdk/python/kfp_component/google/ml_engine/_common_ops.py +++ b/components/gcp/container/component_sdk/python/kfp_component/google/ml_engine/_common_ops.py @@ -68,7 +68,7 @@ def wait_for_operation_done(ml_client, operation_name, action, wait_interval): )) return operation -def wait_for_job_done(ml_client, project_id, job_id, wait_interval): +def wait_for_job_done(ml_client, project_id, job_id, wait_interval, show_tensorboard=True): """Waits for a CMLE job done. Args: @@ -76,6 +76,7 @@ def wait_for_job_done(ml_client, project_id, job_id, wait_interval): project_id: the ID of the project which has the job job_id: the ID of the job to wait wait_interval: the interval in seconds to wait between polls. + show_tensorboard: True to dump Tensorboard metadata. Returns: The completed job. @@ -88,7 +89,7 @@ def wait_for_job_done(ml_client, project_id, job_id, wait_interval): job = ml_client.get_job(project_id, job_id) print(job) if not metadata_dumped: - _dump_job_metadata(project_id, job_id, job) + _dump_job_metadata(project_id, job_id, job, show_tensorboard=show_tensorboard) metadata_dumped = True if job.get('state', None) in ['SUCCEEDED', 'FAILED', 'CANCELLED']: break @@ -104,7 +105,7 @@ def wait_for_job_done(ml_client, project_id, job_id, wait_interval): job['state'], job.get('errorMessage', ''))) return job -def _dump_job_metadata(project_id, job_id, job): +def _dump_job_metadata(project_id, job_id, job, show_tensorboard=True): display.display(display.Link( 'https://console.cloud.google.com/mlengine/jobs/{}?project={}'.format( job_id, project_id), @@ -115,7 +116,7 @@ def _dump_job_metadata(project_id, job_id, job): project_id, job_id), 'Logs' )) - if 'trainingInput' in job and 'jobDir' in job['trainingInput']: + if show_tensorboard and 'trainingInput' in job and 'jobDir' in job['trainingInput']: display.display(display.Tensorboard( job['trainingInput']['jobDir'])) diff --git a/components/gcp/container/component_sdk/python/kfp_component/google/ml_engine/_wait_job.py b/components/gcp/container/component_sdk/python/kfp_component/google/ml_engine/_wait_job.py index 472ba2c14c5..b6d9c2e5391 100644 --- a/components/gcp/container/component_sdk/python/kfp_component/google/ml_engine/_wait_job.py +++ b/components/gcp/container/component_sdk/python/kfp_component/google/ml_engine/_wait_job.py @@ -18,7 +18,7 @@ from ._client import MLEngineClient from .. 
import common as gcp_common -def wait_job(project_id, job_id, wait_interval=30): +def wait_job(project_id, job_id, wait_interval=30, show_tensorboard=True): """Waits a MLEngine job. Args: @@ -26,6 +26,7 @@ def wait_job(project_id, job_id, wait_interval=30): job_id (str): Required. The ID of the job to wait. wait_interval (int): optional wait interval between calls to get job status. Defaults to 30. + show_tensorboard (bool): optional. True to dump Tensorboard metadata. Outputs: /tmp/kfp/output/ml_engine/job.json: The json payload of the waiting job. @@ -34,4 +35,4 @@ def wait_job(project_id, job_id, wait_interval=30): """ ml_client = MLEngineClient() with KfpExecutionContext(on_cancel=lambda: cancel_job(ml_client, project_id, job_id)): - return wait_for_job_done(ml_client, project_id, job_id, wait_interval) \ No newline at end of file + return wait_for_job_done(ml_client, project_id, job_id, wait_interval, show_tensorboard = show_tensorboard) \ No newline at end of file diff --git a/components/gcp/container/component_sdk/python/kfp_component/launcher/__main__.py b/components/gcp/container/component_sdk/python/kfp_component/launcher/__main__.py index 7d9937935c9..04f7d4b8cd2 100644 --- a/components/gcp/container/component_sdk/python/kfp_component/launcher/__main__.py +++ b/components/gcp/container/component_sdk/python/kfp_component/launcher/__main__.py @@ -15,6 +15,7 @@ import argparse import fire import importlib +import os import sys import logging from .launcher import launch @@ -26,8 +27,18 @@ def main(): description='Launch a python module or file.') parser.add_argument('file_or_module', type=str, help='Either a python file path or a module name.') + parser.add_argument( + '--ui_metadata_path', + type=str, + default='/mlpipeline-ui-metadata.json', + help='Path for the file where the mlpipeline-ui-metadata.json data ' + 'should be written.') parser.add_argument('args', nargs=argparse.REMAINDER) args = parser.parse_args() + + if args.ui_metadata_path: + os.environ['KFP_UI_METADATA_PATH'] = args.ui_metadata_path + launch(args.file_or_module, args.args) if __name__ == '__main__': diff --git a/components/gcp/container/component_sdk/python/setup.py b/components/gcp/container/component_sdk/python/setup.py index 31367f1000c..aaf85125965 100644 --- a/components/gcp/container/component_sdk/python/setup.py +++ b/components/gcp/container/component_sdk/python/setup.py @@ -15,7 +15,7 @@ from setuptools import setup PACKAGE_NAME = 'kfp-component' -VERSION = '0.1.31' +VERSION = '0.4.0' setup( name=PACKAGE_NAME, diff --git a/components/gcp/dataflow/launch_python/README.md b/components/gcp/dataflow/launch_python/README.md index be57d9e729b..dd62d02ae7e 100644 --- a/components/gcp/dataflow/launch_python/README.md +++ b/components/gcp/dataflow/launch_python/README.md @@ -1,12 +1,34 @@ # Name -Data preparation by executing an Apache Beam job in Cloud Dataflow +Component: Data preparation by executing an Apache Beam job in Cloud Dataflow # Labels -GCP, Cloud Dataflow, Apache Beam, Python, Kubeflow +Cloud Dataflow, Apache Beam, Kubeflow # Summary -A Kubeflow Pipeline component that prepares data by submitting an Apache Beam job (authored in Python) to Cloud Dataflow for execution. The Python Beam code is run with Cloud Dataflow Runner. +A Kubeflow pipeline component that prepares data by submitting an Apache Beam job (authored in Python) to Cloud Dataflow for execution. The Python Beam code is run with Cloud Dataflow Runner. 
+
+# Facets
+
+Use case:
+Other
+
+Technique:
+Other
+
+Input data type:
+Tabular
+
+ML workflow:
+Data preparation

# Details
## Intended use
@@ -16,12 +38,12 @@ Use this component to run a Python Beam code to submit a Cloud Dataflow job as a
## Runtime arguments
Name | Description | Optional | Data type| Accepted values | Default |
:--- | :----------| :----------| :----------| :----------| :---------- |
-python_file_path | The path to the Cloud Storage bucket or local directory containing the Python file to be run. | | GCSPath | | |
-project_id | The ID of the Google Cloud Platform (GCP) project containing the Cloud Dataflow job.| | GCPProjectID | | |
-staging_dir | The path to the Cloud Storage directory where the staging files are stored. A random subdirectory will be created under the staging directory to keep the job information.This is done so that you can resume the job in case of failure. `staging_dir` is passed as the command line arguments (`staging_location` and `temp_location`) of the Beam code. | Yes | GCSPath | | None |
-requirements_file_path | The path to the Cloud Storage bucket or local directory containing the pip requirements file. | Yes | GCSPath | | None |
+python_file_path | The path to the Cloud Storage bucket or local directory containing the Python file to be run. | - | GCSPath | - | - |
+project_id | The ID of the Google Cloud Platform (GCP) project containing the Cloud Dataflow job.| -| GCPProjectID | -| -|
+staging_dir | The path to the Cloud Storage directory where the staging files are stored. A random subdirectory will be created under the staging directory to keep the job information. This is done so that you can resume the job in case of failure. The command line arguments, `staging_location` and `temp_location`, of the Beam code are passed through `staging_dir`. | Yes | GCSPath | - | None |
+requirements_file_path | The path to the Cloud Storage bucket or local directory containing the pip requirements file. | Yes | GCSPath | - | None |
args | The list of arguments to pass to the Python file. | No | List | A list of string arguments | None |
-wait_interval | The number of seconds to wait between calls to get the status of the job. | Yes | Integer | | 30 |
+wait_interval | The number of seconds to wait between calls to get the status of the job. | Yes | Integer | - | 30 |

## Input data schema

@@ -31,25 +53,21 @@ Before you use the component, the following files must be ready in a Cloud Stora
The Beam Python code should follow the [Beam programming guide](https://beam.apache.org/documentation/programming-guide/) as well as the following additional requirements to be compatible with this component:
- It accepts the command line arguments `--project`, `--temp_location`, `--staging_location`, which are [standard Dataflow Runner options](https://cloud.google.com/dataflow/docs/guides/specifying-exec-params#setting-other-cloud-pipeline-options).
-- It enables `info logging` before the start of a Cloud Dataflow job in the Python code. This is important to allow the component to track the status and ID of the job that is created. For example, calling `logging.getLogger().setLevel(logging.INFO)` before any other code.
-
+- It enables `info logging` before the start of a Cloud Dataflow job in the Python code. This allows the component to track the status and ID of the job that is created. For example, calling `logging.getLogger().setLevel(logging.INFO)` before any other code.
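For illustration, a minimal Beam entry point that satisfies both of these requirements might look like the sketch below. It is a simplified, hypothetical stand-in for the wc.py sample quoted later in this README, not code shipped with the component:

```python
import argparse
import logging

import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions


def run(argv=None):
    # Enable info logging before launching the job so the component can
    # parse the Cloud Dataflow job ID and status from the logs.
    logging.getLogger().setLevel(logging.INFO)

    parser = argparse.ArgumentParser()
    parser.add_argument('--output', required=True, help='Output path prefix.')
    known_args, pipeline_args = parser.parse_known_args(argv)

    # --project, --temp_location, and --staging_location remain in
    # pipeline_args and are consumed by the standard Dataflow Runner options.
    with beam.Pipeline(options=PipelineOptions(pipeline_args)) as p:
        (p
         | beam.Create(['hello', 'world'])
         | beam.io.WriteToText(known_args.output))


if __name__ == '__main__':
    run()
```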
## Output
Name | Description
:--- | :----------
-job_id | The id of the Cloud Dataflow job that is created.
+job_id | The ID of the Cloud Dataflow job that is created.

## Cautions & requirements
To use the components, the following requirements must be met:
- Cloud Dataflow API is enabled.
-- The component is running under a secret Kubeflow user service account in a Kubeflow Pipeline cluster. For example:
-```
-component_op(...).apply(gcp.use_gcp_secret('user-gcp-sa'))
-```
-The Kubeflow user service account is a member of:
-- `roles/dataflow.developer` role of the project.
-- `roles/storage.objectViewer` role of the Cloud Storage Objects `python_file_path` and `requirements_file_path`.
-- `roles/storage.objectCreator` role of the Cloud Storage Object `staging_dir`.
+- The component can authenticate to GCP. Refer to [Authenticating Pipelines to GCP](https://www.kubeflow.org/docs/gke/authentication-pipelines/) for details.
+- The Kubeflow user service account is a member of:
+  - `roles/dataflow.developer` role of the project.
+  - `roles/storage.objectViewer` role of the Cloud Storage Objects `python_file_path` and `requirements_file_path`.
+  - `roles/storage.objectCreator` role of the Cloud Storage Object `staging_dir`.

## Detailed description
The component does several things during the execution:
@@ -59,83 +77,49 @@ The component does several things during the execution:
- Stores the Cloud Dataflow job information in `staging_dir` so the job can be resumed in case of failure.
- Waits for the job to finish.
The steps to use the component in a pipeline are:
-1. Install the Kubeflow Pipelines SDK:
+1. Install the Kubeflow Pipelines SDK:
+   ```python
+   %%capture --no-stderr
+   KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar.gz'
+   !pip3 install $KFP_PACKAGE --upgrade
+   ```
-```python
-%%capture --no-stderr
-
-KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar.gz'
-!pip3 install $KFP_PACKAGE --upgrade
-```
-
-2. Load the component using KFP SDK
+2. Load the component using the Kubeflow Pipelines SDK:
+   ```python
+   import kfp.components as comp
-```python
-import kfp.components as comp
-
-dataflow_python_op = comp.load_component_from_url(
-    'https://raw.githubusercontent.com/kubeflow/pipelines/e598176c02f45371336ccaa819409e8ec83743df/components/gcp/dataflow/launch_python/component.yaml')
-help(dataflow_python_op)
-```
+   dataflow_python_op = comp.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/01a23ae8672d3b18e88adf3036071496aca3552d/components/gcp/dataflow/launch_python/component.yaml')
+   help(dataflow_python_op)
+   ```

### Sample
-Note: The following sample code works in an IPython notebook or directly in Python code. See the sample code below to learn how to execute the template.
-In this sample, we run a wordcount sample code in a Kubeflow Pipeline. The output will be stored in a Cloud Storage bucket. Here is the sample code:
-
+The following sample code works in an IPython notebook or directly in Python code. See the sample code below to learn how to use the component.
+In this sample, we run a wordcount sample in a Kubeflow pipeline. The output will be stored in a Cloud Storage bucket. Here is the sample code:
```python
!gsutil cat gs://ml-pipeline-playground/samples/dataflow/wc/wc.py
```
-
-    #
-    # Licensed to the Apache Software Foundation (ASF) under one or more
-    # contributor license agreements.
See the NOTICE file distributed with - # this work for additional information regarding copyright ownership. - # The ASF licenses this file to You under the Apache License, Version 2.0 - # (the "License"); you may not use this file except in compliance with - # the License. You may obtain a copy of the License at - # - # http://www.apache.org/licenses/LICENSE-2.0 - # - # Unless required by applicable law or agreed to in writing, software - # distributed under the License is distributed on an "AS IS" BASIS, - # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - # See the License for the specific language governing permissions and - # limitations under the License. - # - - """A minimalist word-counting workflow that counts words in Shakespeare. - - This is the first in a series of successively more detailed 'word count' - examples. - - Next, see the wordcount pipeline, then the wordcount_debugging pipeline, for - more detailed examples that introduce additional concepts. - Concepts: +Concepts: - 1. Reading data from text files - 2. Specifying 'inline' transforms - 3. Counting a PCollection - 4. Writing data to Cloud Storage as text files - - To execute this pipeline locally, first edit the code to specify the output - location. Output location could be a local file path or an output prefix - on GCS. (Only update the output location marked with the first CHANGE comment.) +1. Reading data from text files. +2. Specifying inline transforms. +3. Counting a PCollection. +4. Writing data to Cloud Storage as text files. + +Notes: + +To execute this pipeline locally, first edit the code to specify the output location. Output location could be a local file path or an output prefix on Cloud Storage. (Only update the output location marked with the first CHANGE comment in the following code.) - To execute this pipeline remotely, first edit the code to set your project ID, - runner type, the staging location, the temp location, and the output location. - The specified GCS bucket(s) must already exist. (Update all the places marked - with a CHANGE comment.) +To execute this pipeline remotely, first edit the code to set your project ID, runner type, the staging location, the temp location, and the output location. +The specified Cloud Storage bucket(s) must already exist. (Update all the places marked with a CHANGE comment in the following code.) - Then, run the pipeline as described in the README. It will be deployed and run - using the Google Cloud Dataflow Service. No args are required to run the - pipeline. You can see the results in your output bucket in the GCS browser. - """ +Then, run the pipeline as described in the README. It will be deployed and run using the Cloud Dataflow service. No arguments are required to run the pipeline. You can see the results in your output bucket in the Cloud Storage browser. +```python from __future__ import absolute_import import argparse @@ -161,22 +145,22 @@ In this sample, we run a wordcount sample code in a Kubeflow Pipeline. The outpu help='Input file to process.') parser.add_argument('--output', dest='output', - # CHANGE 1/5: The Google Cloud Storage path is required - # for outputting the results. + # CHANGE 1/5: The Cloud Storage path is required + # to output the results. 
default='gs://YOUR_OUTPUT_BUCKET/AND_OUTPUT_PREFIX', help='Output file to write results to.') known_args, pipeline_args = parser.parse_known_args(argv) # pipeline_args.extend([ # # CHANGE 2/5: (OPTIONAL) Change this to DataflowRunner to - # # run your pipeline on the Google Cloud Dataflow Service. + # # run your pipeline on the Cloud Dataflow Service. # '--runner=DirectRunner', # # CHANGE 3/5: Your project ID is required in order to run your pipeline on - # # the Google Cloud Dataflow Service. + # # the Cloud Dataflow Service. # '--project=SET_YOUR_PROJECT_ID_HERE', - # # CHANGE 4/5: Your Google Cloud Storage path is required for staging local + # # CHANGE 4/5: Your Cloud Storage path is required for staging local # # files. # '--staging_location=gs://YOUR_BUCKET_NAME/AND_STAGING_DIRECTORY', - # # CHANGE 5/5: Your Google Cloud Storage path is required for temporary + # # CHANGE 5/5: Your Cloud Storage path is required for temporary # # files. # '--temp_location=gs://YOUR_BUCKET_NAME/AND_TEMP_DIRECTORY', # '--job_name=your-wordcount-job', @@ -214,30 +198,26 @@ In this sample, we run a wordcount sample code in a Kubeflow Pipeline. The outpu if __name__ == '__main__': logging.getLogger().setLevel(logging.INFO) run() - +``` #### Set sample parameters - ```python -# Required Parameters -PROJECT_ID = '' -GCS_STAGING_DIR = 'gs://' # No ending slash +# Required parameters +PROJECT_ID = '' +GCS_STAGING_DIR = 'gs://' # No ending slash ``` - ```python -# Optional Parameters +# Optional parameters EXPERIMENT_NAME = 'Dataflow - Launch Python' OUTPUT_FILE = '{}/wc/wordcount.out'.format(GCS_STAGING_DIR) ``` #### Example pipeline that uses the component - ```python import kfp.dsl as dsl -import kfp.gcp as gcp import json @dsl.pipeline( name='Dataflow launch python pipeline', @@ -259,12 +239,11 @@ def pipeline( staging_dir = staging_dir, requirements_file_path = requirements_file_path, args = args, - wait_interval = wait_interval).apply(gcp.use_gcp_secret('user-gcp-sa')) + wait_interval = wait_interval) ``` #### Compile the pipeline - ```python pipeline_func = pipeline pipeline_filename = pipeline_func.__name__ + '.zip' @@ -274,12 +253,11 @@ compiler.Compiler().compile(pipeline_func, pipeline_filename) #### Submit the pipeline for execution - ```python -#Specify pipeline argument values +#Specify values for the pipeline's arguments arguments = {} -#Get or create an experiment and submit a pipeline run +#Get or create an experiment import kfp client = kfp.Client() experiment = client.create_experiment(EXPERIMENT_NAME) @@ -291,14 +269,13 @@ run_result = client.run_pipeline(experiment.id, run_name, pipeline_filename, arg #### Inspect the output - ```python !gsutil cat $OUTPUT_FILE ``` ## References -* [Component python code](https://github.com/kubeflow/pipelines/blob/master/components/gcp/container/component_sdk/python/kfp_component/google/dataflow/_launch_python.py) -* [Component docker file](https://github.com/kubeflow/pipelines/blob/master/components/gcp/container/Dockerfile) +* [Component Python code](https://github.com/kubeflow/pipelines/blob/master/components/gcp/container/component_sdk/python/kfp_component/google/dataflow/_launch_python.py) +* [Component Docker file](https://github.com/kubeflow/pipelines/blob/master/components/gcp/container/Dockerfile) * [Sample notebook](https://github.com/kubeflow/pipelines/blob/master/components/gcp/dataflow/launch_python/sample.ipynb) * [Dataflow Python Quickstart](https://cloud.google.com/dataflow/docs/quickstarts/quickstart-python) diff --git 
a/components/gcp/dataflow/launch_python/component.yaml b/components/gcp/dataflow/launch_python/component.yaml index e3f1978ef14..21fe7b8bab2 100644 --- a/components/gcp/dataflow/launch_python/component.yaml +++ b/components/gcp/dataflow/launch_python/component.yaml @@ -53,18 +53,18 @@ outputs: type: UI metadata implementation: container: - image: gcr.io/ml-pipeline/ml-pipeline-gcp:57d9f7f1cfd458e945d297957621716062d89a49 + image: gcr.io/ml-pipeline/ml-pipeline-gcp:ad9bd5648dd0453005225779f25d8cebebc7ca00 args: [ + --ui_metadata_path, {outputPath: MLPipeline UI metadata}, kfp_component.google.dataflow, launch_python, --python_file_path, {inputValue: python_file_path}, --project_id, {inputValue: project_id}, --staging_dir, {inputValue: staging_dir}, --requirements_file_path, {inputValue: requirements_file_path}, --args, {inputValue: args}, - --wait_interval, {inputValue: wait_interval} + --wait_interval, {inputValue: wait_interval}, ] env: KFP_POD_NAME: "{{pod.name}}" fileOutputs: job_id: /tmp/kfp/output/dataflow/job_id.txt - MLPipeline UI metadata: /mlpipeline-ui-metadata.json diff --git a/components/gcp/dataflow/launch_python/sample.ipynb b/components/gcp/dataflow/launch_python/sample.ipynb index 80adefc4f37..745a3cb6806 100644 --- a/components/gcp/dataflow/launch_python/sample.ipynb +++ b/components/gcp/dataflow/launch_python/sample.ipynb @@ -47,14 +47,11 @@ "## Cautions & requirements\n", "To use the components, the following requirements must be met:\n", "- Cloud Dataflow API is enabled.\n", - "- The component is running under a secret Kubeflow user service account in a Kubeflow Pipeline cluster. For example:\n", - "```\n", - "component_op(...).apply(gcp.use_gcp_secret('user-gcp-sa'))\n", - "```\n", - "The Kubeflow user service account is a member of:\n", - "- `roles/dataflow.developer` role of the project.\n", - "- `roles/storage.objectViewer` role of the Cloud Storage Objects `python_file_path` and `requirements_file_path`.\n", - "- `roles/storage.objectCreator` role of the Cloud Storage Object `staging_dir`. \n", + "- The component can authenticate to GCP. Refer to [Authenticating Pipelines to GCP](https://www.kubeflow.org/docs/gke/authentication-pipelines/) for details.\n", + "- The Kubeflow user service account is a member of:\n", + " - `roles/dataflow.developer` role of the project.\n", + " - `roles/storage.objectViewer` role of the Cloud Storage Objects `python_file_path` and `requirements_file_path`.\n", + " - `roles/storage.objectCreator` role of the Cloud Storage Object `staging_dir`. 
\n", "\n", "## Detailed description\n", "The component does several things during the execution:\n", @@ -95,7 +92,7 @@ "import kfp.components as comp\n", "\n", "dataflow_python_op = comp.load_component_from_url(\n", - " 'https://raw.githubusercontent.com/kubeflow/pipelines/e598176c02f45371336ccaa819409e8ec83743df/components/gcp/dataflow/launch_python/component.yaml')\n", + " 'https://raw.githubusercontent.com/kubeflow/pipelines/01a23ae8672d3b18e88adf3036071496aca3552d/components/gcp/dataflow/launch_python/component.yaml')\n", "help(dataflow_python_op)" ] }, @@ -295,7 +292,6 @@ "outputs": [], "source": [ "import kfp.dsl as dsl\n", - "import kfp.gcp as gcp\n", "import json\n", "@dsl.pipeline(\n", " name='Dataflow launch python pipeline',\n", @@ -317,7 +313,7 @@ " staging_dir = staging_dir, \n", " requirements_file_path = requirements_file_path, \n", " args = args,\n", - " wait_interval = wait_interval).apply(gcp.use_gcp_secret('user-gcp-sa'))" + " wait_interval = wait_interval)" ] }, { @@ -417,4 +413,4 @@ }, "nbformat": 4, "nbformat_minor": 2 -} +} \ No newline at end of file diff --git a/components/gcp/dataflow/launch_template/README.md b/components/gcp/dataflow/launch_template/README.md index 32773fcfa6c..afb5067436e 100644 --- a/components/gcp/dataflow/launch_template/README.md +++ b/components/gcp/dataflow/launch_template/README.md @@ -37,11 +37,8 @@ job_id | The id of the Cloud Dataflow job that is created. To use the component, the following requirements must be met: - Cloud Dataflow API is enabled. -- The component is running under a secret [Kubeflow user service account](https://www.kubeflow.org/docs/started/getting-started-gke/#gcp-service-accounts) in a Kubeflow Pipeline cluster. For example: - ``` - component_op(...).apply(gcp.use_gcp_secret('user-gcp-sa')) - ``` -* The Kubeflow user service account is a member of: +- The component can authenticate to GCP. Refer to [Authenticating Pipelines to GCP](https://www.kubeflow.org/docs/gke/authentication-pipelines/) for details. +- The Kubeflow user service account is a member of: - `roles/dataflow.developer` role of the project. 
- `roles/storage.objectViewer` role of the Cloud Storage Object `gcs_path`.
 - `roles/storage.objectCreator` role of the Cloud Storage Object `staging_dir`.

@@ -67,7 +64,7 @@ KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar

import kfp.components as comp

dataflow_template_op = comp.load_component_from_url(
-    'https://raw.githubusercontent.com/kubeflow/pipelines/e598176c02f45371336ccaa819409e8ec83743df/components/gcp/dataflow/launch_template/component.yaml')
+    'https://raw.githubusercontent.com/kubeflow/pipelines/01a23ae8672d3b18e88adf3036071496aca3552d/components/gcp/dataflow/launch_template/component.yaml')
help(dataflow_template_op)
```

@@ -102,7 +99,6 @@ OUTPUT_PATH = '{}/out/wc'.format(GCS_WORKING_DIR)

```python
import kfp.dsl as dsl
-import kfp.gcp as gcp
import json
@dsl.pipeline(
    name='Dataflow launch template pipeline',
@@ -128,7 +124,7 @@ def pipeline(
        location = location,
        validate_only = validate_only,
        staging_dir = staging_dir,
-        wait_interval = wait_interval).apply(gcp.use_gcp_secret('user-gcp-sa'))
+        wait_interval = wait_interval)
```

#### Compile the pipeline
diff --git a/components/gcp/dataflow/launch_template/component.yaml b/components/gcp/dataflow/launch_template/component.yaml
index 071033f0961..ca86d350e27 100644
--- a/components/gcp/dataflow/launch_template/component.yaml
+++ b/components/gcp/dataflow/launch_template/component.yaml
@@ -63,8 +63,9 @@ outputs:
    type: UI metadata
implementation:
  container:
-    image: gcr.io/ml-pipeline/ml-pipeline-gcp:57d9f7f1cfd458e945d297957621716062d89a49
+    image: gcr.io/ml-pipeline/ml-pipeline-gcp:ad9bd5648dd0453005225779f25d8cebebc7ca00
    args: [
+      --ui_metadata_path, {outputPath: MLPipeline UI metadata},
      kfp_component.google.dataflow, launch_template,
      --project_id, {inputValue: project_id},
      --gcs_path, {inputValue: gcs_path},
@@ -78,4 +79,3 @@ implementation:
      KFP_POD_NAME: "{{pod.name}}"
  fileOutputs:
    job_id: /tmp/kfp/output/dataflow/job_id.txt
-    MLPipeline UI metadata: /mlpipeline-ui-metadata.json
diff --git a/components/gcp/dataflow/launch_template/sample.ipynb b/components/gcp/dataflow/launch_template/sample.ipynb
index 8e8ab960570..db2bb3e628e 100644
--- a/components/gcp/dataflow/launch_template/sample.ipynb
+++ b/components/gcp/dataflow/launch_template/sample.ipynb
@@ -42,11 +42,8 @@
    "\n",
    "To use the component, the following requirements must be met:\n",
    "- Cloud Dataflow API is enabled.\n",
-    "- The component is running under a secret [Kubeflow user service account](https://www.kubeflow.org/docs/started/getting-started-gke/#gcp-service-accounts) in a Kubeflow Pipeline cluster. For example:\n",
-    "  ```\n",
-    "  component_op(...).apply(gcp.use_gcp_secret('user-gcp-sa'))\n",
-    "  ```\n",
-    "* The Kubeflow user service account is a member of:\n",
+    "- The component can authenticate to GCP. 
Refer to [Authenticating Pipelines to GCP](https://www.kubeflow.org/docs/gke/authentication-pipelines/) for details.\n", + "- The Kubeflow user service account is a member of:\n", " - `roles/dataflow.developer` role of the project.\n", " - `roles/storage.objectViewer` role of the Cloud Storage Object `gcs_path.`\n", " - `roles/storage.objectCreator` role of the Cloud Storage Object `staging_dir.` \n", @@ -85,7 +82,7 @@ "import kfp.components as comp\n", "\n", "dataflow_template_op = comp.load_component_from_url(\n", - " 'https://raw.githubusercontent.com/kubeflow/pipelines/e598176c02f45371336ccaa819409e8ec83743df/components/gcp/dataflow/launch_template/component.yaml')\n", + " 'https://raw.githubusercontent.com/kubeflow/pipelines/01a23ae8672d3b18e88adf3036071496aca3552d/components/gcp/dataflow/launch_template/component.yaml')\n", "help(dataflow_template_op)" ] }, @@ -155,7 +152,6 @@ "outputs": [], "source": [ "import kfp.dsl as dsl\n", - "import kfp.gcp as gcp\n", "import json\n", "@dsl.pipeline(\n", " name='Dataflow launch template pipeline',\n", @@ -181,7 +177,7 @@ " location = location, \n", " validate_only = validate_only,\n", " staging_dir = staging_dir,\n", - " wait_interval = wait_interval).apply(gcp.use_gcp_secret('user-gcp-sa'))" + " wait_interval = wait_interval)" ] }, { @@ -282,4 +278,4 @@ }, "nbformat": 4, "nbformat_minor": 2 -} +} \ No newline at end of file diff --git a/components/gcp/dataproc/create_cluster/README.md b/components/gcp/dataproc/create_cluster/README.md index 743e1159b77..48028a0e810 100644 --- a/components/gcp/dataproc/create_cluster/README.md +++ b/components/gcp/dataproc/create_cluster/README.md @@ -62,11 +62,7 @@ Note: You can recycle the cluster by using the [Dataproc delete cluster componen To use the component, you must: * Set up the GCP project by following these [steps](https://cloud.google.com/dataproc/docs/guides/setup-project). -* Run the component under a secret [Kubeflow user service account](https://www.kubeflow.org/docs/started/getting-started-gke/#gcp-service-accounts) in a Kubeflow cluster. For example: - - ``` - component_op(...).apply(gcp.use_gcp_secret('user-gcp-sa')) - ``` +* The component can authenticate to GCP. Refer to [Authenticating Pipelines to GCP](https://www.kubeflow.org/docs/gke/authentication-pipelines/) for details. * Grant the following types of access to the Kubeflow user service account: * Read access to the Cloud Storage buckets which contain the initialization action files. * The role, `roles/dataproc.editor`, on the project. 
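The change above repeats across every component in this diff: the per-step `.apply(gcp.use_gcp_secret('user-gcp-sa'))` call is dropped in favor of cluster-level authentication such as Workload Identity. A minimal sketch of the two modes, using the create-cluster component as the example; the project and region values are placeholders:

```python
import kfp.components as comp
import kfp.dsl as dsl

dataproc_create_cluster_op = comp.load_component_from_url(
    'https://raw.githubusercontent.com/kubeflow/pipelines/01a23ae8672d3b18e88adf3036071496aca3552d/components/gcp/dataproc/create_cluster/component.yaml')

@dsl.pipeline(name='Dataproc create cluster auth sketch')
def pipeline(project_id='my-project', region='us-central1'):
    # New default: the step inherits credentials from the cluster
    # (for example Workload Identity), so no per-step secret is applied.
    dataproc_create_cluster_op(project_id=project_id, region=region)

    # Legacy clusters that still mount the user-gcp-sa secret can opt
    # back in explicitly, step by step:
    #   import kfp.gcp as gcp
    #   dataproc_create_cluster_op(...).apply(gcp.use_gcp_secret('user-gcp-sa'))
```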
@@ -92,7 +88,7 @@ Follow these steps to use the component in a pipeline: ```python import kfp.components as comp - dataproc_create_cluster_op = comp.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/e598176c02f45371336ccaa819409e8ec83743df/components/gcp/dataproc/create_cluster/component.yaml') + dataproc_create_cluster_op = comp.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/01a23ae8672d3b18e88adf3036071496aca3552d/components/gcp/dataproc/create_cluster/component.yaml') help(dataproc_create_cluster_op) ``` @@ -114,7 +110,6 @@ EXPERIMENT_NAME = 'Dataproc - Create Cluster' ```python import kfp.dsl as dsl -import kfp.gcp as gcp import json @dsl.pipeline( name='Dataproc create cluster pipeline', @@ -140,7 +135,7 @@ def dataproc_create_cluster_pipeline( config_bucket=config_bucket, image_version=image_version, cluster=cluster, - wait_interval=wait_interval).apply(gcp.use_gcp_secret('user-gcp-sa')) + wait_interval=wait_interval) ``` #### Compile the pipeline diff --git a/components/gcp/dataproc/create_cluster/component.yaml b/components/gcp/dataproc/create_cluster/component.yaml index 555c9f394a9..f2add9920d9 100644 --- a/components/gcp/dataproc/create_cluster/component.yaml +++ b/components/gcp/dataproc/create_cluster/component.yaml @@ -70,8 +70,9 @@ outputs: type: UI metadata implementation: container: - image: gcr.io/ml-pipeline/ml-pipeline-gcp:57d9f7f1cfd458e945d297957621716062d89a49 + image: gcr.io/ml-pipeline/ml-pipeline-gcp:ad9bd5648dd0453005225779f25d8cebebc7ca00 args: [ + --ui_metadata_path, {outputPath: MLPipeline UI metadata}, kfp_component.google.dataproc, create_cluster, --project_id, {inputValue: project_id}, --region, {inputValue: region}, @@ -81,10 +82,9 @@ implementation: --config_bucket, {inputValue: config_bucket}, --image_version, {inputValue: image_version}, --cluster, {inputValue: cluster}, - --wait_interval, {inputValue: wait_interval} + --wait_interval, {inputValue: wait_interval}, ] env: KFP_POD_NAME: "{{pod.name}}" fileOutputs: cluster_name: /tmp/kfp/output/dataproc/cluster_name.txt - MLPipeline UI metadata: /mlpipeline-ui-metadata.json diff --git a/components/gcp/dataproc/create_cluster/sample.ipynb b/components/gcp/dataproc/create_cluster/sample.ipynb index d5295ce6bc7..8baeda37a4c 100644 --- a/components/gcp/dataproc/create_cluster/sample.ipynb +++ b/components/gcp/dataproc/create_cluster/sample.ipynb @@ -46,11 +46,7 @@ "\n", "To use the component, you must:\n", "* Set up the GCP project by following these [steps](https://cloud.google.com/dataproc/docs/guides/setup-project).\n", - "* Run the component under a secret [Kubeflow user service account](https://www.kubeflow.org/docs/started/getting-started-gke/#gcp-service-accounts) in a Kubeflow cluster. For example:\n", - "\n", - " ```\n", - " component_op(...).apply(gcp.use_gcp_secret('user-gcp-sa'))\n", - " ```\n", + "* The component can authenticate to GCP. 
Refer to [Authenticating Pipelines to GCP](https://www.kubeflow.org/docs/gke/authentication-pipelines/) for details.\n", "* Grant the following types of access to the Kubeflow user service account:\n", " * Read access to the Cloud Storage buckets which contains initialization action files.\n", " * The role, `roles/dataproc.editor` on the project.\n", @@ -92,7 +88,7 @@ "import kfp.components as comp\n", "\n", "dataproc_create_cluster_op = comp.load_component_from_url(\n", - " 'https://raw.githubusercontent.com/kubeflow/pipelines/e598176c02f45371336ccaa819409e8ec83743df/components/gcp/dataproc/create_cluster/component.yaml')\n", + " 'https://raw.githubusercontent.com/kubeflow/pipelines/01a23ae8672d3b18e88adf3036071496aca3552d/components/gcp/dataproc/create_cluster/component.yaml')\n", "help(dataproc_create_cluster_op)" ] }, @@ -137,7 +133,6 @@ "outputs": [], "source": [ "import kfp.dsl as dsl\n", - "import kfp.gcp as gcp\n", "import json\n", "@dsl.pipeline(\n", " name='Dataproc create cluster pipeline',\n", @@ -163,7 +158,7 @@ " config_bucket=config_bucket, \n", " image_version=image_version, \n", " cluster=cluster, \n", - " wait_interval=wait_interval).apply(gcp.use_gcp_secret('user-gcp-sa'))" + " wait_interval=wait_interval)" ] }, { @@ -248,4 +243,4 @@ }, "nbformat": 4, "nbformat_minor": 2 -} +} \ No newline at end of file diff --git a/components/gcp/dataproc/delete_cluster/README.md b/components/gcp/dataproc/delete_cluster/README.md index a48c3875163..a26600b6a36 100644 --- a/components/gcp/dataproc/delete_cluster/README.md +++ b/components/gcp/dataproc/delete_cluster/README.md @@ -1,68 +1,78 @@ # Name -Data preparation by deleting a cluster in Cloud Dataproc +Component: Data preparation by deleting a cluster in Cloud Dataproc # Label -Cloud Dataproc, cluster, GCP, Cloud Storage, Kubeflow, Pipeline +Cloud Dataproc, Kubeflow # Summary -A Kubeflow Pipeline component to delete a cluster in Cloud Dataproc. +A Kubeflow pipeline component to delete a cluster in Cloud Dataproc. ## Intended use -Use this component at the start of a Kubeflow Pipeline to delete a temporary Cloud Dataproc cluster -to run Cloud Dataproc jobs as steps in the pipeline. This component is usually used with an -[exit handler](https://github.com/kubeflow/pipelines/blob/master/samples/core/exit_handler/exit_handler.py) to run at the end of a pipeline. +Use this component at the start of a Kubeflow pipeline to delete a temporary Cloud Dataproc cluster when running Cloud Dataproc jobs as steps in the pipeline. This component is usually used with an [exit handler](https://github.com/kubeflow/pipelines/blob/master/samples/core/exit_handler/exit_handler.py) to run at the end of a pipeline. +# Facets + +Use case: + +Technique: + +Input data type: + +ML workflow: ## Runtime arguments | Argument | Description | Optional | Data type | Accepted values | Default | -|----------|-------------|----------|-----------|-----------------|---------| -| project_id | The Google Cloud Platform (GCP) project ID that the cluster belongs to. | No | GCPProjectID | | | -| region | The Cloud Dataproc region in which to handle the request. | No | GCPRegion | | | -| name | The name of the cluster to delete. | No | String | | | -| wait_interval | The number of seconds to pause between polling the operation. | Yes | Integer | | 30 | +|:----------|:-------------|:----------|:-----------|:-----------------|:---------| +| project_id | The Google Cloud Platform (GCP) project ID that the cluster belongs to. 
| No | GCPProjectID | - | - | +| region | The Cloud Dataproc region in which to handle the request. | No | GCPRegion | - | - | +| name | The name of the cluster to delete. | No | String | - | - | +| wait_interval | The number of seconds to pause between polling the operation. | Yes | Integer | - | 30 | ## Cautions & requirements To use the component, you must: * Set up a GCP project by following this [guide](https://cloud.google.com/dataproc/docs/guides/setup-project). -* Run the component under a secret [Kubeflow user service account](https://www.kubeflow.org/docs/started/getting-started-gke/#gcp-service-accounts) in a Kubeflow cluster. For example: - - ``` - component_op(...).apply(gcp.use_gcp_secret('user-gcp-sa')) - ``` -* Grant the Kubeflow user service account the role `roles/dataproc.editor` on the project. +* The component can authenticate to GCP. Refer to [Authenticating Pipelines to GCP](https://www.kubeflow.org/docs/gke/authentication-pipelines/) for details. +* Grant the Kubeflow user service account the role, `roles/dataproc.editor`, on the project. ## Detailed description This component deletes a Dataproc cluster by using [Dataproc delete cluster REST API](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.clusters/delete). Follow these steps to use the component in a pipeline: -1. Install the Kubeflow Pipeline SDK: +1. Install the Kubeflow pipeline's SDK: -```python -%%capture --no-stderr + ```python + %%capture --no-stderr -KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar.gz' -!pip3 install $KFP_PACKAGE --upgrade -``` + KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar.gz' + !pip3 install $KFP_PACKAGE --upgrade + ``` -2. Load the component using KFP SDK +2. Load the component using the Kubeflow pipeline's SDK: -```python -import kfp.components as comp + ```python + import kfp.components as comp -dataproc_delete_cluster_op = comp.load_component_from_url( - 'https://raw.githubusercontent.com/kubeflow/pipelines/e598176c02f45371336ccaa819409e8ec83743df/components/gcp/dataproc/delete_cluster/component.yaml') -help(dataproc_delete_cluster_op) -``` + dataproc_delete_cluster_op = comp.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/01a23ae8672d3b18e88adf3036071496aca3552d/components/gcp/dataproc/delete_cluster/component.yaml') + help(dataproc_delete_cluster_op) + ``` ### Sample -Note: The following sample code works in an IPython notebook or directly in Python code. See the sample code below to learn how to execute the template. +The following sample code works in an IPython notebook or directly in Python code. See the sample code below to learn how to execute the template. 
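Because the delete op usually runs as cleanup, a compact sketch of the exit-handler pattern the README points to may help. It assumes `dataproc_create_cluster_op` and `dataproc_delete_cluster_op` were both loaded with `load_component_from_url` as shown above; the cluster name is a placeholder:

```python
import kfp.dsl as dsl

@dsl.pipeline(name='Dataproc job with guaranteed cluster teardown')
def pipeline(project_id='my-project', region='us-central1',
             name='kfp-temp-cluster'):
    delete_op = dataproc_delete_cluster_op(
        project_id=project_id, region=region, name=name)

    # Steps created inside the ExitHandler run first; delete_op then runs
    # last, whether those steps succeed or fail.
    with dsl.ExitHandler(delete_op):
        dataproc_create_cluster_op(
            project_id=project_id, region=region, name=name)
        # ... submit-job steps that use the temporary cluster go here ...
```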
#### Prerequisites @@ -72,8 +82,8 @@ Note: The following sample code works in an IPython notebook or directly in Pyth ```python -PROJECT_ID = '' -CLUSTER_NAME = '' +PROJECT_ID = '' +CLUSTER_NAME = '' REGION = 'us-central1' EXPERIMENT_NAME = 'Dataproc - Delete Cluster' @@ -84,7 +94,6 @@ EXPERIMENT_NAME = 'Dataproc - Delete Cluster' ```python import kfp.dsl as dsl -import kfp.gcp as gcp import json @dsl.pipeline( name='Dataproc delete cluster pipeline', @@ -98,7 +107,7 @@ def dataproc_delete_cluster_pipeline( dataproc_delete_cluster_op( project_id=project_id, region=region, - name=name).apply(gcp.use_gcp_secret('user-gcp-sa')) + name=name) ``` #### Compile the pipeline @@ -115,10 +124,10 @@ compiler.Compiler().compile(pipeline_func, pipeline_filename) ```python -#Specify pipeline argument values +#Specify values for the pipeline's arguments arguments = {} -#Get or create an experiment and submit a pipeline run +#Get or create an experiment import kfp client = kfp.Client() experiment = client.create_experiment(EXPERIMENT_NAME) diff --git a/components/gcp/dataproc/delete_cluster/component.yaml b/components/gcp/dataproc/delete_cluster/component.yaml index 6f97e1c46a3..9b9282f55db 100644 --- a/components/gcp/dataproc/delete_cluster/component.yaml +++ b/components/gcp/dataproc/delete_cluster/component.yaml @@ -36,7 +36,7 @@ inputs: type: Integer implementation: container: - image: gcr.io/ml-pipeline/ml-pipeline-gcp:57d9f7f1cfd458e945d297957621716062d89a49 + image: gcr.io/ml-pipeline/ml-pipeline-gcp:ad9bd5648dd0453005225779f25d8cebebc7ca00 args: [ kfp_component.google.dataproc, delete_cluster, --project_id, {inputValue: project_id}, diff --git a/components/gcp/dataproc/delete_cluster/sample.ipynb b/components/gcp/dataproc/delete_cluster/sample.ipynb index feda7414e00..3ffdd2bdf65 100644 --- a/components/gcp/dataproc/delete_cluster/sample.ipynb +++ b/components/gcp/dataproc/delete_cluster/sample.ipynb @@ -33,11 +33,7 @@ "## Cautions & requirements\n", "To use the component, you must:\n", "* Set up a GCP project by following this [guide](https://cloud.google.com/dataproc/docs/guides/setup-project).\n", - "* Run the component under a secret [Kubeflow user service account](https://www.kubeflow.org/docs/started/getting-started-gke/#gcp-service-accounts) in a Kubeflow cluster. For example:\n", - "\n", - " ```\n", - " component_op(...).apply(gcp.use_gcp_secret('user-gcp-sa'))\n", - " ```\n", + "* The component can authenticate to GCP. 
Refer to [Authenticating Pipelines to GCP](https://www.kubeflow.org/docs/gke/authentication-pipelines/) for details.\n", "* Grant the Kubeflow user service account the role `roles/dataproc.editor` on the project.\n", "\n", "## Detailed description\n", @@ -75,7 +71,7 @@ "import kfp.components as comp\n", "\n", "dataproc_delete_cluster_op = comp.load_component_from_url(\n", - " 'https://raw.githubusercontent.com/kubeflow/pipelines/e598176c02f45371336ccaa819409e8ec83743df/components/gcp/dataproc/delete_cluster/component.yaml')\n", + " 'https://raw.githubusercontent.com/kubeflow/pipelines/01a23ae8672d3b18e88adf3036071496aca3552d/components/gcp/dataproc/delete_cluster/component.yaml')\n", "help(dataproc_delete_cluster_op)" ] }, @@ -125,7 +121,6 @@ "outputs": [], "source": [ "import kfp.dsl as dsl\n", - "import kfp.gcp as gcp\n", "import json\n", "@dsl.pipeline(\n", " name='Dataproc delete cluster pipeline',\n", @@ -139,7 +134,7 @@ " dataproc_delete_cluster_op(\n", " project_id=project_id, \n", " region=region, \n", - " name=name).apply(gcp.use_gcp_secret('user-gcp-sa'))" + " name=name)" ] }, { diff --git a/components/gcp/dataproc/submit_hadoop_job/README.md b/components/gcp/dataproc/submit_hadoop_job/README.md index e9e784efbca..52498b1b003 100644 --- a/components/gcp/dataproc/submit_hadoop_job/README.md +++ b/components/gcp/dataproc/submit_hadoop_job/README.md @@ -1,33 +1,52 @@ # Name -Data preparation using Hadoop MapReduce on YARN with Cloud Dataproc +Component: Data preparation using Hadoop MapReduce on YARN with Cloud Dataproc -# Label -Cloud Dataproc, GCP, Cloud Storage, Hadoop, YARN, Apache, MapReduce +# Labels +Cloud Dataproc, Hadoop, YARN, Apache, MapReduce # Summary -A Kubeflow Pipeline component to prepare data by submitting an Apache Hadoop MapReduce job on Apache Hadoop YARN to Cloud Dataproc. +A Kubeflow pipeline component to prepare data by submitting an Apache Hadoop MapReduce job on Apache Hadoop YARN to Cloud Dataproc. + +# Facets + +Use case: + +Technique: + +Input data type: + +ML workflow: # Details ## Intended use -Use the component to run an Apache Hadoop MapReduce job as one preprocessing step in a Kubeflow Pipeline. +Use the component to run an Apache Hadoop MapReduce job as one preprocessing step in a Kubeflow pipeline. ## Runtime arguments | Argument | Description | Optional | Data type | Accepted values | Default | |----------|-------------|----------|-----------|-----------------|---------| -| project_id | The Google Cloud Platform (GCP) project ID that the cluster belongs to. | No | GCPProjectID | | | -| region | The Dataproc region to handle the request. | No | GCPRegion | | | -| cluster_name | The name of the cluster to run the job. | No | String | | | -| main_jar_file_uri | The Hadoop Compatible Filesystem (HCFS) URI of the JAR file containing the main class to execute. | No | List | | | -| main_class | The name of the driver's main class. The JAR file that contains the class must be either in the default CLASSPATH or specified in `hadoop_job.jarFileUris`. | No | String | | | -| args | The arguments to pass to the driver. Do not include arguments, such as -libjars or -Dfoo=bar, that can be set as job properties, since a collision may occur that causes an incorrect job submission. | Yes | List | | None | -| hadoop_job | The payload of a [HadoopJob](https://cloud.google.com/dataproc/docs/reference/rest/v1/HadoopJob). | Yes | Dict | | None | -| job | The payload of a [Dataproc job](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs). 
| Yes | Dict | | None |
-| wait_interval | The number of seconds to pause between polling the operation. | Yes | Integer | | 30 |
-
-Note:
-`main_jar_file_uri`: The examples for the files are :
+| project_id | The Google Cloud Platform (GCP) project ID that the cluster belongs to. | No | GCPProjectID | - | - |
+| region | The Dataproc region to handle the request. | No | GCPRegion | - | - |
+| cluster_name | The name of the cluster to run the job. | No | String | - | - |
+| main_jar_file_uri | The Hadoop Compatible Filesystem (HCFS) URI of the JAR file containing the main class to execute. | No | List | - | - |
+| main_class | The name of the driver's main class. The JAR file that contains the class must be either in the default CLASSPATH or specified in `hadoop_job.jarFileUris`. | No | String | - | - |
+| args | The arguments to pass to the driver. Do not include arguments, such as -libjars or -Dfoo=bar, that can be set as job properties, since a collision may occur that causes an incorrect job submission. | Yes | List | - | None |
+| hadoop_job | The payload of a [HadoopJob](https://cloud.google.com/dataproc/docs/reference/rest/v1/HadoopJob). | Yes | Dict | - | None |
+| job | The payload of a [Dataproc job](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs). | Yes | Dict | - | None |
+| wait_interval | The number of seconds to pause between polling the operation. | Yes | Integer | - | 30 |
+
+Note:
+
+`main_jar_file_uri`: The examples for the files are:
- `gs://foo-bucket/analytics-binaries/extract-useful-metrics-mr.jar`
- `hdfs:/tmp/test-samples/custom-wordcount.jar`
- `file:///home/usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar`
@@ -41,83 +60,68 @@ job_id | The ID of the created job. | String
To use the component, you must:
* Set up a GCP project by following this [guide](https://cloud.google.com/dataproc/docs/guides/setup-project).
* [Create a new cluster](https://cloud.google.com/dataproc/docs/guides/create-cluster).
-* Run the component under a secret [Kubeflow user service account](https://www.kubeflow.org/docs/started/getting-started-gke/#gcp-service-accounts) in a Kubeflow cluster. For example:
-
-  ```python
-  component_op(...).apply(gcp.use_gcp_secret('user-gcp-sa'))
-  ```
-* Grant the Kubeflow user service account the role `roles/dataproc.editor` on the project.
+* The component can authenticate to GCP. Refer to [Authenticating Pipelines to GCP](https://www.kubeflow.org/docs/gke/authentication-pipelines/) for details.
+* Grant the Kubeflow user service account the role, `roles/dataproc.editor`, on the project.

## Detailed description
-This component creates a Hadoop job from [Dataproc submit job REST API](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs/submit).
+This component creates a Hadoop job from the [Dataproc submit job REST API](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs/submit).

Follow these steps to use the component in a pipeline:

-1. Install the Kubeflow Pipeline SDK:
-
-
-
-```python
-%%capture --no-stderr
-
-KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar.gz'
-!pip3 install $KFP_PACKAGE --upgrade
-```
+1. Install the Kubeflow pipeline's SDK:

-2. Load the component using KFP SDK
+    ```python
+    %%capture --no-stderr
+    KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar.gz'
+    !pip3 install $KFP_PACKAGE --upgrade
+    ```

-```python
-import kfp.components as comp
+2. 
Load the component using the Kubeflow pipeline's SDK: -dataproc_submit_hadoop_job_op = comp.load_component_from_url( - 'https://raw.githubusercontent.com/kubeflow/pipelines/e598176c02f45371336ccaa819409e8ec83743df/components/gcp/dataproc/submit_hadoop_job/component.yaml') -help(dataproc_submit_hadoop_job_op) -``` + ```python + import kfp.components as comp -## Sample -Note: The following sample code works in an IPython notebook or directly in Python code. See the sample code below to learn how to execute the template. + dataproc_submit_hadoop_job_op = comp.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/01a23ae8672d3b18e88adf3036071496aca3552d/components/gcp/dataproc/submit_hadoop_job/component.yaml') + help(dataproc_submit_hadoop_job_op) + ``` +### Sample +The following sample code works in an IPython notebook or directly in Python code. See the sample code below to learn how to execute the template. -### Setup a Dataproc cluster +#### Setup a Dataproc cluster [Create a new Dataproc cluster](https://cloud.google.com/dataproc/docs/guides/create-cluster) (or reuse an existing one) before running the sample code. ### Prepare a Hadoop job -Upload your Hadoop JAR file to a Cloud Storage bucket. In the sample, we will use a JAR file that is preinstalled in the main cluster, so there is no need to provide `main_jar_file_uri`. - -Here is the [WordCount example source code](https://github.com/apache/hadoop/blob/trunk/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/WordCount.java). +Upload your Hadoop JAR file to a Cloud Storage bucket. In the sample, we will use a JAR file that is preinstalled in the main cluster, so you don't have to provide the argument, `main_jar_file_uri`. -To package a self-contained Hadoop MapReduce application from the source code, follow the [MapReduce Tutorial](https://hadoop.apache.org/docs/current/hadoop-mapreduce-client/hadoop-mapreduce-client-core/MapReduceTutorial.html). - - -### Set sample parameters +To package a self-contained Hadoop MapReduce application from the [WordCount example source code](https://github.com/apache/hadoop/blob/trunk/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/WordCount.java), follow the [MapReduce Tutorial](https://hadoop.apache.org/docs/current/hadoop-mapreduce-client/hadoop-mapreduce-client-core/MapReduceTutorial.html). +#### Set sample parameters ```python -PROJECT_ID = '' -CLUSTER_NAME = '' -OUTPUT_GCS_PATH = '' +PROJECT_ID = '' +CLUSTER_NAME = '' +OUTPUT_GCS_PATH = '' REGION = 'us-central1' MAIN_CLASS = 'org.apache.hadoop.examples.WordCount' INTPUT_GCS_PATH = 'gs://ml-pipeline-playground/shakespeare1.txt' EXPERIMENT_NAME = 'Dataproc - Submit Hadoop Job' ``` -#### Insepct Input Data +#### Inspect the input data The input file is a simple text file: - ```python !gsutil cat $INTPUT_GCS_PATH ``` -### Clean up the existing output files (optional) -This is needed because the sample code requires the output folder to be a clean folder. To continue to run the sample, make sure that the service account of the notebook server has access to the `OUTPUT_GCS_PATH`. - -CAUTION: This will remove all blob files under `OUTPUT_GCS_PATH`. +#### Clean up the existing output files (optional) +This is needed because the sample code requires the output folder to be a clean folder. To continue to run the sample, make sure that the service account of the notebook server has access to `OUTPUT_GCS_PATH`. 
+Caution: This will remove all blob files under `OUTPUT_GCS_PATH`. ```python !gsutil rm $OUTPUT_GCS_PATH/** @@ -125,10 +129,8 @@ CAUTION: This will remove all blob files under `OUTPUT_GCS_PATH`. #### Example pipeline that uses the component - ```python import kfp.dsl as dsl -import kfp.gcp as gcp import json @dsl.pipeline( name='Dataproc submit Hadoop job pipeline', @@ -157,7 +159,7 @@ def dataproc_submit_hadoop_job_pipeline( args=args, hadoop_job=hadoop_job, job=job, - wait_interval=wait_interval).apply(gcp.use_gcp_secret('user-gcp-sa')) + wait_interval=wait_interval) ``` #### Compile the pipeline @@ -174,10 +176,10 @@ compiler.Compiler().compile(pipeline_func, pipeline_filename) ```python -#Specify pipeline argument values +#Specify values for the pipeline's arguments arguments = {} -#Get or create an experiment and submit a pipeline run +#Get or create an experiment import kfp client = kfp.Client() experiment = client.create_experiment(EXPERIMENT_NAME) @@ -187,10 +189,9 @@ run_name = pipeline_func.__name__ + ' run' run_result = client.run_pipeline(experiment.id, run_name, pipeline_filename, arguments) ``` -### Inspect the output +#### Inspect the output The sample in the notebook will count the words in the input text and save them in sharded files. The command to inspect the output is: - ```python !gsutil cat $OUTPUT_GCS_PATH/* ``` @@ -201,5 +202,5 @@ The sample in the notebook will count the words in the input text and save them * [Sample notebook](https://github.com/kubeflow/pipelines/blob/master/components/gcp/dataproc/submit_hadoop_job/sample.ipynb) * [Dataproc HadoopJob](https://cloud.google.com/dataproc/docs/reference/rest/v1/HadoopJob) -## License +# License By deploying or using this software you agree to comply with the [AI Hub Terms of Service](https://aihub.cloud.google.com/u/0/aihub-tos) and the [Google APIs Terms of Service](https://developers.google.com/terms/). To the extent of a direct conflict of terms, the AI Hub Terms of Service will control. 
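The component.yaml hunks below all make the same two changes: the container image is bumped, and the UI metadata file moves from a fixed `fileOutputs` path (`/mlpipeline-ui-metadata.json`) to a system-chosen path passed in via `--ui_metadata_path, {outputPath: MLPipeline UI metadata}`. A rough sketch of what a component program does with that flag; the writer below is illustrative, not the actual `kfp_component` implementation:

```python
# Hedged sketch: the program receives the metadata path from the pipeline
# system instead of writing to a hard-coded /mlpipeline-ui-metadata.json.
import argparse
import json
import os

def write_ui_metadata(path, job_id):
    # Ensure the system-chosen output directory exists before writing.
    directory = os.path.dirname(path)
    if directory:
        os.makedirs(directory, exist_ok=True)
    metadata = {'outputs': [{'type': 'markdown',
                             'storage': 'inline',
                             'source': 'Dataproc job `%s` submitted.' % job_id}]}
    with open(path, 'w') as f:
        json.dump(metadata, f)

if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--ui_metadata_path',
                        default='/mlpipeline-ui-metadata.json')  # old fixed location
    args, _ = parser.parse_known_args()
    write_ui_metadata(args.ui_metadata_path, job_id='example-job-id')
```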
diff --git a/components/gcp/dataproc/submit_hadoop_job/component.yaml b/components/gcp/dataproc/submit_hadoop_job/component.yaml index f18d11ab9a2..451f3269ca6 100644 --- a/components/gcp/dataproc/submit_hadoop_job/component.yaml +++ b/components/gcp/dataproc/submit_hadoop_job/component.yaml @@ -80,8 +80,9 @@ outputs: type: UI metadata implementation: container: - image: gcr.io/ml-pipeline/ml-pipeline-gcp:57d9f7f1cfd458e945d297957621716062d89a49 + image: gcr.io/ml-pipeline/ml-pipeline-gcp:ad9bd5648dd0453005225779f25d8cebebc7ca00 args: [ + --ui_metadata_path, {outputPath: MLPipeline UI metadata}, kfp_component.google.dataproc, submit_hadoop_job, --project_id, {inputValue: project_id}, --region, {inputValue: region}, @@ -91,10 +92,9 @@ implementation: --args, {inputValue: args}, --hadoop_job, {inputValue: hadoop_job}, --job, {inputValue: job}, - --wait_interval, {inputValue: wait_interval} + --wait_interval, {inputValue: wait_interval}, ] env: KFP_POD_NAME: "{{pod.name}}" fileOutputs: job_id: /tmp/kfp/output/dataproc/job_id.txt - MLPipeline UI metadata: /mlpipeline-ui-metadata.json diff --git a/components/gcp/dataproc/submit_hadoop_job/sample.ipynb b/components/gcp/dataproc/submit_hadoop_job/sample.ipynb index 653b1e06d52..aa5d464e8d1 100644 --- a/components/gcp/dataproc/submit_hadoop_job/sample.ipynb +++ b/components/gcp/dataproc/submit_hadoop_job/sample.ipynb @@ -46,11 +46,7 @@ "To use the component, you must:\n", "* Set up a GCP project by following this [guide](https://cloud.google.com/dataproc/docs/guides/setup-project).\n", "* [Create a new cluster](https://cloud.google.com/dataproc/docs/guides/create-cluster).\n", - "* Run the component under a secret [Kubeflow user service account](https://www.kubeflow.org/docs/started/getting-started-gke/#gcp-service-accounts) in a Kubeflow cluster. For example:\n", - "\n", - " ```python\n", - " component_op(...).apply(gcp.use_gcp_secret('user-gcp-sa'))\n", - " ```\n", + "* The component can authenticate to GCP. 
Refer to [Authenticating Pipelines to GCP](https://www.kubeflow.org/docs/gke/authentication-pipelines/) for details.\n", "* Grant the Kubeflow user service account the role `roles/dataproc.editor` on the project.\n", "\n", "## Detailed description\n", @@ -90,7 +86,7 @@ "import kfp.components as comp\n", "\n", "dataproc_submit_hadoop_job_op = comp.load_component_from_url(\n", - " 'https://raw.githubusercontent.com/kubeflow/pipelines/e598176c02f45371336ccaa819409e8ec83743df/components/gcp/dataproc/submit_hadoop_job/component.yaml')\n", + " 'https://raw.githubusercontent.com/kubeflow/pipelines/01a23ae8672d3b18e88adf3036071496aca3552d/components/gcp/dataproc/submit_hadoop_job/component.yaml')\n", "help(dataproc_submit_hadoop_job_op)" ] }, @@ -186,7 +182,6 @@ "outputs": [], "source": [ "import kfp.dsl as dsl\n", - "import kfp.gcp as gcp\n", "import json\n", "@dsl.pipeline(\n", " name='Dataproc submit Hadoop job pipeline',\n", @@ -215,7 +210,7 @@ " args=args, \n", " hadoop_job=hadoop_job, \n", " job=job, \n", - " wait_interval=wait_interval).apply(gcp.use_gcp_secret('user-gcp-sa'))" + " wait_interval=wait_interval)" ] }, { @@ -316,4 +311,4 @@ }, "nbformat": 4, "nbformat_minor": 2 -} +} \ No newline at end of file diff --git a/components/gcp/dataproc/submit_hive_job/README.md b/components/gcp/dataproc/submit_hive_job/README.md index 9c15b6e83f2..1dd4e184062 100644 --- a/components/gcp/dataproc/submit_hive_job/README.md +++ b/components/gcp/dataproc/submit_hive_job/README.md @@ -1,16 +1,34 @@ # Name -Data preparation using Apache Hive on YARN with Cloud Dataproc +Component: Data preparation using Apache Hive on YARN with Cloud Dataproc # Label -Cloud Dataproc, GCP, Cloud Storage, YARN, Hive, Apache +Cloud Dataproc, YARN, Apache Hive # Summary -A Kubeflow Pipeline component to prepare data by submitting an Apache Hive job on YARN to Cloud Dataproc. +A Kubeflow pipeline component to prepare data by submitting an Apache Hive job on YARN to Cloud Dataproc. + +# Facets + +Use case: + +Technique: + +Input data type: + +ML workflow: # Details ## Intended use -Use the component to run an Apache Hive job as one preprocessing step in a Kubeflow Pipeline. +Use the component to run an Apache Hive job as one preprocessing step in a Kubeflow pipeline. ## Runtime arguments | Argument | Description | Optional | Data type | Accepted values | Default | @@ -19,9 +37,9 @@ Use the component to run an Apache Hive job as one preprocessing step in a Kubef | region | The Cloud Dataproc region to handle the request. | No | GCPRegion | | | | cluster_name | The name of the cluster to run the job. | No | String | | | | queries | The queries to execute the Hive job. Specify multiple queries in one string by separating them with semicolons. You do not need to terminate queries with semicolons. | Yes | List | | None | -| query_file_uri | The HCFS URI of the script that contains the Hive queries. | Yes | GCSPath | | None | +| query_file_uri | The Hadoop Compatible Filesystem (HCFS) URI of the script that contains the Hive queries. | Yes | GCSPath | | None | | script_variables | Mapping of the query’s variable names to their values (equivalent to the Hive command: SET name="value";). 
| Yes | Dict | | None | -| hive_job | The payload of a [HiveJob](https://cloud.google.com/dataproc/docs/reference/rest/v1/HiveJob) | Yes | Dict | | None | +| hive_job | The payload of a [Hive job](https://cloud.google.com/dataproc/docs/reference/rest/v1/HiveJob) | Yes | Dict | | None | | job | The payload of a [Dataproc job](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs). | Yes | Dict | | None | | wait_interval | The number of seconds to pause between polling the operation. | Yes | Integer | | 30 | @@ -34,42 +52,34 @@ job_id | The ID of the created job. | String To use the component, you must: * Set up a GCP project by following this [guide](https://cloud.google.com/dataproc/docs/guides/setup-project). * [Create a new cluster](https://cloud.google.com/dataproc/docs/guides/create-cluster). -* Run the component under a secret [Kubeflow user service account](https://www.kubeflow.org/docs/started/getting-started-gke/#gcp-service-accounts) in a Kubeflow cluster. For example: - - ``` - component_op(...).apply(gcp.use_gcp_secret('user-gcp-sa')) - ``` +* The component can authenticate to GCP. Refer to [Authenticating Pipelines to GCP](https://www.kubeflow.org/docs/gke/authentication-pipelines/) for details. * Grant the Kubeflow user service account the role `roles/dataproc.editor` on the project. ## Detailed description -This component creates a Hive job from [Dataproc submit job REST API](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs/submit). +This component creates a Hive job from the [Dataproc submit job REST API](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs/submit). Follow these steps to use the component in a pipeline: -1. Install the Kubeflow Pipeline SDK: +1. Install the Kubeflow pipeline's SDK: + ```python + %%capture --no-stderr + KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar.gz' + !pip3 install $KFP_PACKAGE --upgrade + ``` -```python -%%capture --no-stderr - -KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar.gz' -!pip3 install $KFP_PACKAGE --upgrade -``` - -2. Load the component using KFP SDK - +2. Load the component using the Kubeflow pipeline's SDK: -```python -import kfp.components as comp + ```python + import kfp.components as comp -dataproc_submit_hive_job_op = comp.load_component_from_url( - 'https://raw.githubusercontent.com/kubeflow/pipelines/e598176c02f45371336ccaa819409e8ec83743df/components/gcp/dataproc/submit_hive_job/component.yaml') -help(dataproc_submit_hive_job_op) -``` + dataproc_submit_hive_job_op = comp.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/01a23ae8672d3b18e88adf3036071496aca3552d/components/gcp/dataproc/submit_hive_job/component.yaml') + help(dataproc_submit_hive_job_op) + ``` ### Sample -Note: The following sample code works in an IPython notebook or directly in Python code. See the sample code below to learn how to execute the template. +The following sample code works in an IPython notebook or directly in Python code. See the sample code below to learn how to execute the template. 
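As a complement to the parameter table above, a compact sketch of invoking the op with the List-typed `queries` input, which the samples serialize with `json.dumps`. It assumes `dataproc_submit_hive_job_op` was loaded in step 2; project, region, and cluster values are placeholders:

```python
import json
import kfp.dsl as dsl

@dsl.pipeline(name='Dataproc submit Hive job sketch')
def pipeline(project_id='my-project', region='us-central1',
             cluster_name='my-cluster'):
    # List-typed inputs are passed as their JSON serialization; the
    # queries run in order on the existing cluster, and no terminating
    # semicolons are required.
    dataproc_submit_hive_job_op(
        project_id=project_id,
        region=region,
        cluster_name=cluster_name,
        queries=json.dumps(['SHOW DATABASES', 'SHOW TABLES']))
```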
#### Setup a Dataproc cluster @@ -78,17 +88,15 @@ Note: The following sample code works in an IPython notebook or directly in Pyth #### Prepare a Hive query -Put your Hive queries in the queries list, or upload your Hive queries into a file saved in a Cloud Storage bucket and then enter the Cloud Storage bucket’s path in `query_file_uri.` In this sample, we will use a hard coded query in the queries list to select data from a public CSV file from Cloud Storage. +You can put your Hive queries in the `queries` list, or you can use `query_file_uri`. In this sample, we will use a hard coded query in the `queries` list to select data from a public CSV file in Cloud Storage. For more details, see the [Hive language manual.](https://cwiki.apache.org/confluence/display/Hive/LanguageManual) - #### Set sample parameters - ```python -PROJECT_ID = '' -CLUSTER_NAME = '' +PROJECT_ID = '' +CLUSTER_NAME = '' REGION = 'us-central1' QUERY = ''' DROP TABLE IF EXISTS natality_csv; @@ -116,7 +124,6 @@ EXPERIMENT_NAME = 'Dataproc - Submit Hive Job' ```python import kfp.dsl as dsl -import kfp.gcp as gcp import json @dsl.pipeline( name='Dataproc submit Hive job pipeline', @@ -142,7 +149,7 @@ def dataproc_submit_hive_job_pipeline( script_variables=script_variables, hive_job=hive_job, job=job, - wait_interval=wait_interval).apply(gcp.use_gcp_secret('user-gcp-sa')) + wait_interval=wait_interval) ``` @@ -160,10 +167,10 @@ compiler.Compiler().compile(pipeline_func, pipeline_filename) ```python -#Specify pipeline argument values +#Specify values for the pipeline's arguments arguments = {} -#Get or create an experiment and submit a pipeline run +#Get or create an experiment import kfp client = kfp.Client() experiment = client.create_experiment(EXPERIMENT_NAME) @@ -174,8 +181,8 @@ run_result = client.run_pipeline(experiment.id, run_name, pipeline_filename, arg ``` ## References -* [Component python code](https://github.com/kubeflow/pipelines/blob/master/components/gcp/container/component_sdk/python/kfp_component/google/dataproc/_submit_hive_job.py) -* [Component docker file](https://github.com/kubeflow/pipelines/blob/master/components/gcp/container/Dockerfile) +* [Component Python code](https://github.com/kubeflow/pipelines/blob/master/components/gcp/container/component_sdk/python/kfp_component/google/dataproc/_submit_hive_job.py) +* [Component Docker file](https://github.com/kubeflow/pipelines/blob/master/components/gcp/container/Dockerfile) * [Sample notebook](https://github.com/kubeflow/pipelines/blob/master/components/gcp/dataproc/submit_hive_job/sample.ipynb) * [Dataproc HiveJob](https://cloud.google.com/dataproc/docs/reference/rest/v1/HiveJob) diff --git a/components/gcp/dataproc/submit_hive_job/component.yaml b/components/gcp/dataproc/submit_hive_job/component.yaml index 5120514f058..3de85d0c672 100644 --- a/components/gcp/dataproc/submit_hive_job/component.yaml +++ b/components/gcp/dataproc/submit_hive_job/component.yaml @@ -75,8 +75,9 @@ outputs: type: UI metadata implementation: container: - image: gcr.io/ml-pipeline/ml-pipeline-gcp:57d9f7f1cfd458e945d297957621716062d89a49 + image: gcr.io/ml-pipeline/ml-pipeline-gcp:ad9bd5648dd0453005225779f25d8cebebc7ca00 args: [ + --ui_metadata_path, {outputPath: MLPipeline UI metadata}, kfp_component.google.dataproc, submit_hive_job, --project_id, {inputValue: project_id}, --region, {inputValue: region}, @@ -86,10 +87,9 @@ implementation: --script_variables, {inputValue: script_variables}, --hive_job, {inputValue: hive_job}, --job, {inputValue: job}, - --wait_interval, 
{inputValue: wait_interval} + --wait_interval, {inputValue: wait_interval}, ] env: KFP_POD_NAME: "{{pod.name}}" fileOutputs: job_id: /tmp/kfp/output/dataproc/job_id.txt - MLPipeline UI metadata: /mlpipeline-ui-metadata.json diff --git a/components/gcp/dataproc/submit_hive_job/sample.ipynb b/components/gcp/dataproc/submit_hive_job/sample.ipynb index 27e1e4f4eb6..3cd9a87a920 100644 --- a/components/gcp/dataproc/submit_hive_job/sample.ipynb +++ b/components/gcp/dataproc/submit_hive_job/sample.ipynb @@ -39,11 +39,7 @@ "To use the component, you must:\n", "* Set up a GCP project by following this [guide](https://cloud.google.com/dataproc/docs/guides/setup-project).\n", "* [Create a new cluster](https://cloud.google.com/dataproc/docs/guides/create-cluster).\n", - "* Run the component under a secret [Kubeflow user service account](https://www.kubeflow.org/docs/started/getting-started-gke/#gcp-service-accounts) in a Kubeflow cluster. For example:\n", - "\n", - " ```\n", - " component_op(...).apply(gcp.use_gcp_secret('user-gcp-sa'))\n", - " ```\n", + "* The component can authenticate to GCP. Refer to [Authenticating Pipelines to GCP](https://www.kubeflow.org/docs/gke/authentication-pipelines/) for details.\n", "* Grant the Kubeflow user service account the role `roles/dataproc.editor` on the project.\n", "\n", "## Detailed description\n", @@ -81,7 +77,7 @@ "import kfp.components as comp\n", "\n", "dataproc_submit_hive_job_op = comp.load_component_from_url(\n", - " 'https://raw.githubusercontent.com/kubeflow/pipelines/e598176c02f45371336ccaa819409e8ec83743df/components/gcp/dataproc/submit_hive_job/component.yaml')\n", + " 'https://raw.githubusercontent.com/kubeflow/pipelines/01a23ae8672d3b18e88adf3036071496aca3552d/components/gcp/dataproc/submit_hive_job/component.yaml')\n", "help(dataproc_submit_hive_job_op)" ] }, @@ -156,7 +152,6 @@ "outputs": [], "source": [ "import kfp.dsl as dsl\n", - "import kfp.gcp as gcp\n", "import json\n", "@dsl.pipeline(\n", " name='Dataproc submit Hive job pipeline',\n", @@ -182,7 +177,7 @@ " script_variables=script_variables, \n", " hive_job=hive_job, \n", " job=job, \n", - " wait_interval=wait_interval).apply(gcp.use_gcp_secret('user-gcp-sa'))\n", + " wait_interval=wait_interval)\n", " " ] }, @@ -267,4 +262,4 @@ }, "nbformat": 4, "nbformat_minor": 2 -} +} \ No newline at end of file diff --git a/components/gcp/dataproc/submit_pig_job/README.md b/components/gcp/dataproc/submit_pig_job/README.md index 921a6efc171..9da02d3a80f 100644 --- a/components/gcp/dataproc/submit_pig_job/README.md +++ b/components/gcp/dataproc/submit_pig_job/README.md @@ -58,11 +58,7 @@ job_id | The ID of the created job. | String To use the component, you must: * Set up a GCP project by following this [guide](https://cloud.google.com/dataproc/docs/guides/setup-project). * [Create a new cluster](https://cloud.google.com/dataproc/docs/guides/create-cluster). -* Run the component under a secret [Kubeflow user service account](https://www.kubeflow.org/docs/started/getting-started-gke/#gcp-service-accounts) in a Kubeflow cluster. For example: - - ``` - component_op(...).apply(gcp.use_gcp_secret('user-gcp-sa')) - ``` +* The component can authenticate to GCP. Refer to [Authenticating Pipelines to GCP](https://www.kubeflow.org/docs/gke/authentication-pipelines/) for details. * Grant the Kubeflow user service account the role, `roles/dataproc.editor`, on the project. 
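For the Pig component whose README follows the same template, a sketch showing `script_variables`, the Dict-typed input that maps variable names to values for the Pig script. The query and variable values are hypothetical, and it assumes `dataproc_submit_pig_job_op` was loaded via `load_component_from_url` as above:

```python
import json
import kfp.dsl as dsl

@dsl.pipeline(name='Dataproc submit Pig job sketch')
def pipeline(project_id='my-project', region='us-central1',
             cluster_name='my-cluster'):
    # Both inputs are JSON-serialized; $input in the query is filled
    # from script_variables at job submission time.
    dataproc_submit_pig_job_op(
        project_id=project_id,
        region=region,
        cluster_name=cluster_name,
        queries=json.dumps(["data = LOAD '$input' AS (line:chararray)",
                            'DUMP data']),
        script_variables=json.dumps(
            {'input': 'gs://ml-pipeline-playground/shakespeare1.txt'}))
```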
## Detailed description @@ -86,7 +82,7 @@ Follow these steps to use the component in a pipeline: ```python import kfp.components as comp - dataproc_submit_pig_job_op = comp.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/e598176c02f45371336ccaa819409e8ec83743df/components/gcp/dataproc/submit_pig_job/component.yaml') + dataproc_submit_pig_job_op = comp.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/01a23ae8672d3b18e88adf3036071496aca3552d/components/gcp/dataproc/submit_pig_job/component.yaml') help(dataproc_submit_pig_job_op) ``` @@ -124,7 +120,6 @@ EXPERIMENT_NAME = 'Dataproc - Submit Pig Job' ```python import kfp.dsl as dsl -import kfp.gcp as gcp import json @dsl.pipeline( name='Dataproc submit Pig job pipeline', @@ -150,7 +145,7 @@ def dataproc_submit_pig_job_pipeline( script_variables=script_variables, pig_job=pig_job, job=job, - wait_interval=wait_interval).apply(gcp.use_gcp_secret('user-gcp-sa')) + wait_interval=wait_interval) ``` diff --git a/components/gcp/dataproc/submit_pig_job/component.yaml b/components/gcp/dataproc/submit_pig_job/component.yaml index f7df48896b4..a1e718a4387 100644 --- a/components/gcp/dataproc/submit_pig_job/component.yaml +++ b/components/gcp/dataproc/submit_pig_job/component.yaml @@ -75,8 +75,9 @@ outputs: type: UI metadata implementation: container: - image: gcr.io/ml-pipeline/ml-pipeline-gcp:57d9f7f1cfd458e945d297957621716062d89a49 + image: gcr.io/ml-pipeline/ml-pipeline-gcp:ad9bd5648dd0453005225779f25d8cebebc7ca00 args: [ + --ui_metadata_path, {outputPath: MLPipeline UI metadata}, kfp_component.google.dataproc, submit_pig_job, --project_id, {inputValue: project_id}, --region, {inputValue: region}, @@ -86,10 +87,9 @@ implementation: --script_variables, {inputValue: script_variables}, --pig_job, {inputValue: pig_job}, --job, {inputValue: job}, - --wait_interval, {inputValue: wait_interval} + --wait_interval, {inputValue: wait_interval}, ] env: KFP_POD_NAME: "{{pod.name}}" fileOutputs: job_id: /tmp/kfp/output/dataproc/job_id.txt - MLPipeline UI metadata: /mlpipeline-ui-metadata.json diff --git a/components/gcp/dataproc/submit_pig_job/sample.ipynb b/components/gcp/dataproc/submit_pig_job/sample.ipynb index e3a518b5e89..7fbf288313e 100644 --- a/components/gcp/dataproc/submit_pig_job/sample.ipynb +++ b/components/gcp/dataproc/submit_pig_job/sample.ipynb @@ -42,11 +42,7 @@ "To use the component, you must:\n", "* Set up a GCP project by following this [guide](https://cloud.google.com/dataproc/docs/guides/setup-project).\n", "* [Create a new cluster](https://cloud.google.com/dataproc/docs/guides/create-cluster).\n", - "* Run the component under a secret [Kubeflow user service account](https://www.kubeflow.org/docs/started/getting-started-gke/#gcp-service-accounts) in a Kubeflow cluster. For example:\n", - "\n", - " ```\n", - " component_op(...).apply(gcp.use_gcp_secret('user-gcp-sa'))\n", - " ```\n", + "* The component can authenticate to GCP. 
Refer to [Authenticating Pipelines to GCP](https://www.kubeflow.org/docs/gke/authentication-pipelines/) for details.\n", "* Grant the Kubeflow user service account the role `roles/dataproc.editor` on the project.\n", "\n", "## Detailed description\n", @@ -84,7 +80,7 @@ "import kfp.components as comp\n", "\n", "dataproc_submit_pig_job_op = comp.load_component_from_url(\n", - " 'https://raw.githubusercontent.com/kubeflow/pipelines/e598176c02f45371336ccaa819409e8ec83743df/components/gcp/dataproc/submit_pig_job/component.yaml')\n", + " 'https://raw.githubusercontent.com/kubeflow/pipelines/01a23ae8672d3b18e88adf3036071496aca3552d/components/gcp/dataproc/submit_pig_job/component.yaml')\n", "help(dataproc_submit_pig_job_op)" ] }, @@ -146,7 +142,6 @@ "outputs": [], "source": [ "import kfp.dsl as dsl\n", - "import kfp.gcp as gcp\n", "import json\n", "@dsl.pipeline(\n", " name='Dataproc submit Pig job pipeline',\n", @@ -172,7 +167,7 @@ " script_variables=script_variables, \n", " pig_job=pig_job, \n", " job=job, \n", - " wait_interval=wait_interval).apply(gcp.use_gcp_secret('user-gcp-sa'))\n", + " wait_interval=wait_interval)\n", " " ] }, @@ -257,4 +252,4 @@ }, "nbformat": 4, "nbformat_minor": 2 -} +} \ No newline at end of file diff --git a/components/gcp/dataproc/submit_pyspark_job/README.md b/components/gcp/dataproc/submit_pyspark_job/README.md index 864381b8728..233b43e7e7b 100644 --- a/components/gcp/dataproc/submit_pyspark_job/README.md +++ b/components/gcp/dataproc/submit_pyspark_job/README.md @@ -1,31 +1,48 @@ # Name -Data preparation using PySpark on Cloud Dataproc +Component: Data preparation using PySpark on Cloud Dataproc -# Label -Cloud Dataproc, GCP, Cloud Storage,PySpark, Kubeflow, pipelines, components +# Labels +Cloud Dataproc, PySpark, Kubeflow # Summary A Kubeflow Pipeline component to prepare data by submitting a PySpark job to Cloud Dataproc. +# Facets + +Use case: + +Technique: + +Input data type: + +ML workflow: # Details ## Intended use -Use the component to run an Apache PySpark job as one preprocessing step in a Kubeflow Pipeline. +Use this component to run an Apache PySpark job as one preprocessing step in a Kubeflow pipeline. ## Runtime arguments | Argument | Description | Optional | Data type | Accepted values | Default | -|----------------------|------------|----------|--------------|-----------------|---------| -| project_id | The ID of the Google Cloud Platform (GCP) project that the cluster belongs to. | No | GCPProjectID | | | -| region | The Cloud Dataproc region to handle the request. | No | GCPRegion | | | -| cluster_name | The name of the cluster to run the job. | No | String | | | -| main_python_file_uri | The HCFS URI of the Python file to use as the driver. This must be a .py file. | No | GCSPath | | | -| args | The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission. | Yes | List | | None | -| pyspark_job | The payload of a [PySparkJob](https://cloud.google.com/dataproc/docs/reference/rest/v1/PySparkJob). | Yes | Dict | | None | -| job | The payload of a [Dataproc job](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs). | Yes | Dict | | None | +|:----------------------|:------------|:----------|:--------------|:-----------------|:---------| +| project_id | The ID of the Google Cloud Platform (GCP) project that the cluster belongs to. 
| No | GCPProjectID | - | - |
+| region | The Cloud Dataproc region to handle the request. | No | GCPRegion | - | - |
+| cluster_name | The name of the cluster to run the job. | No | String | - | - |
+| main_python_file_uri | The HCFS URI of the Python file to use as the driver. This must be a .py file. | No | GCSPath | - | - |
+| args | The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission. | Yes | List | - | None |
+| pyspark_job | The payload of a [PySparkJob](https://cloud.google.com/dataproc/docs/reference/rest/v1/PySparkJob). | Yes | Dict | - | None |
+| job | The payload of a [Dataproc job](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs). | Yes | Dict | - | None |

## Output
Name | Description | Type
job_id | The ID of the created job. | String

## Cautions & requirements
To use the component, you must:
* Set up a GCP project by following this [guide](https://cloud.google.com/dataproc/docs/guides/setup-project).
* [Create a new cluster](https://cloud.google.com/dataproc/docs/guides/create-cluster).
-* Run the component under a secret [Kubeflow user service account](https://www.kubeflow.org/docs/started/getting-started-gke/#gcp-service-accounts) in a Kubeflow cluster. For example:
-
-  ```
-  component_op(...).apply(gcp.use_gcp_secret('user-gcp-sa'))
-  ```
+* The component can authenticate to GCP. Refer to [Authenticating Pipelines to GCP](https://www.kubeflow.org/docs/gke/authentication-pipelines/) for details.
* Grant the Kubeflow user service account the role `roles/dataproc.editor` on the project.

## Detailed description
@@ -50,30 +63,29 @@ This component creates a PySpark job from the [Dataproc submit job REST API](htt

Follow these steps to use the component in a pipeline:

-1. Install the Kubeflow Pipeline SDK:
+1. Install the Kubeflow pipeline's SDK:

-```python
-%%capture --no-stderr
+    ```python
+    %%capture --no-stderr

-KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar.gz'
-!pip3 install $KFP_PACKAGE --upgrade
-```
+    KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar.gz'
+    !pip3 install $KFP_PACKAGE --upgrade
+    ```

-2. Load the component using KFP SDK
+2. Load the component using the Kubeflow pipeline's SDK:

-```python
-import kfp.components as comp
+    ```python
+    import kfp.components as comp

-dataproc_submit_pyspark_job_op = comp.load_component_from_url(
-    'https://raw.githubusercontent.com/kubeflow/pipelines/e598176c02f45371336ccaa819409e8ec83743df/components/gcp/dataproc/submit_pyspark_job/component.yaml')
-help(dataproc_submit_pyspark_job_op)
-```
+    dataproc_submit_pyspark_job_op = comp.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/01a23ae8672d3b18e88adf3036071496aca3552d/components/gcp/dataproc/submit_pyspark_job/component.yaml')
+    help(dataproc_submit_pyspark_job_op)
+    ```

### Sample
-Note: The following sample code works in an IPython notebook or directly in Python code. See the sample code below to learn how to execute the template.
+The following sample code works in an IPython notebook or directly in Python code. See the sample code below to learn how to execute the template.

#### Setup a Dataproc cluster

@@ -85,17 +97,15 @@ Note: The following sample code works in an IPython notebook or directly in Pyth

#### Prepare a PySpark job

Upload your PySpark code file to a Cloud Storage bucket. 
For example, this is a publicly accessible `hello-world.py` in Cloud Storage: - ```python !gsutil cat gs://dataproc-examples-2f10d78d114f6aaec76462e3c310f31f/src/pyspark/hello-world/hello-world.py ``` #### Set sample parameters - ```python -PROJECT_ID = '' -CLUSTER_NAME = '' +PROJECT_ID = '' +CLUSTER_NAME = '' REGION = 'us-central1' PYSPARK_FILE_URI = 'gs://dataproc-examples-2f10d78d114f6aaec76462e3c310f31f/src/pyspark/hello-world/hello-world.py' ARGS = '' @@ -104,10 +114,8 @@ EXPERIMENT_NAME = 'Dataproc - Submit PySpark Job' #### Example pipeline that uses the component - ```python import kfp.dsl as dsl -import kfp.gcp as gcp import json @dsl.pipeline( name='Dataproc submit PySpark job pipeline', @@ -131,7 +139,7 @@ def dataproc_submit_pyspark_job_pipeline( args=args, pyspark_job=pyspark_job, job=job, - wait_interval=wait_interval).apply(gcp.use_gcp_secret('user-gcp-sa')) + wait_interval=wait_interval) ``` @@ -147,12 +155,11 @@ compiler.Compiler().compile(pipeline_func, pipeline_filename) #### Submit the pipeline for execution - ```python -#Specify pipeline argument values +#Specify values for the pipeline's arguments arguments = {} -#Get or create an experiment and submit a pipeline run +#Get or create an experiment import kfp client = kfp.Client() experiment = client.create_experiment(EXPERIMENT_NAME) diff --git a/components/gcp/dataproc/submit_pyspark_job/component.yaml b/components/gcp/dataproc/submit_pyspark_job/component.yaml index 526b0bf7158..5e9db428d1c 100644 --- a/components/gcp/dataproc/submit_pyspark_job/component.yaml +++ b/components/gcp/dataproc/submit_pyspark_job/component.yaml @@ -69,8 +69,9 @@ outputs: type: UI metadata implementation: container: - image: gcr.io/ml-pipeline/ml-pipeline-gcp:57d9f7f1cfd458e945d297957621716062d89a49 + image: gcr.io/ml-pipeline/ml-pipeline-gcp:ad9bd5648dd0453005225779f25d8cebebc7ca00 args: [ + --ui_metadata_path, {outputPath: MLPipeline UI metadata}, kfp_component.google.dataproc, submit_pyspark_job, --project_id, {inputValue: project_id}, --region, {inputValue: region}, @@ -79,10 +80,9 @@ implementation: --args, {inputValue: args}, --pyspark_job, {inputValue: pyspark_job}, --job, {inputValue: job}, - --wait_interval, {inputValue: wait_interval} + --wait_interval, {inputValue: wait_interval}, ] env: KFP_POD_NAME: "{{pod.name}}" fileOutputs: job_id: /tmp/kfp/output/dataproc/job_id.txt - MLPipeline UI metadata: /mlpipeline-ui-metadata.json diff --git a/components/gcp/dataproc/submit_pyspark_job/sample.ipynb b/components/gcp/dataproc/submit_pyspark_job/sample.ipynb index c79de4ae1ec..8c96cfec795 100644 --- a/components/gcp/dataproc/submit_pyspark_job/sample.ipynb +++ b/components/gcp/dataproc/submit_pyspark_job/sample.ipynb @@ -42,11 +42,7 @@ "To use the component, you must:\n", "* Set up a GCP project by following this [guide](https://cloud.google.com/dataproc/docs/guides/setup-project).\n", "* [Create a new cluster](https://cloud.google.com/dataproc/docs/guides/create-cluster).\n", - "* Run the component under a secret [Kubeflow user service account](https://www.kubeflow.org/docs/started/getting-started-gke/#gcp-service-accounts) in a Kubeflow cluster. For example:\n", - "\n", - " ```\n", - " component_op(...).apply(gcp.use_gcp_secret('user-gcp-sa'))\n", - " ```\n", + "* The component can authenticate to GCP. 
Refer to [Authenticating Pipelines to GCP](https://www.kubeflow.org/docs/gke/authentication-pipelines/) for details.\n", "* Grant the Kubeflow user service account the role `roles/dataproc.editor` on the project.\n", "\n", "## Detailed description\n", @@ -86,7 +82,7 @@ "import kfp.components as comp\n", "\n", "dataproc_submit_pyspark_job_op = comp.load_component_from_url(\n", - " 'https://raw.githubusercontent.com/kubeflow/pipelines/e598176c02f45371336ccaa819409e8ec83743df/components/gcp/dataproc/submit_pyspark_job/component.yaml')\n", + " 'https://raw.githubusercontent.com/kubeflow/pipelines/01a23ae8672d3b18e88adf3036071496aca3552d/components/gcp/dataproc/submit_pyspark_job/component.yaml')\n", "help(dataproc_submit_pyspark_job_op)" ] }, @@ -157,7 +153,6 @@ "outputs": [], "source": [ "import kfp.dsl as dsl\n", - "import kfp.gcp as gcp\n", "import json\n", "@dsl.pipeline(\n", " name='Dataproc submit PySpark job pipeline',\n", @@ -181,7 +176,7 @@ " args=args, \n", " pyspark_job=pyspark_job, \n", " job=job, \n", - " wait_interval=wait_interval).apply(gcp.use_gcp_secret('user-gcp-sa'))\n", + " wait_interval=wait_interval)\n", " " ] }, @@ -266,4 +261,4 @@ }, "nbformat": 4, "nbformat_minor": 2 -} +} \ No newline at end of file diff --git a/components/gcp/dataproc/submit_spark_job/README.md b/components/gcp/dataproc/submit_spark_job/README.md index 7b0fbd23bf7..3b73206910c 100644 --- a/components/gcp/dataproc/submit_spark_job/README.md +++ b/components/gcp/dataproc/submit_spark_job/README.md @@ -66,13 +66,7 @@ To use the component, you must: * Set up a GCP project by following this [guide](https://cloud.google.com/dataproc/docs/guides/setup-project). * [Create a new cluster](https://cloud.google.com/dataproc/docs/guides/create-cluster). -* Run the component under a secret [Kubeflow user service account](https://www.kubeflow.org/docs/started/getting-started-gke/#gcp-service-accounts), in a Kubeflow cluster. For example: - - ``` - component_op(...).apply(gcp.use_gcp_secret('user-gcp-sa')) - ``` - - +* The component can authenticate to GCP. Refer to [Authenticating Pipelines to GCP](https://www.kubeflow.org/docs/gke/authentication-pipelines/) for details. * Grant the Kubeflow user service account the role `roles/dataproc.editor` on the project. 
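For example, the role grant might look like the following sketch; the project ID and service account email are placeholders you must replace with your own values:

```python
# Hypothetical identifiers: grant the Dataproc editor role to the Kubeflow user service account
!gcloud projects add-iam-policy-binding <project-id> --member="serviceAccount:<kubeflow-user-sa>@<project-id>.iam.gserviceaccount.com" --role="roles/dataproc.editor"
```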
@@ -101,7 +95,7 @@ Follow these steps to use the component in a pipeline: import kfp.components as comp dataproc_submit_spark_job_op = comp.load_component_from_url( - 'https://raw.githubusercontent.com/kubeflow/pipelines/e598176c02f45371336ccaa819409e8ec83743df/components/gcp/dataproc/submit_spark_job/component.yaml') + 'https://raw.githubusercontent.com/kubeflow/pipelines/01a23ae8672d3b18e88adf3036071496aca3552d/components/gcp/dataproc/submit_spark_job/component.yaml') help(dataproc_submit_spark_job_op) ``` @@ -139,7 +133,6 @@ EXPERIMENT_NAME = 'Dataproc - Submit Spark Job' ```python import kfp.dsl as dsl -import kfp.gcp as gcp import json @dsl.pipeline( name='Dataproc submit Spark job pipeline', @@ -165,7 +158,7 @@ def dataproc_submit_spark_job_pipeline( args=args, spark_job=spark_job, job=job, - wait_interval=wait_interval).apply(gcp.use_gcp_secret('user-gcp-sa')) + wait_interval=wait_interval) ``` diff --git a/components/gcp/dataproc/submit_spark_job/component.yaml b/components/gcp/dataproc/submit_spark_job/component.yaml index 1a8f2b05210..4200f6a461b 100644 --- a/components/gcp/dataproc/submit_spark_job/component.yaml +++ b/components/gcp/dataproc/submit_spark_job/component.yaml @@ -76,8 +76,9 @@ outputs: type: UI metadata implementation: container: - image: gcr.io/ml-pipeline/ml-pipeline-gcp:57d9f7f1cfd458e945d297957621716062d89a49 + image: gcr.io/ml-pipeline/ml-pipeline-gcp:ad9bd5648dd0453005225779f25d8cebebc7ca00 args: [ + --ui_metadata_path, {outputPath: MLPipeline UI metadata}, kfp_component.google.dataproc, submit_spark_job, --project_id, {inputValue: project_id}, --region, {inputValue: region}, @@ -87,10 +88,9 @@ implementation: --args, {inputValue: args}, --spark_job, {inputValue: spark_job}, --job, {inputValue: job}, - --wait_interval, {inputValue: wait_interval} + --wait_interval, {inputValue: wait_interval}, ] env: KFP_POD_NAME: "{{pod.name}}" fileOutputs: job_id: /tmp/kfp/output/dataproc/job_id.txt - MLPipeline UI metadata: /mlpipeline-ui-metadata.json diff --git a/components/gcp/dataproc/submit_spark_job/sample.ipynb b/components/gcp/dataproc/submit_spark_job/sample.ipynb index 907c2ba8ca5..253201820ee 100644 --- a/components/gcp/dataproc/submit_spark_job/sample.ipynb +++ b/components/gcp/dataproc/submit_spark_job/sample.ipynb @@ -50,13 +50,7 @@ "\n", "* Set up a GCP project by following this [guide](https://cloud.google.com/dataproc/docs/guides/setup-project).\n", "* [Create a new cluster](https://cloud.google.com/dataproc/docs/guides/create-cluster).\n", - "* Run the component under a secret [Kubeflow user service account](https://www.kubeflow.org/docs/started/getting-started-gke/#gcp-service-accounts) in a Kubeflow cluster. For example:\n", - "\n", - " ```\n", - " component_op(...).apply(gcp.use_gcp_secret('user-gcp-sa'))\n", - " ```\n", - "\n", - "\n", + "* The component can authenticate to GCP. 
Refer to [Authenticating Pipelines to GCP](https://www.kubeflow.org/docs/gke/authentication-pipelines/) for details.\n", "* Grant the Kubeflow user service account the role `roles/dataproc.editor` on the project.\n", "\n", "\n", @@ -99,7 +93,7 @@ "import kfp.components as comp\n", "\n", "dataproc_submit_spark_job_op = comp.load_component_from_url(\n", - " 'https://raw.githubusercontent.com/kubeflow/pipelines/e598176c02f45371336ccaa819409e8ec83743df/components/gcp/dataproc/submit_spark_job/component.yaml')\n", + " 'https://raw.githubusercontent.com/kubeflow/pipelines/01a23ae8672d3b18e88adf3036071496aca3552d/components/gcp/dataproc/submit_spark_job/component.yaml')\n", "help(dataproc_submit_spark_job_op)" ] }, @@ -159,7 +153,6 @@ "outputs": [], "source": [ "import kfp.dsl as dsl\n", - "import kfp.gcp as gcp\n", "import json\n", "@dsl.pipeline(\n", " name='Dataproc submit Spark job pipeline',\n", @@ -185,7 +178,7 @@ " args=args, \n", " spark_job=spark_job, \n", " job=job, \n", - " wait_interval=wait_interval).apply(gcp.use_gcp_secret('user-gcp-sa'))\n", + " wait_interval=wait_interval)\n", " " ] }, @@ -271,4 +264,4 @@ }, "nbformat": 4, "nbformat_minor": 2 -} +} \ No newline at end of file diff --git a/components/gcp/dataproc/submit_sparksql_job/README.md b/components/gcp/dataproc/submit_sparksql_job/README.md index 2d1256a357b..6393a01c403 100644 --- a/components/gcp/dataproc/submit_sparksql_job/README.md +++ b/components/gcp/dataproc/submit_sparksql_job/README.md @@ -1,30 +1,48 @@ # Name -Data preparation using SparkSQL on YARN with Cloud Dataproc +Component: Data preparation using SparkSQL on YARN with Cloud Dataproc # Label -Cloud Dataproc, GCP, Cloud Storage, YARN, SparkSQL, Kubeflow, pipelines, components +Cloud Dataproc, YARN, SparkSQL, Kubeflow # Summary -A Kubeflow Pipeline component to prepare data by submitting a SparkSql job on YARN to Cloud Dataproc. +A Kubeflow pipeline component to prepare data by submitting a SparkSql job on YARN to Cloud Dataproc. + +# Facets + +Use case: + +Technique: + +Input data type: + +ML workflow: # Details ## Intended use -Use the component to run an Apache SparkSql job as one preprocessing step in a Kubeflow Pipeline. +Use the component to run an Apache SparkSql job as one preprocessing step in a Kubeflow pipeline. ## Runtime arguments Argument| Description | Optional | Data type| Accepted values| Default | :--- | :---------- | :--- | :------- | :------ | :------ -project_id | The ID of the Google Cloud Platform (GCP) project that the cluster belongs to. | No| GCPProjectID | | | -region | The Cloud Dataproc region to handle the request. | No | GCPRegion| -cluster_name | The name of the cluster to run the job. | No | String| | | -queries | The queries to execute the SparkSQL job. Specify multiple queries in one string by separating them with semicolons. You do not need to terminate queries with semicolons. | Yes | List | | None | -query_file_uri | The HCFS URI of the script that contains the SparkSQL queries.| Yes | GCSPath | | None | -script_variables | Mapping of the query’s variable names to their values (equivalent to the SparkSQL command: SET name="value";).| Yes| Dict | | None | -sparksql_job | The payload of a [SparkSqlJob](https://cloud.google.com/dataproc/docs/reference/rest/v1/SparkSqlJob). | Yes | Dict | | None | -job | The payload of a [Dataproc job](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs). | Yes | Dict | | None | -wait_interval | The number of seconds to pause between polling the operation. 
| Yes |Integer | | 30 | +project_id | The ID of the Google Cloud Platform (GCP) project that the cluster belongs to. | No| GCPProjectID | - | -| +region | The Cloud Dataproc region to handle the request. | No | GCPRegion|-|- +cluster_name | The name of the cluster to run the job. | No | String| -| -| +queries | The queries to execute the SparkSQL job. Specify multiple queries in one string by separating them with semicolons. You do not need to terminate queries with semicolons. | Yes | List | - | None | +query_file_uri | The Hadoop Compatible Filesystem (HCFS) URI of the script that contains the SparkSQL queries. The SparkSQL queries are listed in a CSV file that is stored in a Cloud Storage bucket.| Yes | GCSPath | - | None | +script_variables | Mapping of the query’s variable names to their values (equivalent to the SparkSQL command: SET name="value";).| Yes| Dict |- | None | +sparksql_job | The payload of a [SparkSql job](https://cloud.google.com/dataproc/docs/reference/rest/v1/SparkSqlJob). | Yes | Dict | - | None | +job | The payload of a [Dataproc job](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs). | Yes | Dict | - | None | +wait_interval | The number of seconds to pause between polling the operation. | Yes |Integer | - | 30 | ## Output Name | Description | Type @@ -35,55 +53,48 @@ job_id | The ID of the created job. | String To use the component, you must: * Set up a GCP project by following this [guide](https://cloud.google.com/dataproc/docs/guides/setup-project). * [Create a new cluster](https://cloud.google.com/dataproc/docs/guides/create-cluster). -* Run the component under a secret [Kubeflow user service account](https://www.kubeflow.org/docs/started/getting-started-gke/#gcp-service-accounts) in a Kubeflow cluster. For example: -``` -component_op(...).apply(gcp.use_gcp_secret('user-gcp-sa')) -``` -* Grant the Kubeflow user service account the role `roles/dataproc.editor` on the project. +* The component can authenticate to GCP. Refer to [Authenticating Pipelines to GCP](https://www.kubeflow.org/docs/gke/authentication-pipelines/) for details. +* Grant the Kubeflow user service account the role, `roles/dataproc.editor`, on the project. ## Detailed Description -This component creates a Pig job from [Dataproc submit job REST API](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs/submit). +This component creates a SparkSql job from the [Dataproc submit job REST API](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs/submit). Follow these steps to use the component in a pipeline: -1. Install the Kubeflow Pipeline SDK: +1. Install the Kubeflow pipeline's SDK: + ```python + %%capture --no-stderr -```python -%%capture --no-stderr + KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar.gz' + !pip3 install $KFP_PACKAGE --upgrade + ``` -KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar.gz' -!pip3 install $KFP_PACKAGE --upgrade -``` +2. Load the component using the Kubeflow pipeline's SDK: -2. 
Load the component using KFP SDK + ```python + import kfp.components as comp - -```python -import kfp.components as comp - -dataproc_submit_sparksql_job_op = comp.load_component_from_url( - 'https://raw.githubusercontent.com/kubeflow/pipelines/e598176c02f45371336ccaa819409e8ec83743df/components/gcp/dataproc/submit_sparksql_job/component.yaml') -help(dataproc_submit_sparksql_job_op) -``` + dataproc_submit_sparksql_job_op = comp.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/01a23ae8672d3b18e88adf3036071496aca3552d/components/gcp/dataproc/submit_sparksql_job/component.yaml') + help(dataproc_submit_sparksql_job_op) + ``` ### Sample -Note: The following sample code works in an IPython notebook or directly in Python code. See the sample code below to learn how to execute the template. +The following sample code works in an IPython notebook or directly in Python code. See the sample code below to learn how to execute the template. #### Setup a Dataproc cluster [Create a new Dataproc cluster](https://cloud.google.com/dataproc/docs/guides/create-cluster) (or reuse an existing one) before running the sample code. #### Prepare a SparkSQL job -Either put your SparkSQL queries in the `queires` list, or upload your SparkSQL queries into a file to a Cloud Storage bucket and then enter the Cloud Storage bucket’s path in `query_file_uri`. In this sample, we will use a hard coded query in the `queries` list to select data from a public CSV file from Cloud Storage. +You can put your SparkSQL queries in the `queries` list, or you can use `query_file_uri`. In this sample, we will use a hard coded query in the `queries` list to select data from a public CSV file in Cloud Storage. -For more details about Spark SQL, see [Spark SQL, DataFrames and Datasets Guide](https://spark.apache.org/docs/latest/sql-programming-guide.html) +For more details about Spark SQL, see [Spark SQL, DataFrames and Datasets Guide](https://spark.apache.org/docs/latest/sql-programming-guide.html). 
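If you prefer `query_file_uri`, a sketch of that setup (the bucket path is a placeholder):

```python
# Hypothetical path: upload a file of semicolon-separated queries, then pass its
# Cloud Storage URI to the component's query_file_uri argument instead of `queries`.
!gsutil cp ./queries.sql gs://<your-bucket>/sparksql/queries.sql
```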
#### Set sample parameters - ```python -PROJECT_ID = '' -CLUSTER_NAME = '' +PROJECT_ID = '' +CLUSTER_NAME = '' REGION = 'us-central1' QUERY = ''' DROP TABLE IF EXISTS natality_csv; @@ -108,10 +119,8 @@ EXPERIMENT_NAME = 'Dataproc - Submit SparkSQL Job' #### Example pipeline that uses the component - ```python import kfp.dsl as dsl -import kfp.gcp as gcp import json @dsl.pipeline( name='Dataproc submit SparkSQL job pipeline', @@ -137,13 +146,12 @@ def dataproc_submit_sparksql_job_pipeline( script_variables=script_variables, sparksql_job=sparksql_job, job=job, - wait_interval=wait_interval).apply(gcp.use_gcp_secret('user-gcp-sa')) + wait_interval=wait_interval) ``` #### Compile the pipeline - ```python pipeline_func = dataproc_submit_sparksql_job_pipeline pipeline_filename = pipeline_func.__name__ + '.zip' @@ -153,12 +161,11 @@ compiler.Compiler().compile(pipeline_func, pipeline_filename) #### Submit the pipeline for execution - ```python -#Specify pipeline argument values +#Specify values for the pipeline's arguments arguments = {} -#Get or create an experiment and submit a pipeline run +#Get or create an experiment import kfp client = kfp.Client() experiment = client.create_experiment(EXPERIMENT_NAME) diff --git a/components/gcp/dataproc/submit_sparksql_job/component.yaml b/components/gcp/dataproc/submit_sparksql_job/component.yaml index f53f6859ab7..3cffc8d5db8 100644 --- a/components/gcp/dataproc/submit_sparksql_job/component.yaml +++ b/components/gcp/dataproc/submit_sparksql_job/component.yaml @@ -75,8 +75,9 @@ outputs: type: UI metadata implementation: container: - image: gcr.io/ml-pipeline/ml-pipeline-gcp:57d9f7f1cfd458e945d297957621716062d89a49 + image: gcr.io/ml-pipeline/ml-pipeline-gcp:ad9bd5648dd0453005225779f25d8cebebc7ca00 args: [ + --ui_metadata_path, {outputPath: MLPipeline UI metadata}, kfp_component.google.dataproc, submit_sparksql_job, --project_id, {inputValue: project_id}, --region, {inputValue: region}, @@ -86,10 +87,9 @@ implementation: --script_variables, {inputValue: script_variables}, --sparksql_job, {inputValue: sparksql_job}, --job, {inputValue: job}, - --wait_interval, {inputValue: wait_interval} + --wait_interval, {inputValue: wait_interval}, ] env: KFP_POD_NAME: "{{pod.name}}" fileOutputs: job_id: /tmp/kfp/output/dataproc/job_id.txt - MLPipeline UI metadata: /mlpipeline-ui-metadata.json diff --git a/components/gcp/dataproc/submit_sparksql_job/sample.ipynb b/components/gcp/dataproc/submit_sparksql_job/sample.ipynb index b037c2cf0e8..21de17a2c27 100644 --- a/components/gcp/dataproc/submit_sparksql_job/sample.ipynb +++ b/components/gcp/dataproc/submit_sparksql_job/sample.ipynb @@ -40,10 +40,7 @@ "To use the component, you must:\n", "* Set up a GCP project by following this [guide](https://cloud.google.com/dataproc/docs/guides/setup-project).\n", "* [Create a new cluster](https://cloud.google.com/dataproc/docs/guides/create-cluster).\n", - "* Run the component under a secret [Kubeflow user service account](https://www.kubeflow.org/docs/started/getting-started-gke/#gcp-service-accounts) in a Kubeflow cluster. For example:\n", - "```\n", - "component_op(...).apply(gcp.use_gcp_secret('user-gcp-sa'))\n", - "```\n", + "* The component can authenticate to GCP. 
Refer to [Authenticating Pipelines to GCP](https://www.kubeflow.org/docs/gke/authentication-pipelines/) for details.\n", "* Grant the Kubeflow user service account the role `roles/dataproc.editor` on the project.\n", "\n", "## Detailed Description\n", @@ -81,7 +78,7 @@ "import kfp.components as comp\n", "\n", "dataproc_submit_sparksql_job_op = comp.load_component_from_url(\n", - " 'https://raw.githubusercontent.com/kubeflow/pipelines/e598176c02f45371336ccaa819409e8ec83743df/components/gcp/dataproc/submit_sparksql_job/component.yaml')\n", + " 'https://raw.githubusercontent.com/kubeflow/pipelines/01a23ae8672d3b18e88adf3036071496aca3552d/components/gcp/dataproc/submit_sparksql_job/component.yaml')\n", "help(dataproc_submit_sparksql_job_op)" ] }, @@ -152,7 +149,6 @@ "outputs": [], "source": [ "import kfp.dsl as dsl\n", - "import kfp.gcp as gcp\n", "import json\n", "@dsl.pipeline(\n", " name='Dataproc submit SparkSQL job pipeline',\n", @@ -178,7 +174,7 @@ " script_variables=script_variables, \n", " sparksql_job=sparksql_job, \n", " job=job, \n", - " wait_interval=wait_interval).apply(gcp.use_gcp_secret('user-gcp-sa'))\n", + " wait_interval=wait_interval)\n", " " ] }, @@ -263,4 +259,4 @@ }, "nbformat": 4, "nbformat_minor": 2 -} +} \ No newline at end of file diff --git a/components/gcp/ml_engine/batch_predict/README.md b/components/gcp/ml_engine/batch_predict/README.md index a3f2d21a0ca..690c0fae680 100644 --- a/components/gcp/ml_engine/batch_predict/README.md +++ b/components/gcp/ml_engine/batch_predict/README.md @@ -49,7 +49,6 @@ The component accepts the following as input: Name | Description | Type :--- | :---------- | :--- job_id | The ID of the created batch job. | String -output_path | The output path of the batch prediction job | GCSPath ## Cautions & requirements @@ -57,13 +56,7 @@ output_path | The output path of the batch prediction job | GCSPath To use the component, you must: * Set up a cloud environment by following this [guide](https://cloud.google.com/ml-engine/docs/tensorflow/getting-started-training-prediction#setup). -* Run the component under a secret [Kubeflow user service account](https://www.kubeflow.org/docs/started/getting-started-gke/#gcp-service-accounts) in a Kubeflow cluster. For example: - - ```python - mlengine_predict_op(...).apply(gcp.use_gcp_secret('user-gcp-sa')) - ``` - - +* The component can authenticate to GCP. Refer to [Authenticating Pipelines to GCP](https://www.kubeflow.org/docs/gke/authentication-pipelines/) for details. * Grant the following types of access to the Kubeflow user service account: * Read access to the Cloud Storage buckets which contains the input data. * Write access to the Cloud Storage bucket of the output directory. 
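A sketch of granting that access with `gsutil iam ch` (the bucket names and service account email are placeholders):

```python
# Hypothetical identifiers: read on the input bucket, write on the output bucket
!gsutil iam ch serviceAccount:<kubeflow-user-sa>@<project-id>.iam.gserviceaccount.com:objectViewer gs://<input-bucket>
!gsutil iam ch serviceAccount:<kubeflow-user-sa>@<project-id>.iam.gserviceaccount.com:objectAdmin gs://<output-bucket>
```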
@@ -94,7 +87,7 @@ KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar import kfp.components as comp mlengine_batch_predict_op = comp.load_component_from_url( - 'https://raw.githubusercontent.com/kubeflow/pipelines/e598176c02f45371336ccaa819409e8ec83743df/components/gcp/ml_engine/batch_predict/component.yaml') + 'https://raw.githubusercontent.com/kubeflow/pipelines/01a23ae8672d3b18e88adf3036071496aca3552d/components/gcp/ml_engine/batch_predict/component.yaml') help(mlengine_batch_predict_op) ``` @@ -132,7 +125,6 @@ OUTPUT_GCS_PATH = GCS_WORKING_DIR + '/batch_predict/output/' ```python import kfp.dsl as dsl -import kfp.gcp as gcp import json @dsl.pipeline( name='CloudML batch predict pipeline', @@ -161,7 +153,7 @@ def pipeline( output_data_format=output_data_format, prediction_input=prediction_input, job_id_prefix=job_id_prefix, - wait_interval=wait_interval).apply(gcp.use_gcp_secret('user-gcp-sa')) + wait_interval=wait_interval) ``` #### Compile the pipeline diff --git a/components/gcp/ml_engine/batch_predict/component.yaml b/components/gcp/ml_engine/batch_predict/component.yaml index 06afc0f9b30..a5f7e91e233 100644 --- a/components/gcp/ml_engine/batch_predict/component.yaml +++ b/components/gcp/ml_engine/batch_predict/component.yaml @@ -69,8 +69,9 @@ outputs: type: UI metadata implementation: container: - image: gcr.io/ml-pipeline/ml-pipeline-gcp:57d9f7f1cfd458e945d297957621716062d89a49 + image: gcr.io/ml-pipeline/ml-pipeline-gcp:ad9bd5648dd0453005225779f25d8cebebc7ca00 args: [ + --ui_metadata_path, {outputPath: MLPipeline UI metadata}, kfp_component.google.ml_engine, batch_predict, --project_id, {inputValue: project_id}, --model_path, {inputValue: model_path}, @@ -81,10 +82,9 @@ implementation: --output_data_format, {inputValue: output_data_format}, --prediction_input, {inputValue: prediction_input}, --job_id_prefix, {inputValue: job_id_prefix}, - --wait_interval, {inputValue: wait_interval} + --wait_interval, {inputValue: wait_interval}, ] env: KFP_POD_NAME: "{{pod.name}}" fileOutputs: job_id: /tmp/kfp/output/ml_engine/job_id.txt - MLPipeline UI metadata: /mlpipeline-ui-metadata.json diff --git a/components/gcp/ml_engine/batch_predict/sample.ipynb b/components/gcp/ml_engine/batch_predict/sample.ipynb index 99980c832bc..79355748809 100644 --- a/components/gcp/ml_engine/batch_predict/sample.ipynb +++ b/components/gcp/ml_engine/batch_predict/sample.ipynb @@ -62,13 +62,7 @@ "To use the component, you must:\n", "\n", "* Set up a cloud environment by following this [guide](https://cloud.google.com/ml-engine/docs/tensorflow/getting-started-training-prediction#setup).\n", - "* Run the component under a secret [Kubeflow user service account](https://www.kubeflow.org/docs/started/getting-started-gke/#gcp-service-accounts) in a Kubeflow cluster. For example:\n", - "\n", - " ```python\n", - " mlengine_predict_op(...).apply(gcp.use_gcp_secret('user-gcp-sa'))\n", - " ```\n", - "\n", - "\n", + "* The component can authenticate to GCP. 
Refer to [Authenticating Pipelines to GCP](https://www.kubeflow.org/docs/gke/authentication-pipelines/) for details.\n", "* Grant the following types of access to the Kubeflow user service account:\n", " * Read access to the Cloud Storage buckets which contains the input data.\n", " * Write access to the Cloud Storage bucket of the output directory.\n", @@ -112,7 +106,7 @@ "import kfp.components as comp\n", "\n", "mlengine_batch_predict_op = comp.load_component_from_url(\n", - " 'https://raw.githubusercontent.com/kubeflow/pipelines/e598176c02f45371336ccaa819409e8ec83743df/components/gcp/ml_engine/batch_predict/component.yaml')\n", + " 'https://raw.githubusercontent.com/kubeflow/pipelines/01a23ae8672d3b18e88adf3036071496aca3552d/components/gcp/ml_engine/batch_predict/component.yaml')\n", "help(mlengine_batch_predict_op)" ] }, @@ -185,7 +179,6 @@ "outputs": [], "source": [ "import kfp.dsl as dsl\n", - "import kfp.gcp as gcp\n", "import json\n", "@dsl.pipeline(\n", " name='CloudML batch predict pipeline',\n", @@ -214,7 +207,7 @@ " output_data_format=output_data_format, \n", " prediction_input=prediction_input, \n", " job_id_prefix=job_id_prefix,\n", - " wait_interval=wait_interval).apply(gcp.use_gcp_secret('user-gcp-sa'))" + " wait_interval=wait_interval)" ] }, { @@ -315,4 +308,4 @@ }, "nbformat": 4, "nbformat_minor": 2 -} +} \ No newline at end of file diff --git a/components/gcp/ml_engine/deploy/README.md b/components/gcp/ml_engine/deploy/README.md index 25f038acaf9..84fd3b023a1 100644 --- a/components/gcp/ml_engine/deploy/README.md +++ b/components/gcp/ml_engine/deploy/README.md @@ -59,10 +59,11 @@ The accepted file formats are: `model_uri` can also be an [Estimator export base directory, ](https://www.tensorflow.org/guide/saved_model#perform_the_export)which contains a list of subdirectories named by timestamp. The directory with the latest timestamp is used to load the trained model file. ## Output -| Name | Description | Type | -|:------- |:---- | :--- | -| job_id | The ID of the created job. | String | -| job_dir | The Cloud Storage path that contains the trained model output files. | GCSPath | +Name | Description | Type +:--- | :---------- | :--- +| model_uri | The Cloud Storage URI of the trained model. | GCSPath | +| model_name | The name of the deployed model. | String | +| version_name | The name of the deployed version. | String | ## Cautions & requirements @@ -70,14 +71,7 @@ The accepted file formats are: To use the component, you must: * [Set up the cloud environment](https://cloud.google.com/ml-engine/docs/tensorflow/getting-started-training-prediction#setup). -* Run the component under a secret [Kubeflow user service account](https://www.kubeflow.org/docs/started/getting-started-gke/#gcp-service-accounts) in a Kubeflow cluster. For example: - - ``` - ```python - mlengine_deploy_op(...).apply(gcp.use_gcp_secret('user-gcp-sa')) - - ``` - +* The component can authenticate to GCP. Refer to [Authenticating Pipelines to GCP](https://www.kubeflow.org/docs/gke/authentication-pipelines/) for details. * Grant read access to the Cloud Storage bucket that contains the trained model to the Kubeflow user service account. 
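As noted above, when `model_uri` is an Estimator export base directory, the component loads the subdirectory with the latest timestamp. A quick way to preview which subdirectory that is (the path is a placeholder):

```python
# Hypothetical path: timestamped export names sort lexically, so the last entry is the latest
export_dirs = !gsutil ls gs://<your-bucket>/model/export/
print(sorted(export_dirs)[-1])
```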
## Detailed description @@ -110,7 +104,7 @@ KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar import kfp.components as comp mlengine_deploy_op = comp.load_component_from_url( - 'https://raw.githubusercontent.com/kubeflow/pipelines/e598176c02f45371336ccaa819409e8ec83743df/components/gcp/ml_engine/deploy/component.yaml') + 'https://raw.githubusercontent.com/kubeflow/pipelines/01a23ae8672d3b18e88adf3036071496aca3552d/components/gcp/ml_engine/deploy/component.yaml') help(mlengine_deploy_op) ``` @@ -136,7 +130,6 @@ TRAINED_MODEL_PATH = 'gs://ml-pipeline-playground/samples/ml_engine/census/train ```python import kfp.dsl as dsl -import kfp.gcp as gcp import json @dsl.pipeline( name='CloudML deploy pipeline', @@ -163,7 +156,7 @@ def pipeline( version=version, replace_existing_version=replace_existing_version, set_default=set_default, - wait_interval=wait_interval).apply(gcp.use_gcp_secret('user-gcp-sa')) + wait_interval=wait_interval) ``` #### Compile the pipeline diff --git a/components/gcp/ml_engine/deploy/component.yaml b/components/gcp/ml_engine/deploy/component.yaml index 216b7d5f347..e2e399fa66f 100644 --- a/components/gcp/ml_engine/deploy/component.yaml +++ b/components/gcp/ml_engine/deploy/component.yaml @@ -95,8 +95,9 @@ outputs: type: UI metadata implementation: container: - image: gcr.io/ml-pipeline/ml-pipeline-gcp:57d9f7f1cfd458e945d297957621716062d89a49 + image: gcr.io/ml-pipeline/ml-pipeline-gcp:ad9bd5648dd0453005225779f25d8cebebc7ca00 args: [ + --ui_metadata_path, {outputPath: MLPipeline UI metadata}, kfp_component.google.ml_engine, deploy, --model_uri, {inputValue: model_uri}, --project_id, {inputValue: project_id}, @@ -116,4 +117,3 @@ implementation: model_uri: /tmp/kfp/output/ml_engine/model_uri.txt model_name: /tmp/kfp/output/ml_engine/model_name.txt version_name: /tmp/kfp/output/ml_engine/version_name.txt - MLPipeline UI metadata: /mlpipeline-ui-metadata.json diff --git a/components/gcp/ml_engine/deploy/sample.ipynb b/components/gcp/ml_engine/deploy/sample.ipynb index 43858ff6e93..50f583cee34 100644 --- a/components/gcp/ml_engine/deploy/sample.ipynb +++ b/components/gcp/ml_engine/deploy/sample.ipynb @@ -75,14 +75,7 @@ "To use the component, you must:\n", "\n", "* [Set up the cloud environment](https://cloud.google.com/ml-engine/docs/tensorflow/getting-started-training-prediction#setup).\n", - "* Run the component under a secret [Kubeflow user service account](https://www.kubeflow.org/docs/started/getting-started-gke/#gcp-service-accounts) in a Kubeflow cluster. For example:\n", - "\n", - " ```\n", - " ```python\n", - " mlengine_deploy_op(...).apply(gcp.use_gcp_secret('user-gcp-sa'))\n", - "\n", - " ```\n", - "\n", + "* The component can authenticate to GCP. 
Refer to [Authenticating Pipelines to GCP](https://www.kubeflow.org/docs/gke/authentication-pipelines/) for details.\n", "* Grant read access to the Cloud Storage bucket that contains the trained model to the Kubeflow user service account.\n", "\n", "## Detailed description\n", @@ -128,7 +121,7 @@ "import kfp.components as comp\n", "\n", "mlengine_deploy_op = comp.load_component_from_url(\n", - " 'https://raw.githubusercontent.com/kubeflow/pipelines/e598176c02f45371336ccaa819409e8ec83743df/components/gcp/ml_engine/deploy/component.yaml')\n", + " 'https://raw.githubusercontent.com/kubeflow/pipelines/01a23ae8672d3b18e88adf3036071496aca3552d/components/gcp/ml_engine/deploy/component.yaml')\n", "help(mlengine_deploy_op)" ] }, @@ -176,7 +169,6 @@ "outputs": [], "source": [ "import kfp.dsl as dsl\n", - "import kfp.gcp as gcp\n", "import json\n", "@dsl.pipeline(\n", " name='CloudML deploy pipeline',\n", @@ -203,7 +195,7 @@ " version=version, \n", " replace_existing_version=replace_existing_version, \n", " set_default=set_default, \n", - " wait_interval=wait_interval).apply(gcp.use_gcp_secret('user-gcp-sa'))" + " wait_interval=wait_interval)" ] }, { @@ -288,4 +280,4 @@ }, "nbformat": 4, "nbformat_minor": 2 -} +} \ No newline at end of file diff --git a/components/gcp/ml_engine/train/README.md b/components/gcp/ml_engine/train/README.md index 81c11e0f3aa..3fecc9f82bd 100644 --- a/components/gcp/ml_engine/train/README.md +++ b/components/gcp/ml_engine/train/README.md @@ -1,33 +1,55 @@ # Name -Submitting a Cloud Machine Learning Engine training job as a pipeline step +Component: Submitting an AI Platform training job as a pipeline step # Label -GCP, Cloud ML Engine, Machine Learning, pipeline, component, Kubeflow, Kubeflow Pipeline + AI Platform, Kubeflow # Summary -A Kubeflow Pipeline component to submit a Cloud ML Engine training job as a step in a pipeline. +A Kubeflow pipeline component to submit an AI Platform training job as a step in a pipeline. + +# Facets + +Use case: +Other + +Technique: +Other + +Input data type: +Tabular + +ML workflow: +Training # Details ## Intended use -Use this component to submit a training job to Cloud ML Engine from a Kubeflow Pipeline. +Use this component to submit a training job to AI Platform from a Kubeflow pipeline. ## Runtime arguments | Argument | Description | Optional | Data type | Accepted values | Default | |:------------------|:------------------|:----------|:--------------|:-----------------|:-------------| -| project_id | The ID of the Google Cloud Platform (GCP) project of the job. | No | GCPProjectID | | | -| python_module | The name of the Python module to run after installing the training program. | Yes | String | | None | -| package_uris | The Cloud Storage location of the packages that contain the training program and any additional dependencies. The maximum number of package URIs is 100. | Yes | List | | None | -| region | The Compute Engine region in which the training job is run. | Yes | GCPRegion | | us-central1 | -| args | The command line arguments to pass to the training program. | Yes | List | | None | -| job_dir | A Cloud Storage path in which to store the training outputs and other data needed for training. This path is passed to your TensorFlow program as the `job-dir` command-line argument. The benefit of specifying this field is that Cloud ML validates the path for use in training. | Yes | GCSPath | | None | -| python_version | The version of Python used in training. If it is not set, the default version is 2.7. 
Python 3.5 is available when the runtime version is set to 1.4 and above. | Yes | String | | None | -| runtime_version | The runtime version of Cloud ML Engine to use for training. If it is not set, Cloud ML Engine uses the default. | Yes | String | | 1 | -| master_image_uri | The Docker image to run on the master replica. This image must be in Container Registry. | Yes | GCRPath | | None | -| worker_image_uri | The Docker image to run on the worker replica. This image must be in Container Registry. | Yes | GCRPath | | None | +| project_id | The Google Cloud Platform (GCP) project ID of the job. | No | GCPProjectID | - | - | +| python_module | The name of the Python module to run after installing the training program. | Yes | String | - | None | +| package_uris | The Cloud Storage location of the packages that contain the training program and any additional dependencies. The maximum number of package URIs is 100. | Yes | List | -| None | +| region | The Compute Engine region in which the training job is run. | Yes | GCPRegion | -| us-central1 | +| args | The command line arguments to pass to the training program. | Yes | List | - | None | +| job_dir | A Cloud Storage path in which to store the training outputs and other data needed for training. This path is passed to your TensorFlow program as the command-line argument, `job-dir`. The benefit of specifying this field is that Cloud ML validates the path for use in training. | Yes | GCSPath | - | None | +| python_version | The version of Python used in training. If it is not set, the default version is 2.7. Python 3.5 is available when the runtime version is set to 1.4 and above. | Yes | String | - | None | +| runtime_version | The runtime version of AI Platform to use for training. If it is not set, AI Platform uses the default. | Yes | String | - | 1 | +| master_image_uri | The Docker image to run on the master replica. This image must be in Container Registry. | Yes | GCRPath | - | None | +| worker_image_uri | The Docker image to run on the worker replica. This image must be in Container Registry. | Yes | GCRPath |- | None | | training_input | The input parameters to create a training job. | Yes | Dict | [TrainingInput](https://cloud.google.com/ml-engine/reference/rest/v1/projects.jobs#TrainingInput) | None | -| job_id_prefix | The prefix of the job ID that is generated. | Yes | String | | None | -| wait_interval | The number of seconds to wait between API calls to get the status of the job. | Yes | Integer | | 30 | +| job_id_prefix | The prefix of the job ID that is generated. | Yes | String | - | None | +| wait_interval | The number of seconds to wait between API calls to get the status of the job. | Yes | Integer | - | 30 | @@ -43,7 +65,7 @@ The component accepts two types of inputs: | Name | Description | Type | |:------- |:---- | :--- | | job_id | The ID of the created job. | String | -| job_dir | The Cloud Storage path that contains the trained model output files. | GCSPath | +| job_dir | The Cloud Storage path that contains the output files with the trained model. | GCSPath | ## Cautions & requirements @@ -51,63 +73,52 @@ The component accepts two types of inputs: To use the component, you must: * Set up a cloud environment by following this [guide](https://cloud.google.com/ml-engine/docs/tensorflow/getting-started-training-prediction#setup). -* Run the component under a secret [Kubeflow user service account](https://www.kubeflow.org/docs/started/getting-started-gke/#gcp-service-accounts) in a Kubeflow cluster. 
For example: - - ``` - mlengine_train_op(...).apply(gcp.use_gcp_secret('user-gcp-sa')) - ``` - +* The component can authenticate to GCP. Refer to [Authenticating Pipelines to GCP](https://www.kubeflow.org/docs/gke/authentication-pipelines/) for details. * Grant the following access to the Kubeflow user service account: * Read access to the Cloud Storage buckets which contain the input data, packages, or Docker images. * Write access to the Cloud Storage bucket of the output directory. ## Detailed description -The component builds the [TrainingInput](https://cloud.google.com/ml-engine/reference/rest/v1/projects.jobs#TrainingInput) payload and submits a job via the [Cloud ML Engine REST API](https://cloud.google.com/ml-engine/reference/rest/v1/projects.jobs). +The component builds the [TrainingInput](https://cloud.google.com/ml-engine/reference/rest/v1/projects.jobs#TrainingInput) payload and submits a job via the [AI Platform REST API](https://cloud.google.com/ml-engine/reference/rest/v1/projects.jobs). The steps to use the component in a pipeline are: -1. Install the Kubeflow Pipeline SDK: - - - -```python -%%capture --no-stderr - -KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar.gz' -!pip3 install $KFP_PACKAGE --upgrade -``` +1. Install the Kubeflow pipeline's SDK: -2. Load the component using KFP SDK + ```python + %%capture --no-stderr + KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar.gz' + !pip3 install $KFP_PACKAGE --upgrade + ``` -```python -import kfp.components as comp +2. Load the component using the Kubeflow pipeline's SDK: -mlengine_train_op = comp.load_component_from_url( - 'https://raw.githubusercontent.com/kubeflow/pipelines/e598176c02f45371336ccaa819409e8ec83743df/components/gcp/ml_engine/train/component.yaml') -help(mlengine_train_op) -``` + ```python + import kfp.components as comp + mlengine_train_op = comp.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/01a23ae8672d3b18e88adf3036071496aca3552d/components/gcp/ml_engine/train/component.yaml') + help(mlengine_train_op) + ``` ### Sample -Note: The following sample code works in an IPython notebook or directly in Python code. +The following sample code works in an IPython notebook or directly in Python code. -In this sample, you use the code from the [census estimator sample](https://github.com/GoogleCloudPlatform/cloudml-samples/tree/master/census/estimator) to train a model in Cloud ML Engine. To upload the code to Cloud ML Engine, package the Python code and upload it to a Cloud Storage bucket. +In this sample, you use the code from the [census estimator sample](https://github.com/GoogleCloudPlatform/cloudml-samples/tree/master/census/estimator) to train a model on AI Platform. To upload the code to AI Platform, package the Python code and upload it to a Cloud Storage bucket. Note: You must have read and write permissions on the bucket that you use as the working directory. 
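A quick sanity check of those permissions, once `GCS_WORKING_DIR` is set below (a sketch that writes and then removes a marker object):

```python
# Streams a tiny object into the working directory, then deletes it
!echo ok | gsutil cp - $GCS_WORKING_DIR/.write_test
!gsutil rm $GCS_WORKING_DIR/.write_test
```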
-#### Set sample parameters +#### Set sample parameters ```python -# Required Parameters -PROJECT_ID = '' -GCS_WORKING_DIR = 'gs://' # No ending slash +# Required parameters +PROJECT_ID = '' +GCS_WORKING_DIR = 'gs://' # No ending slash ``` - ```python -# Optional Parameters +# Optional parameters EXPERIMENT_NAME = 'CLOUDML - Train' TRAINER_GCS_PATH = GCS_WORKING_DIR + '/train/trainer.tar.gz' OUTPUT_GCS_PATH = GCS_WORKING_DIR + '/train/output/' @@ -115,14 +126,12 @@ OUTPUT_GCS_PATH = GCS_WORKING_DIR + '/train/output/' #### Clean up the working directory - ```python %%capture --no-stderr !gsutil rm -r $GCS_WORKING_DIR ``` -#### Download the sample trainer code to local - +#### Download the sample trainer code to a local directory ```python %%capture --no-stderr @@ -132,7 +141,6 @@ OUTPUT_GCS_PATH = GCS_WORKING_DIR + '/train/output/' #### Package code and upload the package to Cloud Storage - ```python %%capture --no-stderr %%bash -s "$TRAINER_GCS_PATH" @@ -145,10 +153,8 @@ rm -fr ./cloudml-samples-master/ ./master.zip ./dist #### Example pipeline that uses the component - ```python import kfp.dsl as dsl -import kfp.gcp as gcp import json @dsl.pipeline( name='CloudML training pipeline', @@ -187,12 +193,11 @@ def pipeline( worker_image_uri=worker_image_uri, training_input=training_input, job_id_prefix=job_id_prefix, - wait_interval=wait_interval).apply(gcp.use_gcp_secret('user-gcp-sa')) + wait_interval=wait_interval) ``` #### Compile the pipeline - ```python pipeline_func = pipeline pipeline_filename = pipeline_func.__name__ + '.zip' @@ -202,12 +207,11 @@ compiler.Compiler().compile(pipeline_func, pipeline_filename) #### Submit the pipeline for execution - ```python -#Specify pipeline argument values +#Specify values for the pipeline's arguments arguments = {} -#Get or create an experiment and submit a pipeline run +#Get or create an experiment import kfp client = kfp.Client() experiment = client.create_experiment(EXPERIMENT_NAME) @@ -221,16 +225,15 @@ run_result = client.run_pipeline(experiment.id, run_name, pipeline_filename, arg Use the following command to inspect the contents in the output directory: - ```python !gsutil ls $OUTPUT_GCS_PATH ``` ## References -* [Component python code](https://github.com/kubeflow/pipelines/blob/master/components/gcp/container/component_sdk/python/kfp_component/google/ml_engine/_train.py) -* [Component docker file](https://github.com/kubeflow/pipelines/blob/master/components/gcp/container/Dockerfile) +* [Component Python code](https://github.com/kubeflow/pipelines/blob/master/components/gcp/container/component_sdk/python/kfp_component/google/ml_engine/_train.py) +* [Component Docker file](https://github.com/kubeflow/pipelines/blob/master/components/gcp/container/Dockerfile) * [Sample notebook](https://github.com/kubeflow/pipelines/blob/master/components/gcp/ml_engine/train/sample.ipynb) -* [Cloud Machine Learning Engine job REST API](https://cloud.google.com/ml-engine/reference/rest/v1/projects.jobs) +* [AI Platform REST API - Resource: Job](https://cloud.google.com/ml-engine/reference/rest/v1/projects.jobs) ## License By deploying or using this software you agree to comply with the [AI Hub Terms of Service](https://aihub.cloud.google.com/u/0/aihub-tos) and the [Google APIs Terms of Service](https://developers.google.com/terms/). To the extent of a direct conflict of terms, the AI Hub Terms of Service will control. 
diff --git a/components/gcp/ml_engine/train/component.yaml b/components/gcp/ml_engine/train/component.yaml index e7d6d384245..5d26e1554c9 100644 --- a/components/gcp/ml_engine/train/component.yaml +++ b/components/gcp/ml_engine/train/component.yaml @@ -103,8 +103,9 @@ outputs: type: UI metadata implementation: container: - image: gcr.io/ml-pipeline/ml-pipeline-gcp:57d9f7f1cfd458e945d297957621716062d89a49 + image: gcr.io/ml-pipeline/ml-pipeline-gcp:ad9bd5648dd0453005225779f25d8cebebc7ca00 args: [ + --ui_metadata_path, {outputPath: MLPipeline UI metadata}, kfp_component.google.ml_engine, train, --project_id, {inputValue: project_id}, --python_module, {inputValue: python_module}, @@ -118,11 +119,10 @@ implementation: --worker_image_uri, {inputValue: worker_image_uri}, --training_input, {inputValue: training_input}, --job_id_prefix, {inputValue: job_id_prefix}, - --wait_interval, {inputValue: wait_interval} + --wait_interval, {inputValue: wait_interval}, ] env: KFP_POD_NAME: "{{pod.name}}" fileOutputs: job_id: /tmp/kfp/output/ml_engine/job_id.txt job_dir: /tmp/kfp/output/ml_engine/job_dir.txt - MLPipeline UI metadata: /mlpipeline-ui-metadata.json diff --git a/components/gcp/ml_engine/train/sample.ipynb b/components/gcp/ml_engine/train/sample.ipynb index 319e7abce16..c9d5dfbc063 100644 --- a/components/gcp/ml_engine/train/sample.ipynb +++ b/components/gcp/ml_engine/train/sample.ipynb @@ -56,12 +56,7 @@ "To use the component, you must:\n", "\n", "* Set up a cloud environment by following this [guide](https://cloud.google.com/ml-engine/docs/tensorflow/getting-started-training-prediction#setup).\n", - "* Run the component under a secret [Kubeflow user service account](https://www.kubeflow.org/docs/started/getting-started-gke/#gcp-service-accounts) in a Kubeflow cluster. For example:\n", - "\n", - " ```\n", - " mlengine_train_op(...).apply(gcp.use_gcp_secret('user-gcp-sa'))\n", - " ```\n", - "\n", + "* The component can authenticate to GCP. 
Refer to [Authenticating Pipelines to GCP](https://www.kubeflow.org/docs/gke/authentication-pipelines/) for details.\n", "* Grant the following access to the Kubeflow user service account: \n", " * Read access to the Cloud Storage buckets which contain the input data, packages, or Docker images.\n", " * Write access to the Cloud Storage bucket of the output directory.\n", @@ -104,7 +99,7 @@ "import kfp.components as comp\n", "\n", "mlengine_train_op = comp.load_component_from_url(\n", - " 'https://raw.githubusercontent.com/kubeflow/pipelines/e598176c02f45371336ccaa819409e8ec83743df/components/gcp/ml_engine/train/component.yaml')\n", + " 'https://raw.githubusercontent.com/kubeflow/pipelines/01a23ae8672d3b18e88adf3036071496aca3552d/components/gcp/ml_engine/train/component.yaml')\n", "help(mlengine_train_op)" ] }, @@ -219,7 +214,6 @@ "outputs": [], "source": [ "import kfp.dsl as dsl\n", - "import kfp.gcp as gcp\n", "import json\n", "@dsl.pipeline(\n", " name='CloudML training pipeline',\n", @@ -258,7 +252,7 @@ " worker_image_uri=worker_image_uri, \n", " training_input=training_input, \n", " job_id_prefix=job_id_prefix, \n", - " wait_interval=wait_interval).apply(gcp.use_gcp_secret('user-gcp-sa'))" + " wait_interval=wait_interval)" ] }, { @@ -360,4 +354,4 @@ }, "nbformat": 4, "nbformat_minor": 2 -} +} \ No newline at end of file diff --git a/components/git/clone/component.yaml b/components/git/clone/component.yaml new file mode 100644 index 00000000000..0e18ed64e6c --- /dev/null +++ b/components/git/clone/component.yaml @@ -0,0 +1,18 @@ +name: Git clone +description: Creates a shallow clone of the specified repo branch +inputs: +- {name: Repo URI, type: URI} +- {name: Branch, type: String, default: master} +outputs: +- {name: Repo dir, type: Directory} +implementation: + container: + image: alpine/git + command: + - git + - clone + - --depth=1 + - --branch + - inputValue: Branch + - inputValue: Repo URI + - outputPath: Repo dir diff --git a/components/google-cloud/storage/download/component.yaml b/components/google-cloud/storage/download/component.yaml new file mode 100644 index 00000000000..2ae6f5988ac --- /dev/null +++ b/components/google-cloud/storage/download/component.yaml @@ -0,0 +1,27 @@ +name: Download from GCS +inputs: +- {name: GCS path, type: URI} +outputs: +- {name: Data} +implementation: + container: + image: google/cloud-sdk + command: + - bash # Pattern comparison only works in Bash + - -ex + - -c + - | + if [ -n "${GOOGLE_APPLICATION_CREDENTIALS}" ]; then + gcloud auth activate-service-account --key-file="${GOOGLE_APPLICATION_CREDENTIALS}" + fi + + stat_result=$(gsutil ls -d "$0") + if [[ "$stat_result" == */ ]]; then + mkdir -p "$1" # When source path is a directory, gsutil requires the destination to also be a directory + else + mkdir -p "$(dirname "$1")" + fi + + gsutil -m rsync -r "$0" "$1" # gsutil cp has different path handling than Linux cp. It always puts the source directory (name) inside the destination directory. gsutil rsync does not have that problem. 
+ - inputValue: GCS path + - outputPath: Data diff --git a/components/google-cloud/storage/download_blob/component.yaml b/components/google-cloud/storage/download_blob/component.yaml new file mode 100644 index 00000000000..24b8b4ff143 --- /dev/null +++ b/components/google-cloud/storage/download_blob/component.yaml @@ -0,0 +1,20 @@ +name: Download from GCS +inputs: +- {name: GCS path, type: URI} +outputs: +- {name: Data} +implementation: + container: + image: google/cloud-sdk + command: + - sh + - -ex + - -c + - | + if [ -n "${GOOGLE_APPLICATION_CREDENTIALS}" ]; then + gcloud auth activate-service-account --key-file="${GOOGLE_APPLICATION_CREDENTIALS}" + fi + mkdir -p "$(dirname "$1")" + gsutil -m cp -r "$0" "$1" + - inputValue: GCS path + - outputPath: Data diff --git a/components/google-cloud/storage/download_dir/component.yaml b/components/google-cloud/storage/download_dir/component.yaml new file mode 100644 index 00000000000..fb1da2153fa --- /dev/null +++ b/components/google-cloud/storage/download_dir/component.yaml @@ -0,0 +1,20 @@ +name: Download from GCS +inputs: +- {name: GCS path, type: URI} +outputs: +- {name: Data} +implementation: + container: + image: google/cloud-sdk + command: + - sh + - -ex + - -c + - | + if [ -n "${GOOGLE_APPLICATION_CREDENTIALS}" ]; then + gcloud auth activate-service-account --key-file="${GOOGLE_APPLICATION_CREDENTIALS}" + fi + mkdir -p "$1" + gsutil -m cp -r "$0" "$1" + - inputValue: GCS path + - outputPath: Data diff --git a/components/google-cloud/storage/list/component.yaml b/components/google-cloud/storage/list/component.yaml new file mode 100644 index 00000000000..1b7892fa995 --- /dev/null +++ b/components/google-cloud/storage/list/component.yaml @@ -0,0 +1,20 @@ +name: List blobs +inputs: +- {name: GCS path, type: URI, description: 'GCS path for listing. 
For recursive listing use the "gs://bucket/path/**" syntax".'} +outputs: +- {name: Paths} +implementation: + container: + image: google/cloud-sdk + command: + - sh + - -ex + - -c + - | + if [ -n "${GOOGLE_APPLICATION_CREDENTIALS}" ]; then + gcloud auth activate-service-account --key-file="${GOOGLE_APPLICATION_CREDENTIALS}" + fi + mkdir -p "$(dirname "$1")" + gsutil ls "$0" > "$1" + - inputValue: GCS path + - outputPath: Paths diff --git a/components/google-cloud/storage/upload_to_explicit_uri/component.yaml b/components/google-cloud/storage/upload_to_explicit_uri/component.yaml new file mode 100644 index 00000000000..d75aaaed57c --- /dev/null +++ b/components/google-cloud/storage/upload_to_explicit_uri/component.yaml @@ -0,0 +1,23 @@ +name: Upload to GCS +inputs: +- {name: Data} +- {name: GCS path, type: URI} +outputs: +- {name: GCS path, type: URI} +implementation: + container: + image: google/cloud-sdk + command: + - sh + - -ex + - -c + - | + if [ -n "${GOOGLE_APPLICATION_CREDENTIALS}" ]; then + gcloud auth activate-service-account --key-file="${GOOGLE_APPLICATION_CREDENTIALS}" + fi + gsutil cp -r "$0" "$1" + mkdir -p "$(dirname "$2")" + echo "$1" > "$2" + - inputPath: Data + - inputValue: GCS path + - outputPath: GCS path diff --git a/components/google-cloud/storage/upload_to_unique_uri/component.yaml b/components/google-cloud/storage/upload_to_unique_uri/component.yaml new file mode 100644 index 00000000000..bcbe0c29f59 --- /dev/null +++ b/components/google-cloud/storage/upload_to_unique_uri/component.yaml @@ -0,0 +1,24 @@ +name: Upload to GCS +description: Upload to GCS with unique URI suffix +inputs: +- {name: Data} +- {name: GCS path prefix, type: URI} +outputs: +- {name: GCS path, type: URI} +implementation: + container: + image: google/cloud-sdk + command: + - sh + - -ex + - -c + - | + if [ -n "${GOOGLE_APPLICATION_CREDENTIALS}" ]; then + gcloud auth activate-service-account --key-file="${GOOGLE_APPLICATION_CREDENTIALS}" + fi + gsutil cp -r "$0" "$1" + mkdir -p "$(dirname "$2")" + echo "$1" > "$2" + - inputPath: Data + - concat: [{inputValue: GCS path prefix}, '{{workflow.uid}}_{{pod.name}}'] + - outputPath: GCS path diff --git a/components/ibm-components/watson/deploy/Dockerfile b/components/ibm-components/watson/deploy/Dockerfile index c2f6ba258dc..63548a9aab9 100644 --- a/components/ibm-components/watson/deploy/Dockerfile +++ b/components/ibm-components/watson/deploy/Dockerfile @@ -5,7 +5,7 @@ RUN mkdir /app RUN mkdir /app/secrets # Watson studio and machine learning python client -RUN pip install watson_machine_learning_client minio +RUN pip install watson-machine-learning-client-V4 minio # Python functions with endpoints to Watson Machine Learning COPY src/wml-deploy.py /app diff --git a/components/ibm-components/watson/deploy/component.yaml b/components/ibm-components/watson/deploy/component.yaml index 82d96ce82ef..14280589f5e 100644 --- a/components/ibm-components/watson/deploy/component.yaml +++ b/components/ibm-components/watson/deploy/component.yaml @@ -28,12 +28,11 @@ implementation: image: docker.io/aipipeline/wml-deploy:latest command: ['python'] args: [ - /app/wml-deploy.py, + -u, /app/wml-deploy.py, --model-uid, {inputValue: model_uid}, --model-name, {inputValue: model_name}, --scoring-payload, {inputValue: scoring_payload}, - --deployment-name, {inputValue: deployment_name} + --deployment-name, {inputValue: deployment_name}, + --output-scoring-endpoint-path, {outputPath: scoring_endpoint}, + --output-model-uid-path, {outputPath: model_uid} ] - fileOutputs: - 
scoring_endpoint: /tmp/scoring_endpoint - model_uid: /tmp/model_uid diff --git a/components/ibm-components/watson/deploy/src/wml-deploy.py b/components/ibm-components/watson/deploy/src/wml-deploy.py index 87764a5956d..6a369ec42ed 100644 --- a/components/ibm-components/watson/deploy/src/wml-deploy.py +++ b/components/ibm-components/watson/deploy/src/wml-deploy.py @@ -21,6 +21,7 @@ def getSecret(secret): def deploy(args): from watson_machine_learning_client import WatsonMachineLearningAPIClient from minio import Minio + from pathlib import Path import os import re @@ -42,11 +43,17 @@ def deploy(args): } client = WatsonMachineLearningAPIClient(wml_credentials) + client.deployments.list() + # deploy the model - deployment_desc = "deployment of %s" % wml_model_name - deployment = client.deployments.create(model_uid, deployment_name, deployment_desc) - scoring_endpoint = client.deployments.get_scoring_url(deployment) - print("scoring_endpoint: ", scoring_endpoint) + meta_props = { + client.deployments.ConfigurationMetaNames.NAME: deployment_name, + client.deployments.ConfigurationMetaNames.ONLINE: {} + } + deployment_details = client.deployments.create(model_uid, meta_props) + scoring_endpoint = client.deployments.get_scoring_href(deployment_details) + deployment_uid = client.deployments.get_uid(deployment_details) + print("deployment_uid: ", deployment_uid) if wml_scoring_payload: # download scoring payload if exist @@ -70,21 +77,19 @@ def deploy(args): import json with open(payload_file) as data_file: test_data = json.load(data_file) - payload = test_data['payload'] + payload = {client.deployments.ScoringMetaNames.INPUT_DATA: [test_data['payload']]} data_file.close() print("Scoring result: ") - result = client.deployments.score(scoring_endpoint, payload) + result = client.deployments.score(deployment_uid, payload) else: result = 'Scoring payload is not provided' print(result) - with open("/tmp/scoring_endpoint", "w") as f: - print(scoring_endpoint, file=f) - f.close() - with open("/tmp/model_uid", "w") as f: - print(model_uid, file=f) - f.close() + Path(args.output_scoring_endpoint_path).parent.mkdir(parents=True, exist_ok=True) + Path(args.output_scoring_endpoint_path).write_text(scoring_endpoint) + Path(args.output_model_uid_path).parent.mkdir(parents=True, exist_ok=True) + Path(args.output_model_uid_path).write_text(model_uid) if __name__ == "__main__": @@ -94,5 +99,7 @@ def deploy(args): parser.add_argument('--model-uid', type=str, required=True) parser.add_argument('--deployment-name', type=str) parser.add_argument('--scoring-payload', type=str) + parser.add_argument('--output-scoring-endpoint-path', type=str, default='/tmp/scoring_endpoint') + parser.add_argument('--output-model-uid-path', type=str, default='/tmp/model_uid') args = parser.parse_args() deploy(args) diff --git a/components/ibm-components/watson/store/Dockerfile b/components/ibm-components/watson/store/Dockerfile index 34c929230b7..06e472cc2ae 100644 --- a/components/ibm-components/watson/store/Dockerfile +++ b/components/ibm-components/watson/store/Dockerfile @@ -5,7 +5,7 @@ RUN mkdir /app RUN mkdir /app/secrets # Watson studio and machine learning python client -RUN pip install watson_machine_learning_client minio +RUN pip install watson-machine-learning-client-V4 minio # Python functions with endpoints to Watson Machine Learning COPY src/wml-store.py /app diff --git a/components/ibm-components/watson/store/component.yaml b/components/ibm-components/watson/store/component.yaml index 965b8fcf4a3..8496a4883e7 100644 --- 
a/components/ibm-components/watson/store/component.yaml +++ b/components/ibm-components/watson/store/component.yaml @@ -18,6 +18,9 @@ metadata: inputs: - {name: run_uid, description: 'Required. UID for the Watson Machine Learning training-runs'} - {name: model_name, description: 'Required. Model Name to store on Watson Machine Learning'} + - {name: framework, description: 'ML/DL Model Framework', default: 'tensorflow'} + - {name: framework_version, description: 'Model Framework version', default: '1.14'} + - {name: runtime_version, description: 'Model Code runtime version', default: '3.6'} outputs: - {name: model_uid, description: 'UID for the stored model on Watson Machine Learning'} implementation: @@ -25,9 +28,11 @@ implementation: image: docker.io/aipipeline/wml-store:latest command: ['python3'] args: [ - /app/wml-store.py, + -u, /app/wml-store.py, --run-uid, {inputValue: run_uid}, - --model-name, {inputValue: model_name} + --model-name, {inputValue: model_name}, + --framework, {inputValue: framework}, + --framework-version, {inputValue: framework_version}, + --runtime-version, {inputValue: runtime_version}, + --output-model-uid-path, {outputPath: model_uid} ] - fileOutputs: - model_uid: /tmp/model_uid diff --git a/components/ibm-components/watson/store/src/wml-store.py b/components/ibm-components/watson/store/src/wml-store.py index 28df64dea07..171adf7720f 100644 --- a/components/ibm-components/watson/store/src/wml-store.py +++ b/components/ibm-components/watson/store/src/wml-store.py @@ -18,31 +18,42 @@ def getSecret(secret): f.close() return res -def store(wml_model_name, run_uid): +def store(wml_model_name, run_uid, framework, framework_version, runtime_version, output_model_uid_path): from watson_machine_learning_client import WatsonMachineLearningAPIClient + from pathlib import Path # retrieve credentials wml_url = getSecret("/app/secrets/wml_url") wml_instance_id = getSecret("/app/secrets/wml_instance_id") wml_apikey = getSecret("/app/secrets/wml_apikey") + runtime_uid = framework + '_' + framework_version + '-py' + runtime_version + runtime_type = framework + '_' + framework_version + + print("runtime_uid:", runtime_uid) + print("runtime_type:", runtime_type) # set up the WML client wml_credentials = { "url": wml_url, "instance_id": wml_instance_id, "apikey": wml_apikey } - client = WatsonMachineLearningAPIClient( wml_credentials ) + client = WatsonMachineLearningAPIClient(wml_credentials) # store the model - stored_model_name = wml_model_name - stored_model_details = client.repository.store_model( run_uid, stored_model_name ) - model_uid = client.repository.get_model_uid( stored_model_details ) - print( "model_uid: ", model_uid ) + meta_props_tf = { + client.repository.ModelMetaNames.NAME: wml_model_name, + client.repository.ModelMetaNames.RUNTIME_UID: runtime_uid, + client.repository.ModelMetaNames.TYPE: runtime_type + } - with open("/tmp/model_uid", "w") as f: - f.write(model_uid) - f.close() + model_details = client.repository.store_model(run_uid, meta_props=meta_props_tf) + + model_uid = client.repository.get_model_uid(model_details) + print("model_uid: ", model_uid) + + Path(output_model_uid_path).parent.mkdir(parents=True, exist_ok=True) + Path(output_model_uid_path).write_text(model_uid) import time time.sleep(120) @@ -52,5 +63,14 @@ def store(wml_model_name, run_uid): parser = argparse.ArgumentParser() parser.add_argument('--model-name', type=str, required=True) parser.add_argument('--run-uid', type=str, required=True) + parser.add_argument('--framework', type=str, 
required=True) + parser.add_argument('--framework-version', type=str, required=True) + parser.add_argument('--runtime-version', type=str, required=True) + parser.add_argument('--output-model-uid-path', type=str, default='/tmp/model_uid') args = parser.parse_args() - store(args.model_name, args.run_uid) + store(args.model_name, + args.run_uid, + args.framework, + args.framework_version, + args.runtime_version, + args.output_model_uid_path) diff --git a/components/ibm-components/watson/train/Dockerfile b/components/ibm-components/watson/train/Dockerfile index 57d2d9bdb19..06b35a0dcf3 100644 --- a/components/ibm-components/watson/train/Dockerfile +++ b/components/ibm-components/watson/train/Dockerfile @@ -5,7 +5,7 @@ RUN mkdir /app RUN mkdir /app/secrets # Watson studio and machine learning python client -RUN pip install watson_machine_learning_client minio +RUN pip install watson-machine-learning-client-V4 minio # Python functions with endpoints to Watson Machine Learning COPY src/wml-train.py /app diff --git a/components/ibm-components/watson/train/component.yaml b/components/ibm-components/watson/train/component.yaml index 97efa4a4c4f..5838da5cf67 100644 --- a/components/ibm-components/watson/train/component.yaml +++ b/components/ibm-components/watson/train/component.yaml @@ -20,12 +20,14 @@ inputs: - {name: execution_command, description: 'Required. Execution command to start the model training.'} - {name: config, description: 'Credential configfile is properly created.', default: 'secret_name'} - {name: framework, description: 'ML/DL Model Framework', default: 'tensorflow'} - - {name: framework_version, description: 'Model Framework version', default: '1.5'} + - {name: framework_version, description: 'Model Framework version', default: '1.14'} - {name: runtime, description: 'Model Code runtime language', default: 'python'} - - {name: runtime_version, description: 'Model Code runtime version', default: '3.5'} + - {name: runtime_version, description: 'Model Code runtime version', default: '3.6'} - {name: run_definition, description: 'Name for the Watson Machine Learning training definition', default: 'python-tensorflow-definition'} - {name: run_name, description: 'Name for the Watson Machine Learning training-runs', default: 'python-tensorflow-run'} - {name: author_name, description: 'Name of this training job author', default: 'default-author'} + - {name: compute_name, description: 'Name of the compute tier; in WML this is the GPU count', default: 'k80'} + - {name: compute_nodes, description: 'Number of compute machines', default: '1'} outputs: - {name: run_uid, description: 'UID for the Watson Machine Learning training-runs'} - {name: training_uid, description: 'Training Location UID for the Watson Machine Learning training-runs'} @@ -34,7 +36,7 @@ implementation: image: docker.io/aipipeline/wml-train:latest command: ['python3'] args: [ - /app/wml-train.py, + -u, /app/wml-train.py, --config, {inputValue: config}, --train-code, {inputValue: train_code}, --execution-command, {inputValue: execution_command}, @@ -44,8 +46,9 @@ --runtime-version, {inputValue: runtime_version}, --run-definition, {inputValue: run_definition}, --run-name, {inputValue: run_name}, - --author-name, {inputValue: author_name} + --author-name, {inputValue: author_name}, + --compute-name, {inputValue: compute_name}, + --compute-nodes, {inputValue: compute_nodes}, + --output-run-uid-path, {outputPath: run_uid}, + --output-training-uid-path, {outputPath: training_uid} ] - fileOutputs: - run_uid: /tmp/run_uid - 
training_uid: /tmp/training_uid diff --git a/components/ibm-components/watson/train/src/wml-train.py b/components/ibm-components/watson/train/src/wml-train.py index 018f66ca2d8..2ed54b50b86 100644 --- a/components/ibm-components/watson/train/src/wml-train.py +++ b/components/ibm-components/watson/train/src/wml-train.py @@ -22,17 +22,23 @@ def train(args): from watson_machine_learning_client import WatsonMachineLearningAPIClient from minio import Minio from urllib.parse import urlsplit + from pathlib import Path import os,time wml_train_code = args.train_code wml_execution_command = args.execution_command.strip('\'') wml_framework_name = args.framework if args.framework else 'tensorflow' - wml_framework_version = args.framework_version if args.framework_version else '1.5' + wml_framework_version = args.framework_version if args.framework_version else '1.14' wml_runtime_name = args.runtime if args.runtime else 'python' - wml_runtime_version = args.runtime_version if args.runtime_version else '3.5' + wml_runtime_version = args.runtime_version if args.runtime_version else '3.6' wml_run_definition = args.run_definition if args.run_definition else 'python-tensorflow-definition' wml_run_name = args.run_name if args.run_name else 'python-tensorflow-run' wml_author_name = args.author_name if args.author_name else 'default-author' + wml_compute_name = args.compute_name if args.compute_name else 'k80' + wml_compute_nodes = args.compute_nodes if args.compute_nodes else '1' + + wml_runtime_version_v4 = wml_framework_version + '-py' + wml_runtime_version + wml_compute_nodes_v4 = int(wml_compute_nodes) # retrieve credentials wml_url = getSecret("/app/secrets/wml_url") @@ -40,7 +46,7 @@ def train(args): wml_instance_id = getSecret("/app/secrets/wml_instance_id") wml_data_source_type = getSecret("/app/secrets/wml_data_source_type") - + cos_endpoint = getSecret("/app/secrets/cos_endpoint") cos_endpoint_parts = urlsplit(cos_endpoint) if bool(cos_endpoint_parts.scheme): @@ -57,9 +63,10 @@ def train(args): model_code = os.path.join('/app', wml_train_code) cos = Minio(cos_endpoint_hostname, - access_key = cos_access_key, - secret_key = cos_secret_key, - secure = True) + access_key=cos_access_key, + secret_key=cos_secret_key, + secure=True) + cos.fget_object(cos_input_bucket, wml_train_code, model_code) # set up the WML client @@ -68,75 +75,125 @@ def train(args): "instance_id": wml_instance_id, "apikey": wml_apikey } - client = WatsonMachineLearningAPIClient( wml_credentials ) - + client = WatsonMachineLearningAPIClient(wml_credentials) # define the model - metadata = { - client.repository.DefinitionMetaNames.NAME : wml_run_definition, - client.repository.DefinitionMetaNames.AUTHOR_NAME : wml_author_name, - client.repository.DefinitionMetaNames.FRAMEWORK_NAME : wml_framework_name, - client.repository.DefinitionMetaNames.FRAMEWORK_VERSION : wml_framework_version, - client.repository.DefinitionMetaNames.RUNTIME_NAME : wml_runtime_name, - client.repository.DefinitionMetaNames.RUNTIME_VERSION : wml_runtime_version, - client.repository.DefinitionMetaNames.EXECUTION_COMMAND : wml_execution_command + lib_meta = { + client.runtimes.LibraryMetaNames.NAME: wml_run_definition, + client.runtimes.LibraryMetaNames.VERSION: wml_framework_version, + client.runtimes.LibraryMetaNames.FILEPATH: model_code, + client.runtimes.LibraryMetaNames.PLATFORM: {"name": wml_framework_name, "versions": [wml_framework_version]} + } + # check existing library + library_details = client.runtimes.get_library_details() + for library_detail in 
library_details['resources']: + if library_detail['entity']['name'] == wml_run_definition: + # Delete the library if it exists because we cannot update model_code + uid = client.runtimes.get_library_uid(library_detail) + client.repository.delete(uid) + break + custom_library_details = client.runtimes.store_library(lib_meta) + custom_library_uid = client.runtimes.get_library_uid(custom_library_details) + + # create a pipeline with the model definitions included + doc = { + "doc_type": "pipeline", + "version": "2.0", + "primary_pipeline": wml_framework_name, + "pipelines": [{ + "id": wml_framework_name, + "runtime_ref": "hybrid", + "nodes": [{ + "id": "training", + "type": "model_node", + "op": "dl_train", + "runtime_ref": wml_run_name, + "inputs": [], + "outputs": [], + "parameters": { + "name": "tf-mnist", + "description": wml_run_definition, + "command": wml_execution_command, + "training_lib_href": "/v4/libraries/"+custom_library_uid, + "compute": { + "name": wml_compute_name, + "nodes": wml_compute_nodes_v4 + } + } + }] + }], + "runtimes": [{ + "id": wml_run_name, + "name": wml_framework_name, + "version": wml_runtime_version_v4 + }] } - definition_details = client.repository.store_definition( model_code, meta_props=metadata ) - definition_uid = client.repository.get_definition_uid( definition_details ) - # print( "definition_uid: ", definition_uid ) + metadata = { + client.repository.PipelineMetaNames.NAME: wml_run_name, + client.repository.PipelineMetaNames.DOCUMENT: doc + } + pipeline_id = client.pipelines.get_uid(client.repository.store_pipeline(meta_props=metadata)) + + client.pipelines.get_details(pipeline_id) - # define the run + # start the training run for v4 metadata = { - client.training.ConfigurationMetaNames.NAME : wml_run_name, - client.training.ConfigurationMetaNames.AUTHOR_NAME : wml_author_name, - client.training.ConfigurationMetaNames.TRAINING_DATA_REFERENCE : { - "connection" : { - "endpoint_url" : cos_endpoint, - "access_key_id" : cos_access_key, - "secret_access_key" : cos_secret_key + client.training.ConfigurationMetaNames.TRAINING_RESULTS_REFERENCE: { + "name": "training-results-reference_name", + "connection": { + "endpoint_url": cos_endpoint, + "access_key_id": cos_access_key, + "secret_access_key": cos_secret_key }, - "source" : { - "bucket" : cos_input_bucket, + "location": { + "bucket": cos_output_bucket }, - "type" : wml_data_source_type + "type": wml_data_source_type }, - client.training.ConfigurationMetaNames.TRAINING_RESULTS_REFERENCE: { - "connection" : { - "endpoint_url" : cos_endpoint, - "access_key_id" : cos_access_key, - "secret_access_key" : cos_secret_key - }, - "target" : { - "bucket" : cos_output_bucket, + client.training.ConfigurationMetaNames.TRAINING_DATA_REFERENCES:[{ + "name": "training_input_data", + "type": wml_data_source_type, + "connection": { + "endpoint_url": cos_endpoint, + "access_key_id": cos_access_key, + "secret_access_key": cos_secret_key }, - "type" : wml_data_source_type - } + "location": { + "bucket": cos_input_bucket + } + }], + client.training.ConfigurationMetaNames.PIPELINE_UID: pipeline_id } - # start the training - run_details = client.training.run( definition_uid, meta_props=metadata, asynchronous=True ) - run_uid = client.training.get_run_uid( run_details ) - with open("/tmp/run_uid", "w") as f: - f.write(run_uid) - f.close() + training_id = client.training.get_uid(client.training.run(meta_props=metadata)) + print("training_id", client.training.get_details(training_id)) + print("get status", 
client.training.get_status(training_id)) + # for v4 + run_details = client.training.get_details(training_id) + run_uid = training_id # print logs client.training.monitor_logs(run_uid) client.training.monitor_metrics(run_uid) # checking the result - status = client.training.get_status( run_uid ) + status = client.training.get_status(run_uid) + print("status: ", status) while status['state'] != 'completed': time.sleep(20) - status = client.training.get_status( run_uid ) + status = client.training.get_status(run_uid) print(status) + Path(args.output_run_uid_path).parent.mkdir(parents=True, exist_ok=True) + Path(args.output_run_uid_path).write_text(run_uid) + # Get training details training_details = client.training.get_details(run_uid) - with open("/tmp/training_uid", "w") as f: - training_uid = training_details['entity']['training_results_reference']['location']['model_location'] - f.write(training_uid) - f.close() + print("training_details", training_details) + + training_uid = training_details['entity']['results_reference']['location']['training'] + Path(args.output_training_uid_path).parent.mkdir(parents=True, exist_ok=True) + Path(args.output_training_uid_path).write_text(training_uid) if __name__ == "__main__": import argparse @@ -151,6 +208,10 @@ def train(args): parser.add_argument('--run-name', type=str) parser.add_argument('--author-name', type=str) parser.add_argument('--config', type=str, default="secret_name") + parser.add_argument('--compute-name', type=str) + parser.add_argument('--compute-nodes', type=str) + parser.add_argument('--output-run-uid-path', type=str, default="/tmp/run_uid") + parser.add_argument('--output-training-uid-path', type=str, default="/tmp/training_uid") args = parser.parse_args() # Check secret name is not empty if (not args.config): diff --git a/components/kubeflow/common/launch_crd.py b/components/kubeflow/common/launch_crd.py new file mode 100644 index 00000000000..cb8a790baad --- /dev/null +++ b/components/kubeflow/common/launch_crd.py @@ -0,0 +1,136 @@ +# Copyright 2019 kubeflow.org. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import datetime +import json +import logging +import multiprocessing +import time + +from kubernetes import client as k8s_client +from kubernetes.client import rest + +class K8sCR(object): + def __init__(self, group, plural, version, client): + self.group = group + self.plural = plural + self.version = version + self.client = k8s_client.CustomObjectsApi(client) + + def wait_for_condition(self, + namespace, + name, + expected_conditions=[], + timeout=datetime.timedelta(days=365), + polling_interval=datetime.timedelta(seconds=30), + status_callback=None): + """Waits until any of the specified conditions occur. + Args: + namespace: namespace for the CR. + name: Name of the CR. + expected_conditions: A list of conditions. Function waits until any of the + supplied conditions is reached. + timeout: How long to wait for the CR. + polling_interval: How often to poll for the status of the CR. + status_callback: (Optional): Callable. 
If supplied this callable is + invoked after we poll the CR. Callable takes a single argument which + is the CR. + """ + end_time = datetime.datetime.now() + timeout + while True: + try: + results = self.client.get_namespaced_custom_object( + self.group, self.version, namespace, self.plural, name) + except Exception as e: + logging.error("There was a problem waiting for %s/%s %s in namespace %s; Exception: %s", + self.group, self.plural, name, namespace, e) + raise + + if results: + if status_callback: + status_callback(results) + expected, condition = self.is_expected_conditions(results, expected_conditions) + if expected: + logging.info("%s/%s %s in namespace %s has reached the expected condition: %s.", + self.group, self.plural, name, namespace, condition) + return results + else: + if condition: + logging.info("Current condition of %s/%s %s in namespace %s is %s.", + self.group, self.plural, name, namespace, condition) + + if datetime.datetime.now() + polling_interval > end_time: + raise Exception( + "Timeout waiting for {0}/{1} {2} in namespace {3} to enter one of the " + "conditions {4}.".format(self.group, self.plural, name, namespace, expected_conditions)) + + time.sleep(polling_interval.seconds) + + def is_expected_conditions(self, cr_object, expected_conditions): + return False, "" + + def create(self, spec): + """Create a CR. + Args: + spec: The spec for the CR. + """ + try: + # Create a Resource + namespace = spec["metadata"].get("namespace", "default") + logging.info("Creating %s/%s %s in namespace %s.", + self.group, self.plural, spec["metadata"]["name"], namespace) + api_response = self.client.create_namespaced_custom_object( + self.group, self.version, namespace, self.plural, spec) + logging.info("Created %s/%s %s in namespace %s.", + self.group, self.plural, spec["metadata"]["name"], namespace) + return api_response + except rest.ApiException as e: + self._log_and_raise_exception(e, "create") + + def delete(self, name, namespace): + try: + body = { + # Set garbage collection so that CR won't be deleted until all + # owned references are deleted. 
+ "propagationPolicy": "Foreground", + } + logging.info("Deleteing %s/%s %s in namespace %s.", + self.group, self.plural, name, namespace) + api_response = self.client.delete_namespaced_custom_object( + self.group, + self.version, + namespace, + self.plural, + name, + body) + logging.info("Deleted %s/%s %s in namespace %s.", + self.group, self.plural, name, namespace) + return api_response + except rest.ApiException as e: + self._log_and_raise_exception(e, "delete") + + def _log_and_raise_exception(self, ex, action): + message = "" + if ex.message: + message = ex.message + if ex.body: + try: + body = json.loads(ex.body) + message = body.get("message") + except ValueError: + logging.error("Exception when %s %s/%s: %s", action, self.group, self.plural, ex.body) + raise + + logging.error("Exception when %s %s/%s: %s", action, self.group, self.plural, ex.body) + raise ex + diff --git a/components/kubeflow/deployer/component.yaml b/components/kubeflow/deployer/component.yaml index 2386f046ccd..facbb394e3d 100644 --- a/components/kubeflow/deployer/component.yaml +++ b/components/kubeflow/deployer/component.yaml @@ -11,7 +11,7 @@ inputs: # - {name: Endppoint URI, type: Serving URI, description: 'URI of the deployed prediction service..'} implementation: container: - image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-deployer:57d9f7f1cfd458e945d297957621716062d89a49 + image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-deployer:ad9bd5648dd0453005225779f25d8cebebc7ca00 command: [/bin/deploy.sh] args: [ --model-export-path, {inputValue: Model dir}, diff --git a/components/kubeflow/dnntrainer/component.yaml b/components/kubeflow/dnntrainer/component.yaml index 927ff2cabcc..ecc787277ca 100644 --- a/components/kubeflow/dnntrainer/component.yaml +++ b/components/kubeflow/dnntrainer/component.yaml @@ -16,7 +16,7 @@ outputs: - {name: MLPipeline UI metadata, type: UI metadata} implementation: container: - image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:57d9f7f1cfd458e945d297957621716062d89a49 + image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:ad9bd5648dd0453005225779f25d8cebebc7ca00 command: [python2, -m, trainer.task] args: [ --transformed-data-dir, {inputValue: Transformed data dir}, diff --git a/components/kubeflow/dnntrainer/src/setup.py b/components/kubeflow/dnntrainer/src/setup.py index fe6001945f0..7c7099343f4 100644 --- a/components/kubeflow/dnntrainer/src/setup.py +++ b/components/kubeflow/dnntrainer/src/setup.py @@ -28,7 +28,7 @@ long_description=""" """, install_requires=[ - 'tensorflow==1.12.1', + 'tensorflow==1.15.2', ], package_data={ }, diff --git a/components/kubeflow/katib-launcher/Dockerfile b/components/kubeflow/katib-launcher/Dockerfile index 81d927e559a..074237a63e6 100644 --- a/components/kubeflow/katib-launcher/Dockerfile +++ b/components/kubeflow/katib-launcher/Dockerfile @@ -14,16 +14,13 @@ FROM ubuntu:16.04 RUN apt-get update -y && \ - apt-get install --no-install-recommends -y -q ca-certificates python-dev python-setuptools wget unzip git && \ + apt-get install --no-install-recommends -y -q ca-certificates python-dev python-setuptools wget && \ easy_install pip && \ - pip install pyyaml==3.12 six==1.11.0 requests==2.18.4 grpcio gcloud google-api-python-client protobuf kubernetes && \ - wget https://github.com/kubeflow/katib/archive/master.zip && unzip master.zip - -ENV PYTHONPATH $PYTHONPATH:/katib-master/pkg/api/v1alpha1/python:/katib-master/py + pip install pyyaml==3.12 kubernetes ADD build /ml RUN mkdir /usr/licenses && \ /ml/license.sh /ml/third_party_licenses.csv 
/usr/licenses -ENTRYPOINT ["python", "/ml/launch_study_job.py"] +ENTRYPOINT ["python", "/ml/launch_experiment.py"] diff --git a/components/kubeflow/katib-launcher/build_image.sh b/components/kubeflow/katib-launcher/build_image.sh index 1b96dba236f..181f73b9a14 100755 --- a/components/kubeflow/katib-launcher/build_image.sh +++ b/components/kubeflow/katib-launcher/build_image.sh @@ -34,25 +34,26 @@ done mkdir -p ./build rsync -arvp ./src/ ./build/ +rsync -arvp ../common/ ./build/ cp ../../license.sh ./build cp ../../third_party_licenses.csv ./build -LOCAL_LAUNCHER_IMAGE_NAME=ml-pipeline-kubeflow-studyjob +LOCAL_LAUNCHER_IMAGE_NAME=ml-pipeline-kubeflow-experiment docker build -t ${LOCAL_LAUNCHER_IMAGE_NAME} . +if [ -z "${TAG_NAME}" ]; then + TAG_NAME=$(date +v%Y%m%d)-$(git describe --tags --always --dirty)-$(git diff | shasum -a256 | cut -c -6) +fi if [ -z "${LAUNCHER_IMAGE_NAME}" ]; then - if [ -z "${TAG_NAME}" ]; then - TAG_NAME=$(date +v%Y%m%d)-$(git describe --tags --always --dirty)-$(git diff | shasum -a256 | cut -c -6) - fi if [ -z "${PROJECT_ID}" ]; then PROJECT_ID=$(gcloud config config-helper --format "value(configuration.properties.core.project)") fi docker tag ${LOCAL_LAUNCHER_IMAGE_NAME} gcr.io/${PROJECT_ID}/${LOCAL_LAUNCHER_IMAGE_NAME}:${TAG_NAME} docker push gcr.io/${PROJECT_ID}/${LOCAL_LAUNCHER_IMAGE_NAME}:${TAG_NAME} else - docker tag ${LOCAL_LAUNCHER_IMAGE_NAME} ${LAUNCHER_IMAGE_NAME} - docker push ${LAUNCHER_IMAGE_NAME} + docker tag ${LOCAL_LAUNCHER_IMAGE_NAME} ${LAUNCHER_IMAGE_NAME}:${TAG_NAME} + docker push ${LAUNCHER_IMAGE_NAME}:${TAG_NAME} fi rm -rf ./build diff --git a/components/kubeflow/katib-launcher/component.yaml b/components/kubeflow/katib-launcher/component.yaml index c458daed395..97babaa8a48 100644 --- a/components/kubeflow/katib-launcher/component.yaml +++ b/components/kubeflow/katib-launcher/component.yaml @@ -1,40 +1,38 @@ -name: Kubeflow - Launch StudyJob -description: Kubeflow StudyJob launcher +name: Kubeflow - Launch Experiment +description: Kubeflow Experiment launcher inputs: -- {name: StudyJob name, type: String, description: 'Job name.'} -- {name: Namespace, type: String, default: kubeflow, description: 'Namespace.'} -- {name: Optimization type, type: String, default: minimize, description: 'Direction of optimization. 
minimize or maximize.'} -- {name: Objective value name, type: String, description: 'Objective value name which trainer optimizes.'} -- {name: Optimization goal, type: Float, description: 'Stop studying once objectivevaluename value exceeds optimizationgoal.'} -- {name: Request count, type: Integer, default: 1, description: 'Number of requests to the suggestion service.'} -- {name: Metrics names, type: String, description: 'List of metric names (comma-delimited).'} -- {name: Parameter configs, type: YAML, default: '', description: 'Parameter configs (YAML/JSON format).'} -- {name: NAS config, type: YAML, default: '', description: 'NAS config (YAML/JSON format).'} -- {name: Worker template path, type: String, default: '', description: 'Worker spec.'} -- {name: Metrics collector template path, type: String, default: '', description: 'Metrics collector spec.'} -- {name: Suggestion spec, type: YAML, default: '', description: 'Suggestion spec (YAML/JSON format).'} -- {name: StudyJob timeout minutes, type: Integer, default: '10', description: 'Time in minutes to wait for the StudyJob to complete.'} -- {name: Delete finished job, type: Bool, default: 'True', description: 'Whether to delete the job after it is finished.'} +- {name: Experiment Name, type: String, description: 'Experiment name.'} +- {name: Experiment Namespace, type: String, default: kubeflow, description: 'Experiment namespace.'} +- {name: Experiment Version, type: String, default: v1alpha3, description: 'Experiment version.'} +- {name: Max Trial Count, type: Integer, description: 'How many trials can be executed at most.'} +- {name: Max Failed Trial Count, type: Integer, default: 3, description: 'How many trials can fail at most.'} +- {name: Parallel Trial Count, type: Integer, default: 3, description: 'How many trials can be running in parallel at most.'} +- {name: Objective, type: JSON, description: 'Experiment objective.'} +- {name: Algorithm, type: JSON, description: 'Experiment algorithm.'} +- {name: Trial Template, type: JSON, description: 'Experiment trialTemplate.'} +- {name: Parameters, type: JSON, description: 'Experiment Parameter configuration.'} +- {name: Metrics Collector, type: JSON, default: '{}', description: 'Experiment metricsCollector.'} +- {name: Experiment Timeout Minutes, type: Integer, default: 1440, description: 'Time in minutes to wait for the Experiment to complete.'} +- {name: Delete Finished Experiment, type: Bool, default: 'True', description: 'Whether to delete the experiment after it is finished.'} outputs: -- {name: Best parameter set, type: JSON, description: 'The parameter set of the best StudyJob trial.'} +- {name: Best Parameter Set, type: JSON, description: 'The parameter set of the best Experiment trial.'} implementation: container: - image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-studyjob:2c2445df83fa879387a200747cc20f72a7ee9727 - command: [python, /ml/launch_study_job.py] + image: liuhougangxa/katib-experiment-launcher:latest + command: [python, /ml/launch_experiment.py] args: [ - --name, {inputValue: StudyJob name}, - --namespace, {inputValue: Namespace}, - --optimizationtype, {inputValue: Optimization type}, - --objectivevaluename, {inputValue: Objective value name}, - --optimizationgoal, {inputValue: Optimization goal}, - --requestcount, {inputValue: Request count}, - --metricsnames, {inputValue: Metrics names}, - --parameterconfigs, {inputValue: Parameter configs}, - --nasConfig, {inputValue: NAS config}, - --workertemplatepath, {inputValue: Worker template path}, - 
--mcollectortemplatepath, {inputValue: Metrics collector template path}, - --suggestionspec, {inputValue: Suggestion spec}, - --studyjobtimeoutminutes, {inputValue: StudyJob timeout minutes}, - --deleteAfterDone, {inputValue: Delete finished job}, - --outputfile, {outputPath: Best parameter set}, + --name, {inputValue: Experiment Name}, + --namespace, {inputValue: Experiment Namespace}, + --version, {inputValue: Experiment Version}, + --maxTrialCount, {inputValue: Max Trial Count}, + --maxFailedTrialCount, {inputValue: Max Failed Trial Count}, + --parallelTrialCount, {inputValue: Parallel Trial Count}, + --objectiveConfig, {inputValue: Objective}, + --algorithmConfig, {inputValue: Algorithm}, + --trialTemplate, {inputValue: Trial Template}, + --parameters, {inputValue: Parameters}, + --metricsCollector, {inputValue: Metrics Collector}, + --experimentTimeoutMinutes, {inputValue: Experiment Timeout Minutes}, + --deleteAfterDone, {inputValue: Delete Finished Experiment}, + --outputFile, {outputPath: Best Parameter Set}, ] diff --git a/components/kubeflow/katib-launcher/kubeflow_katib_launcher_op.py b/components/kubeflow/katib-launcher/kubeflow_katib_launcher_op.py deleted file mode 100644 index 8db12e933ec..00000000000 --- a/components/kubeflow/katib-launcher/kubeflow_katib_launcher_op.py +++ /dev/null @@ -1,41 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import kfp.dsl as dsl - -def kubeflow_studyjob_launcher_op(name, namespace, optimizationtype, objectivevaluename, optimizationgoal, requestcount, metricsnames, - parameterconfigs, nasConfig, workertemplatepath, mcollectortemplatepath, suggestionspec, - studyjob_timeout_minutes, delete=True, output_file='/output.txt', step_name='StudyJob-Launcher'): - return dsl.ContainerOp( - name = step_name, - image = 'liuhougangxa/ml-pipeline-kubeflow-studyjob:latest', - arguments = [ - '--name', name, - '--namespace', namespace, - "--optimizationtype", optimizationtype, - "--objectivevaluename", objectivevaluename, - "--optimizationgoal", optimizationgoal, - "--requestcount", requestcount, - "--metricsnames", metricsnames, - "--parameterconfigs", parameterconfigs, - "--nasConfig", nasConfig, - "--workertemplatepath", workertemplatepath, - "--mcollectortemplatepath", mcollectortemplatepath, - "--suggestionspec", suggestionspec, - "--outputfile", output_file, - "--deleteAfterDone", delete, - '--studyjobtimeoutminutes', studyjob_timeout_minutes, - ], - file_outputs = {'hyperparameter': output_file} - ) diff --git a/components/kubeflow/katib-launcher/sample.py b/components/kubeflow/katib-launcher/sample.py new file mode 100644 index 00000000000..565830ee312 --- /dev/null +++ b/components/kubeflow/katib-launcher/sample.py @@ -0,0 +1,114 @@ +import json +import kfp.dsl as dsl + +@dsl.pipeline( + name="Launch katib experiment", + description="An example to launch katib experiment." 
+) +def mnist_hpo( + name="mnist", + namespace="kubeflow", + goal=0.99, + parallelTrialCount=3, + maxTrialCount=12, + experimentTimeoutMinutes=60, + deleteAfterDone=True): + objectiveConfig = { + "type": "maximize", + "goal": goal, + "objectiveMetricName": "Validation-accuracy", + "additionalMetricNames": ["accuracy"] + } + algorithmConfig = {"algorithmName" : "random"} + parameters = [ + {"name": "--lr", "parameterType": "double", "feasibleSpace": {"min": "0.01","max": "0.03"}}, + {"name": "--num-layers", "parameterType": "int", "feasibleSpace": {"min": "2", "max": "5"}}, + {"name": "--optimizer", "parameterType": "categorical", "feasibleSpace": {"list": ["sgd", "adam", "ftrl"]}} + ] + rawTemplate = { + "apiVersion": "batch/v1", + "kind": "Job", + "metadata": { + "name": "{{.Trial}}", + "namespace": "{{.NameSpace}}" + }, + "spec": { + "template": { + "spec": { + "restartPolicy": "Never", + "containers": [ + {"name": "{{.Trial}}", + "image": "docker.io/katib/mxnet-mnist-example", + "command": [ + "python /mxnet/example/image-classification/train_mnist.py --batch-size=64 {{- with .HyperParameters}} {{- range .}} {{.Name}}={{.Value}} {{- end}} {{- end}}" + ] + } + ] + } + } + } + } + trialTemplate = { + "goTemplate": { + "rawTemplate": json.dumps(rawTemplate) + } + } + op1 = katib_experiment_launcher_op( + name, + namespace, + parallelTrialCount=parallelTrialCount, + maxTrialCount=maxTrialCount, + objectiveConfig=str(objectiveConfig), + algorithmConfig=str(algorithmConfig), + trialTemplate=str(trialTemplate), + parameters=str(parameters), + experimentTimeoutMinutes=experimentTimeoutMinutes, + deleteAfterDone=deleteAfterDone + ) + + op_out = dsl.ContainerOp( + name="my-out-cop", + image="library/bash:4.4.23", + command=["sh", "-c"], + arguments=["echo hyperparameter: %s" % op1.output], + ) + + +def katib_experiment_launcher_op( + name, + namespace, + maxTrialCount=100, + parallelTrialCount=3, + maxFailedTrialCount=3, + objectiveConfig='{}', + algorithmConfig='{}', + metricsCollector='{}', + trialTemplate='{}', + parameters='[]', + experimentTimeoutMinutes=60, + deleteAfterDone=True, + outputFile='/output.txt'): + return dsl.ContainerOp( + name = "mnist-hpo", + image = 'liuhougangxa/katib-experiment-launcher:latest', + arguments = [ + '--name', name, + '--namespace', namespace, + '--maxTrialCount', maxTrialCount, + '--maxFailedTrialCount', maxFailedTrialCount, + '--parallelTrialCount', parallelTrialCount, + '--objectiveConfig', objectiveConfig, + '--algorithmConfig', algorithmConfig, + '--metricsCollector', metricsCollector, + '--trialTemplate', trialTemplate, + '--parameters', parameters, + '--outputFile', outputFile, + '--deleteAfterDone', deleteAfterDone, + '--experimentTimeoutMinutes', experimentTimeoutMinutes, + ], + file_outputs = {'bestHyperParameter': outputFile} + ) + +if __name__ == "__main__": + import kfp.compiler as compiler + compiler.Compiler().compile(mnist_hpo, __file__ + ".tar.gz") diff --git a/components/kubeflow/katib-launcher/sample2.py b/components/kubeflow/katib-launcher/sample2.py new file mode 100644 index 00000000000..67b79890a0e --- /dev/null +++ b/components/kubeflow/katib-launcher/sample2.py @@ -0,0 +1,80 @@ +import json +from kfp import components +import kfp.dsl as dsl + +@dsl.pipeline( + name="Launch katib experiment", + description="An example to launch katib experiment." 
+) +def mnist_hpo( + name="mnist", + namespace="kubeflow", + goal=0.99, + parallelTrialCount=3, + maxTrialCount=12, + experimentTimeoutMinutes=60, + deleteAfterDone=True): + objectiveConfig = { + "type": "maximize", + "goal": goal, + "objectiveMetricName": "Validation-accuracy", + "additionalMetricNames": ["accuracy"] + } + algorithmConfig = {"algorithmName" : "random"} + parameters = [ + {"name": "--lr", "parameterType": "double", "feasibleSpace": {"min": "0.01","max": "0.03"}}, + {"name": "--num-layers", "parameterType": "int", "feasibleSpace": {"min": "2", "max": "5"}}, + {"name": "--optimizer", "parameterType": "categorical", "feasibleSpace": {"list": ["sgd", "adam", "ftrl"]}} + ] + rawTemplate = { + "apiVersion": "batch/v1", + "kind": "Job", + "metadata": { + "name": "{{.Trial}}", + "namespace": "{{.NameSpace}}" + }, + "spec": { + "template": { + "spec": { + "restartPolicy": "Never", + "containers": [ + {"name": "{{.Trial}}", + "image": "docker.io/katib/mxnet-mnist-example", + "command": [ + "python /mxnet/example/image-classification/train_mnist.py --batch-size=64 {{- with .HyperParameters}} {{- range .}} {{.Name}}={{.Value}} {{- end}} {{- end}}" + ] + } + ] + } + } + } + } + trialTemplate = { + "goTemplate": { + "rawTemplate": json.dumps(rawTemplate) + } + } + katib_experiment_launcher_op = components.load_component_from_file("./component.yaml") + # katib_experiment_launcher_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/master/components/kubeflow/katib-launcher/component.yaml') + op1 = katib_experiment_launcher_op( + experiment_name=name, + experiment_namespace=namespace, + parallel_trial_count=parallelTrialCount, + max_trial_count=maxTrialCount, + objective=str(objectiveConfig), + algorithm=str(algorithmConfig), + trial_template=str(trialTemplate), + parameters=str(parameters), + experiment_timeout_minutes=experimentTimeoutMinutes, + delete_finished_experiment=deleteAfterDone) + + op_out = dsl.ContainerOp( + name="my-out-cop", + image="library/bash:4.4.23", + command=["sh", "-c"], + arguments=["echo hyperparameter: %s" % op1.output], + ) + +if __name__ == "__main__": + import kfp.compiler as compiler + compiler.Compiler().compile(mnist_hpo, __file__ + ".tar.gz") diff --git a/components/kubeflow/katib-launcher/src/__init__.py b/components/kubeflow/katib-launcher/src/__init__.py index 4df8965533d..ddd71c00285 100644 --- a/components/kubeflow/katib-launcher/src/__init__.py +++ b/components/kubeflow/katib-launcher/src/__init__.py @@ -11,5 +11,3 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
- -from .kubeflow_katib_launcher_op import kubeflow_studyjob_launcher_op diff --git a/components/kubeflow/katib-launcher/src/hp.template.yaml b/components/kubeflow/katib-launcher/src/hp.template.yaml deleted file mode 100644 index 42494859ac0..00000000000 --- a/components/kubeflow/katib-launcher/src/hp.template.yaml +++ /dev/null @@ -1,49 +0,0 @@ -apiVersion: "kubeflow.org/v1alpha1" -kind: StudyJob -metadata: - namespace: kubeflow - labels: - controller-tools.k8s.io: "1.0" - name: study-example -spec: - studyName: study-example - owner: crd - optimizationtype: "" - objectivevaluename: "" - optimizationgoal: 0.99 - requestcount: 4 - metricsnames: - - accuracy_1 - nasConfig: - graphConfig: - numLayers: 8 - inputSize: - - 32 - - 32 - - 3 - outputSize: - - 10 - operations: - - operationType: convolution - parameterconfigs: - - name: filter_size - parametertype: categorical - feasible: - list: - - "3" - - "5" - - "7" - parameterconfigs: - - name: --learning_rate - parametertype: double - feasible: - min: "0.01" - max: "0.05" - workerSpec: - goTemplate: - templatePath: "" - metricsCollectorSpec: - goTemplate: - templatePath: "" - suggestionSpec: - suggestionAlgorithm: "random" diff --git a/components/kubeflow/katib-launcher/src/launch_experiment.py b/components/kubeflow/katib-launcher/src/launch_experiment.py new file mode 100644 index 00000000000..a4bce6a99c3 --- /dev/null +++ b/components/kubeflow/katib-launcher/src/launch_experiment.py @@ -0,0 +1,140 @@ +# Copyright 2019 kubeflow.org. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
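+# Launcher flow: build an Experiment spec from the CLI arguments, create the CR,
+# wait for it to reach a terminal condition, then write the best trial's
+# parameter assignments to --outputFile.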
+ +import argparse +import datetime +from distutils.util import strtobool +import json +import os +import logging +import yaml +import uuid +import launch_crd + +from kubernetes import client as k8s_client +from kubernetes import config + +def yamlOrJsonStr(str): + if str == "" or str == None: + return None + try: + return json.loads(str) + except: + return yaml.safe_load(str) + +ExperimentGroup = "kubeflow.org" +ExperimentPlural = "experiments" + +class Experiment(launch_crd.K8sCR): + def __init__(self, version="v1alpha3", client=None): + super(Experiment, self).__init__(ExperimentGroup, ExperimentPlural, version, client) + + def is_expected_conditions(self, inst, expected_conditions): + conditions = inst.get('status', {}).get("conditions") + if not conditions: + return False, "" + if conditions[-1]["type"] in expected_conditions: + return True, conditions[-1]["type"] + else: + return False, conditions[-1]["type"] + +def main(argv=None): + parser = argparse.ArgumentParser(description='Kubeflow Experiment launcher') + parser.add_argument('--name', type=str, + help='Experiment name.') + parser.add_argument('--namespace', type=str, + default='kubeflow', + help='Experiment namespace.') + parser.add_argument('--version', type=str, + default='v1alpha3', + help='Experiment version.') + parser.add_argument('--maxTrialCount', type=int, + help='How many trials will be created for the experiment at most.') + parser.add_argument('--maxFailedTrialCount', type=int, + help='Stop the experiment when $maxFailedTrialCount trials have failed.') + parser.add_argument('--parallelTrialCount', type=int, + default=3, + help='How many trials can be running at most.') + parser.add_argument('--objectiveConfig', type=yamlOrJsonStr, + default={}, + help='Experiment objective.') + parser.add_argument('--algorithmConfig', type=yamlOrJsonStr, + default={}, + help='Experiment algorithm.') + parser.add_argument('--trialTemplate', type=yamlOrJsonStr, + default={}, + help='Experiment trialTemplate.') + parser.add_argument('--parameters', type=yamlOrJsonStr, + default=[], + help='Experiment parameters.') + parser.add_argument('--metricsCollector', type=yamlOrJsonStr, + default={}, + help='Experiment metricsCollectorSpec.') + parser.add_argument('--outputFile', type=str, + default='/output.txt', + help='The file which stores the best trial of the experiment.') + parser.add_argument('--deleteAfterDone', type=strtobool, + default=True, + help='When the experiment is done, delete it automatically if True.') + parser.add_argument('--experimentTimeoutMinutes', type=int, + default=60*24, + help='Time in minutes to wait for the Experiment to reach an end state') + + args = parser.parse_args() + + logging.getLogger().setLevel(logging.INFO) + + logging.info('Generating experiment template.') + + config.load_incluster_config() + api_client = k8s_client.ApiClient() + experiment = Experiment(version=args.version, client=api_client) + exp_name = (args.name+'-'+uuid.uuid4().hex)[0:63] + + inst = { + "apiVersion": "%s/%s" % (ExperimentGroup, args.version), + "kind": "Experiment", + "metadata": { + "name": exp_name, + "namespace": args.namespace, + }, + "spec": { + "algorithm": args.algorithmConfig, + "maxFailedTrialCount": args.maxFailedTrialCount, + "maxTrialCount": args.maxTrialCount, + "metricsCollectorSpec": args.metricsCollector, + "objective": args.objectiveConfig, + "parallelTrialCount": args.parallelTrialCount, + "parameters": args.parameters, + "trialTemplate": args.trialTemplate, + }, + } + create_response = experiment.create(inst) + 
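+    # Block until the Experiment reaches a terminal condition; wait_for_condition
+    # polls the CR and raises if experimentTimeoutMinutes elapses first.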
+ + expected_conditions = ["Succeeded", "Failed"] + current_inst = experiment.wait_for_condition( + args.namespace, exp_name, expected_conditions, + timeout=datetime.timedelta(minutes=args.experimentTimeoutMinutes)) + expected, condition = experiment.is_expected_conditions(current_inst, ["Succeeded"]) + if expected: + paramAssignments = current_inst["status"]["currentOptimalTrial"]["parameterAssignments"] + if not os.path.exists(os.path.dirname(args.outputFile)): + os.makedirs(os.path.dirname(args.outputFile)) + with open(args.outputFile, 'w') as f: + f.write(json.dumps(paramAssignments)) + if args.deleteAfterDone: + experiment.delete(exp_name, args.namespace) + +if __name__ == "__main__": + main() diff --git a/components/kubeflow/katib-launcher/src/launch_study_job.py b/components/kubeflow/katib-launcher/src/launch_study_job.py deleted file mode 100644 index ebaa9e66fe7..00000000000 --- a/components/kubeflow/katib-launcher/src/launch_study_job.py +++ /dev/null @@ -1,169 +0,0 @@ -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -import argparse -import datetime -from distutils.util import strtobool -import json -import os -import logging -import requests -import subprocess -import yaml -import grpc - -import api_pb2 -import api_pb2_grpc - -from kubernetes import client as k8s_client -from kubernetes import config -import study_job_client - -def yamlOrJsonStr(str): - if str == "" or str == None: - return None - try: - return json.loads(str) - except: - return yaml.safe_load(str) - -def strToList(str): - return str.split(",") - -def _update_or_pop(spec, name, value): - if value: - spec[name] = value - else: - spec.pop(name) - -def _generate_studyjob_yaml(src_filename, name, namespace, optimizationtype, objectivevaluename, optimizationgoal, requestcount, - metricsnames, parameterconfigs, nasConfig, workertemplatepath, mcollectortemplatepath, suggestionspec): - """_generate_studyjob_yaml generates studyjob yaml file based on hp.template.yaml""" - with open(src_filename, 'r') as f: - content = yaml.safe_load(f) - - content['metadata']['name'] = name - content['metadata']['namespace'] = namespace - content['spec']['studyName'] = name - content['spec']['optimizationtype'] = optimizationtype - content['spec']['objectivevaluename'] = objectivevaluename - content['spec']['optimizationgoal'] = optimizationgoal - content['spec']['requestcount'] = requestcount - - _update_or_pop(content['spec'], 'parameterconfigs', parameterconfigs) - _update_or_pop(content['spec'], 'nasConfig', nasConfig) - _update_or_pop(content['spec'], 'metricsnames', metricsnames) - _update_or_pop(content['spec'], 'suggestionSpec', suggestionspec) - - if workertemplatepath: - content['spec']['workerSpec']['goTemplate']['templatePath'] = workertemplatepath - else: - content['spec'].pop('workerSpec') - - if mcollectortemplatepath: - content['spec']['metricsCollectorSpec']['goTemplate']['templatePath'] = mcollectortemplatepath - else : - content['spec'].pop('metricsCollectorSpec') - - return content -def 
get_best_trial(trial_id): - vizier_core = "vizier-core.kubeflow:6789" - with grpc.insecure_channel(vizier_core) as channel: - stub = api_pb2_grpc.ManagerStub(channel) - response = stub.GetTrial(api_pb2.GetTrialRequest(trial_id=trial_id)) - return response.trial - -def main(argv=None): - parser = argparse.ArgumentParser(description='Kubeflow StudyJob launcher') - parser.add_argument('--name', type=str, - help='StudyJob name.') - parser.add_argument('--namespace', type=str, - default='kubeflow', - help='StudyJob namespace.') - parser.add_argument('--optimizationtype', type=str, - default='minimize', - help='Direction of optimization. minimize or maximize.') - parser.add_argument('--objectivevaluename', type=str, - help='Objective value name which trainer optimizes.') - parser.add_argument('--optimizationgoal', type=float, - help='Stop studying once objectivevaluename value ' + - 'exceeds optimizationgoal') - parser.add_argument('--requestcount', type=int, - default=1, - help='The times asking request to suggestion service.') - parser.add_argument('--metricsnames', type=strToList, - help='StudyJob metrics name list.') - parser.add_argument('--parameterconfigs', type=yamlOrJsonStr, - default={}, - help='StudyJob parameterconfigs.') - parser.add_argument('--nasConfig',type=yamlOrJsonStr, - default={}, - help='StudyJob nasConfig.') - parser.add_argument('--workertemplatepath', type=str, - default="", - help='StudyJob worker spec.') - parser.add_argument('--mcollectortemplatepath', type=str, - default="", - help='StudyJob worker spec.') - parser.add_argument('--suggestionspec', type=yamlOrJsonStr, - default={}, - help='StudyJob suggestion spec.') - parser.add_argument('--outputfile', type=str, - default='/output.txt', - help='The file which stores the best trial of the studyJob.') - parser.add_argument('--deleteAfterDone', type=strtobool, - default=True, - help='When studyjob done, delete the studyjob automatically if it is True.') - parser.add_argument('--studyjobtimeoutminutes', type=int, - default=10, - help='Time in minutes to wait for the StudyJob to complete') - - args = parser.parse_args() - - logging.getLogger().setLevel(logging.INFO) - - logging.info('Generating studyjob template.') - template_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'hp.template.yaml') - content_yaml = _generate_studyjob_yaml(template_file, args.name, args.namespace, args.optimizationtype, args.objectivevaluename, - args.optimizationgoal, args.requestcount, args.metricsnames, args.parameterconfigs, - args.nasConfig, args.workertemplatepath, args.mcollectortemplatepath, args.suggestionspec) - - config.load_incluster_config() - api_client = k8s_client.ApiClient() - create_response = study_job_client.create_study_job(api_client, content_yaml) - job_name = create_response['metadata']['name'] - job_namespace = create_response['metadata']['namespace'] - - expected_condition = ["Completed", "Failed"] - wait_response = study_job_client.wait_for_condition( - api_client, job_namespace, job_name, expected_condition, - timeout=datetime.timedelta(minutes=args.studyjobtimeoutminutes)) - succ = False - if wait_response.get("status", {}).get("condition") == "Completed": - succ = True - trial = get_best_trial(wait_response["status"]["bestTrialId"]) - if not os.path.exists(os.path.dirname(args.outputfile)): - os.makedirs(os.path.dirname(args.outputfile)) - with open(args.outputfile, 'w') as f: - ps_dict = {} - for ps in trial.parameter_set: - ps_dict[ps.name] = ps.value - f.write(json.dumps(ps_dict)) - if succ: - 
logging.info('Study success.') - if args.deleteAfterDone: - study_job_client.delete_study_job(api_client, job_name, job_namespace) - -if __name__== "__main__": - main() diff --git a/components/kubeflow/kfserving/Dockerfile b/components/kubeflow/kfserving/Dockerfile index 9bf234dd3e9..82f655f0a84 100644 --- a/components/kubeflow/kfserving/Dockerfile +++ b/components/kubeflow/kfserving/Dockerfile @@ -1,6 +1,6 @@ FROM python:3.6-slim -RUN pip install kubernetes==9.0.0 kfserving==0.1.1 requests==2.22.0 +RUN pip3 install kubernetes==10.0.1 kfserving==0.2.1 requests==2.22.0 Flask==1.1.1 flask-cors==3.0.8 ENV APP_HOME /app COPY src $APP_HOME diff --git a/components/kubeflow/kfserving/OWNERS b/components/kubeflow/kfserving/OWNERS new file mode 100644 index 00000000000..78d93353d16 --- /dev/null +++ b/components/kubeflow/kfserving/OWNERS @@ -0,0 +1,5 @@ +approvers: + - animeshsingh +reviewers: + - animeshsingh + - Tomcli diff --git a/components/kubeflow/kfserving/component.yaml b/components/kubeflow/kfserving/component.yaml index 33be72af97e..ea59664ae87 100644 --- a/components/kubeflow/kfserving/component.yaml +++ b/components/kubeflow/kfserving/component.yaml @@ -8,13 +8,15 @@ inputs: - {name: Canary Model Traffic Percentage, type: String, default: '0', description: 'Optional Traffic to be sent to default model'} - {name: Namespace, type: String, default: 'kubeflow', description: 'Kubernetes namespace where the KFServing service is deployed.'} - {name: Framework, type: String, default: 'tensorflow', description: 'Machine Learning Framework for Model Serving.'} - - {name: default_custom_model_spec, type: String, default: '{}', description: 'Custom runtime default custom model container spec.'} - - {name: canary_custom_model_spec, type: String, default: '{}', description: 'Custom runtime canary custom model container spec.'} + - {name: Default Custom Model Spec, type: String, default: '{}', description: 'Custom runtime default custom model container spec.'} + - {name: Canary Custom Model Spec, type: String, default: '{}', description: 'Custom runtime canary custom model container spec.'} + - {name: Autoscaling Target, type: String, default: '0', description: 'Autoscaling Target Number'} + - {name: KFServing Endpoint, type: String, default: '', description: 'KFServing remote deployer API endpoint'} outputs: - - {name: Endpoint URI, type: String, description: 'URI of the deployed prediction service..'} + - {name: Service Endpoint URI, type: String, description: 'URI of the deployed prediction service.'} implementation: container: - image: animeshsingh/kfserving-component + image: aipipeline/kfserving-component:v0.2.1 command: ['python'] args: [ -u, kfservingdeployer.py, @@ -25,7 +27,9 @@ implementation: --canary-model-traffic, {inputValue: Canary Model Traffic Percentage}, --namespace, {inputValue: Namespace}, --framework, {inputValue: Framework}, - --default-custom-model-spec,{inputValue: default_custom_model_spec}, - --canary-custom-model-spec, {inputValue: canary_custom_model_spec}, - --output_path, {outputPath: Endpoint URI} + --default-custom-model-spec, {inputValue: Default Custom Model Spec}, + --canary-custom-model-spec, {inputValue: Canary Custom Model Spec}, + --kfserving-endpoint, {inputValue: KFServing Endpoint}, + --autoscaling-target, {inputValue: Autoscaling Target}, + --output_path, {outputPath: Service Endpoint URI} ] diff --git a/components/kubeflow/kfserving/sample-pipeline.py b/components/kubeflow/kfserving/sample-pipeline.py deleted file mode 100644 index 5d34c0e3865..00000000000 
--- a/components/kubeflow/kfserving/sample-pipeline.py +++ /dev/null @@ -1,45 +0,0 @@ -# Copyright 2019 IBM Corporation -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -import kfp.dsl as dsl -from kfp import components -import json - -kfserving_op = components.load_component_from_file('component.yaml') - -@dsl.pipeline( - name='kfserving pipeline', - description='A pipeline for kfserving.' -) -def kfservingPipeline( - action = 'create', - model_name='tensorflow-sample', - default_model_uri='gs://kfserving-samples/models/tensorflow/flowers', - canary_model_uri='gs://kfserving-samples/models/tensorflow/flowers', - canary_model_traffic_percentage='10', - namespace='kubeflow', - framework='tensorflow' -): - - # define workflow - kfserving = kfserving_op(action = action, - model_name=model_name, - default_model_uri=default_model_uri, - canary_model_uri=canary_model_uri, - canary_model_traffic_percentage=canary_model_traffic_percentage, - namespace=namespace, - framework=framework) - -if __name__ == '__main__': - import kfp.compiler as compiler - compiler.Compiler().compile(kfservingPipeline, __file__ + '.tar.gz') diff --git a/components/kubeflow/kfserving/src/app.py b/components/kubeflow/kfserving/src/app.py new file mode 100644 index 00000000000..21aa7f0d687 --- /dev/null +++ b/components/kubeflow/kfserving/src/app.py @@ -0,0 +1,55 @@ +# Copyright 2019 kubeflow.org. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
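+# Minimal Flask wrapper around deploy_model() so the component can drive a
+# remote KFServing deployer endpoint over HTTP (see --kfserving-endpoint).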
+ +from flask import Flask, request, abort +from flask_cors import CORS +import json +import os + +from kfservingdeployer import deploy_model + +app = Flask(__name__) +CORS(app) + + +@app.route('/deploy-model', methods=['POST']) +def deploy_model_post(): + if not request.json: + abort(400) + return json.dumps(deploy_model( + action=request.json['action'], + model_name=request.json['model_name'], + default_model_uri=request.json['default_model_uri'], + canary_model_uri=request.json['canary_model_uri'], + canary_model_traffic=request.json['canary_model_traffic'], + namespace=request.json['namespace'], + framework=request.json['framework'], + default_custom_model_spec=request.json['default_custom_model_spec'], + canary_custom_model_spec=request.json['canary_custom_model_spec'], + autoscaling_target=request.json['autoscaling_target'] + )) + + +@app.route('/', methods=['GET']) +def root_get(): + return "200" + + +@app.route('/', methods=['OPTIONS']) +def root_options(): + return "200" + + +if __name__ == "__main__": + app.run(debug=True, host='0.0.0.0', port=int(os.environ.get('PORT', 8080))) diff --git a/components/kubeflow/kfserving/src/kfservingdeployer.py b/components/kubeflow/kfserving/src/kfservingdeployer.py index 8763ea6a7bc..4803c85b896 100644 --- a/components/kubeflow/kfserving/src/kfservingdeployer.py +++ b/components/kubeflow/kfserving/src/kfservingdeployer.py @@ -1,51 +1,57 @@ -# Copyright 2019 IBM Corporation +# Copyright 2019 kubeflow.org. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
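+# Maps the component inputs onto a v1alpha2 InferenceService and performs the
+# requested create/update/rollout/promote/delete action via KFServingClient.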
+ import json import argparse import os +import requests +import re from kubernetes import client from kfserving import KFServingClient from kfserving import constants -from kfserving import V1alpha1ModelSpec -from kfserving import V1alpha1TensorflowSpec -from kfserving import V1alpha1PyTorchSpec -from kfserving import V1alpha1SKLearnSpec -from kfserving import V1alpha1XGBoostSpec -from kfserving import V1alpha1TensorRTSpec -from kfserving import V1alpha1CustomSpec -from kfserving import V1alpha1KFServiceSpec -from kfserving import V1alpha1KFService - - -def ModelSpec(framework, model_uri): +from kfserving import V1alpha2EndpointSpec +from kfserving import V1alpha2PredictorSpec +from kfserving import V1alpha2TensorflowSpec +from kfserving import V1alpha2PyTorchSpec +from kfserving import V1alpha2SKLearnSpec +from kfserving import V1alpha2XGBoostSpec +from kfserving.models.v1alpha2_onnx_spec import V1alpha2ONNXSpec +from kfserving import V1alpha2TensorRTSpec +from kfserving import V1alpha2CustomSpec +from kfserving import V1alpha2InferenceServiceSpec +from kfserving import V1alpha2InferenceService + +def EndpointSpec(framework, storage_uri): if framework == 'tensorflow': - return V1alpha1ModelSpec(tensorflow=V1alpha1TensorflowSpec(model_uri=model_uri)) + return V1alpha2EndpointSpec(predictor=V1alpha2PredictorSpec(tensorflow=V1alpha2TensorflowSpec(storage_uri=storage_uri))) elif framework == 'pytorch': - return V1alpha1ModelSpec(pytorch=V1alpha1PyTorchSpec(model_uri=model_uri)) + return V1alpha2EndpointSpec(predictor=V1alpha2PredictorSpec(pytorch=V1alpha2PyTorchSpec(storage_uri=storage_uri))) elif framework == 'sklearn': - return V1alpha1ModelSpec(sklearn=V1alpha1SKLearnSpec(model_uri=model_uri)) + return V1alpha2EndpointSpec(predictor=V1alpha2PredictorSpec(sklearn=V1alpha2SKLearnSpec(storage_uri=storage_uri))) elif framework == 'xgboost': - return V1alpha1ModelSpec(xgboost=V1alpha1XGBoostSpec(model_uri=model_uri)) + return V1alpha2EndpointSpec(predictor=V1alpha2PredictorSpec(xgboost=V1alpha2XGBoostSpec(storage_uri=storage_uri))) + elif framework == 'onnx': + return V1alpha2EndpointSpec(predictor=V1alpha2PredictorSpec(onnx=V1alpha2ONNXSpec(storage_uri=storage_uri))) elif framework == 'tensorrt': - return V1alpha1ModelSpec(tensorrt=V1alpha1TensorRTSpec(model_uri=model_uri)) + return V1alpha2EndpointSpec(predictor=V1alpha2PredictorSpec(tensorrt=V1alpha2TensorRTSpec(storage_uri=storage_uri))) else: raise("Error: No matching framework: " + framework) -def customModelSpec(custom_model_spec): +def customEndpointSpec(custom_model_spec): env = [client.V1EnvVar(name=i['name'], value=i['value']) for i in custom_model_spec['env']] if custom_model_spec.get('env', '') else None ports = [client.V1ContainerPort(container_port=int(custom_model_spec.get('port', '')))] if custom_model_spec.get('port', '') else None containerSpec = client.V1Container( @@ -58,17 +64,58 @@ def customModelSpec(custom_model_spec): image_pull_policy=custom_model_spec.get('image_pull_policy', None), working_dir=custom_model_spec.get('working_dir', None) ) - return V1alpha1ModelSpec(custom=V1alpha1CustomSpec(container=containerSpec)) + return V1alpha2EndpointSpec(predictor=V1alpha2PredictorSpec(custom=V1alpha2CustomSpec(container=containerSpec))) -def kfserving_deployment(metadata, default_model_spec, canary_model_spec=None, canary_model_traffic=None): - return V1alpha1KFService(api_version=constants.KFSERVING_GROUP + '/' + constants.KFSERVING_VERSION, +def InferenceService(metadata, default_model_spec, canary_model_spec=None, 
canary_model_traffic=None):
+    return V1alpha2InferenceService(api_version=constants.KFSERVING_GROUP + '/' + constants.KFSERVING_VERSION,
                               kind=constants.KFSERVING_KIND,
                               metadata=metadata,
-                              spec=V1alpha1KFServiceSpec(default=default_model_spec,
-                                                         canary=canary_model_spec,
-                                                         canary_traffic_percent=canary_model_traffic))
+                              spec=V1alpha2InferenceServiceSpec(default=default_model_spec,
+                                                                canary=canary_model_spec,
+                                                                canary_traffic_percent=canary_model_traffic))
+
+
+def deploy_model(action, model_name, default_model_uri, canary_model_uri, canary_model_traffic, namespace, framework, default_custom_model_spec, canary_custom_model_spec, autoscaling_target=0):
+    if int(autoscaling_target) != 0:
+        annotations = {"autoscaling.knative.dev/target": str(autoscaling_target)}
+    else:
+        annotations = None
+    metadata = client.V1ObjectMeta(name=model_name, namespace=namespace, annotations=annotations)
+
+    # Create the default deployment if a default model uri is provided.
+    if framework != 'custom' and default_model_uri:
+        default_model_spec = EndpointSpec(framework, default_model_uri)
+    elif framework == 'custom' and default_custom_model_spec:
+        default_model_spec = customEndpointSpec(default_custom_model_spec)
+
+    # Create the canary deployment if a canary model uri is provided.
+    if framework != 'custom' and canary_model_uri:
+        canary_model_spec = EndpointSpec(framework, canary_model_uri)
+        kfsvc = InferenceService(metadata, default_model_spec, canary_model_spec, canary_model_traffic)
+    elif framework == 'custom' and canary_custom_model_spec:
+        canary_model_spec = customEndpointSpec(canary_custom_model_spec)
+        kfsvc = InferenceService(metadata, default_model_spec, canary_model_spec, canary_model_traffic)
+    else:
+        kfsvc = InferenceService(metadata, default_model_spec)
+
+    KFServing = KFServingClient()
+    if action == 'create':
+        KFServing.create(kfsvc, watch=True, timeout_seconds=120)
+    elif action == 'update':
+        KFServing.patch(model_name, kfsvc)
+    elif action == 'rollout':
+        KFServing.rollout_canary(model_name, canary=canary_model_spec, percent=canary_model_traffic, namespace=namespace, watch=True, timeout_seconds=120)
+    elif action == 'promote':
+        KFServing.promote(model_name, namespace=namespace, watch=True, timeout_seconds=120)
+    elif action == 'delete':
+        KFServing.delete(model_name, namespace=namespace)
+    else:
+        # A raised object must derive from BaseException; raise a proper exception.
+        raise Exception("Error: No matching action: " + action)
+
+    model_status = KFServing.get(model_name, namespace=namespace)
+    return model_status
 
 if __name__ == "__main__":
     parser = argparse.ArgumentParser()
@@ -81,9 +128,13 @@ def kfserving_deployment(metadata, default_model_spec, canary_model_spec=None, c
     parser.add_argument('--framework', type=str, help='Model Serving Framework', default='tensorflow')
     parser.add_argument('--default-custom-model-spec', type=json.loads, help='Custom runtime default custom model container spec', default={})
     parser.add_argument('--canary-custom-model-spec', type=json.loads, help='Custom runtime canary custom model container spec', default={})
+    parser.add_argument('--kfserving-endpoint', type=str, help='kfserving remote deployer api endpoint', default='')
+    parser.add_argument('--autoscaling-target', type=str, help='Autoscaling target number', default='0')
     parser.add_argument('--output_path', type=str, help='Path to store URI output')
     args = parser.parse_args()
 
+    url = re.compile(r"https?://")
+
     action = args.action.lower()
     model_name = args.model_name
     default_model_uri = args.default_model_uri
@@ -94,36 +145,48 @@ def kfserving_deployment(metadata, default_model_spec,
canary_model_spec=None, c output_path = args.output_path default_custom_model_spec = args.default_custom_model_spec canary_custom_model_spec = args.canary_custom_model_spec - - metadata = client.V1ObjectMeta(name=model_name, namespace=namespace) - if framework != 'custom': - default_model_spec = ModelSpec(framework, default_model_uri) - else: - default_model_spec = customModelSpec(default_custom_model_spec) - # Create Canary deployment if canary model uri is provided. - if framework != 'custom' and canary_model_uri: - canary_model_spec = ModelSpec(framework, canary_model_uri) - kfsvc = kfserving_deployment(metadata, default_model_spec, canary_model_spec, canary_model_traffic) - elif framework == 'custom' and canary_custom_model_spec: - canary_model_spec = customModelSpec(canary_custom_model_spec) - kfsvc = kfserving_deployment(metadata, default_model_spec, canary_model_spec, canary_model_traffic) + kfserving_endpoint = url.sub('', args.kfserving_endpoint) + autoscaling_target = int(args.autoscaling_target) + + if kfserving_endpoint: + formData = { + "action": action, + "model_name": model_name, + "default_model_uri": default_model_uri, + "canary_model_uri": canary_model_uri, + "canary_model_traffic": canary_model_traffic, + "namespace": namespace, + "framework": framework, + "default_custom_model_spec": default_custom_model_spec, + "canary_custom_model_spec": canary_custom_model_spec, + "autoscaling_target": autoscaling_target + } + response = requests.post("http://" + kfserving_endpoint + "/deploy-model", json=formData) + model_status = response.json() else: - kfsvc = kfserving_deployment(metadata, default_model_spec) - - KFServing = KFServingClient() - - if action == 'create': - KFServing.create(kfsvc) - elif action == 'update': - KFServing.patch(model_name, kfsvc) - elif action == 'delete': - KFServing.delete(model_name, namespace=namespace) - else: - raise("Error: No matching action: " + action) - - model_status = KFServing.get(model_name, namespace=namespace) + model_status = deploy_model( + action=action, + model_name=model_name, + default_model_uri=default_model_uri, + canary_model_uri=canary_model_uri, + canary_model_traffic=canary_model_traffic, + namespace=namespace, + framework=framework, + default_custom_model_spec=default_custom_model_spec, + canary_custom_model_spec=canary_custom_model_spec, + autoscaling_target=autoscaling_target + ) print(model_status) - + try: + print(model_status['status']['url'] + ' is the knative domain header. 
$ISTIO_INGRESS_ENDPOINT is defined in the commands below.')
+        print('Sample test commands: ')
+        print('# Note: If Istio Ingress gateway is not served with LoadBalancer, use $CLUSTER_NODE_IP:31380 as the ISTIO_INGRESS_ENDPOINT')
+        print('ISTIO_INGRESS_ENDPOINT=$(kubectl -n istio-system get service istio-ingressgateway -o jsonpath=\'{.status.loadBalancer.ingress[0].ip}\')')
+        # model_status['status']['url'] is like http://flowers-sample.kubeflow.example.com/v1/models/flowers-sample
+        host, path = url.sub('', model_status['status']['url']).split("/", 1)
+        print('curl -X GET -H "Host: ' + host + '" http://$ISTIO_INGRESS_ENDPOINT/' + path)
+    except Exception:
+        print('Model is not ready, check the logs for the Knative URL status.')
     if not os.path.exists(os.path.dirname(output_path)):
         os.makedirs(os.path.dirname(output_path))
     with open(output_path, "w") as report:
diff --git a/components/kubeflow/launcher/Dockerfile b/components/kubeflow/launcher/Dockerfile
index ed7f892d7fa..fd2309e119e 100644
--- a/components/kubeflow/launcher/Dockerfile
+++ b/components/kubeflow/launcher/Dockerfile
@@ -1,4 +1,4 @@
-# Copyright 2018 The Kubeflow Authors
+# Copyright 2019 The Kubeflow Authors
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -11,52 +11,16 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
 FROM ubuntu:16.04
 
-ARG TRAINER_IMAGE_NAME
-
-RUN apt-get update -y
-
-RUN apt-get install --no-install-recommends -y -q ca-certificates python-dev python-setuptools wget unzip git
-
-RUN easy_install pip
-
-RUN pip install pyyaml==3.12 six==1.11.0 requests==2.18.4 tensorflow==1.7.0 \
-    kubernetes google-api-python-client retrying
-
-RUN wget -nv https://dl.google.com/dl/cloudsdk/release/google-cloud-sdk.zip && \
-    unzip -qq google-cloud-sdk.zip -d tools && \
-    rm google-cloud-sdk.zip && \
-    tools/google-cloud-sdk/install.sh --usage-reporting=false \
-        --path-update=false --bash-completion=false \
-        --disable-installation-options && \
-    tools/google-cloud-sdk/bin/gcloud -q components update \
-        gcloud core gsutil && \
-    tools/google-cloud-sdk/bin/gcloud -q components install kubectl && \
-    tools/google-cloud-sdk/bin/gcloud config set component_manager/disable_update_check true && \
-    touch /tools/google-cloud-sdk/lib/third_party/google.py
-
-RUN wget -nv https://github.com/ksonnet/ksonnet/releases/download/v0.9.0/ks_0.9.0_linux_amd64.tar.gz && \
-    tar -xzf ks_0.9.0_linux_amd64.tar.gz && \
-    mkdir -p /tools/ks/bin && \
-    cp ./ks_0.9.0_linux_amd64/ks /tools/ks/bin && \
-    rm ks_0.9.0_linux_amd64.tar.gz && \
-    rm -r ks_0.9.0_linux_amd64
-
-RUN wget https://github.com/kubeflow/tf-operator/archive/v0.3.0.zip && \
-    unzip v0.3.0.zip && \
-    mv tf-operator-0.3.0 tf-operator
-
-ENV PYTHONPATH $PYTHONPATH:/tf-operator
-
-ENV PATH $PATH:/tools/google-cloud-sdk/bin:/tools/ks/bin
-
-ENV TRAINER_IMAGE_NAME $TRAINER_IMAGE_NAME
+RUN apt-get update -y && \
+    apt-get install --no-install-recommends -y -q ca-certificates python-dev python-setuptools wget && \
+    easy_install pip && \
+    pip install pyyaml==3.12 kubernetes
 
 ADD build /ml
 
 RUN mkdir /usr/licenses && \
     /ml/license.sh /ml/third_party_licenses.csv /usr/licenses
 
-ENTRYPOINT ["python", "/ml/launch_tf_job.py"]
\ No newline at end of file
+ENTRYPOINT ["python", "/ml/launch_tfjob.py"]
diff --git a/components/kubeflow/launcher/OWNERS b/components/kubeflow/launcher/OWNERS
new file mode 100644 index 00000000000..808ae38f7a9 --- /dev/null +++ b/components/kubeflow/launcher/OWNERS @@ -0,0 +1,4 @@ +approvers: + - hougangliu +reviewers: + - hougangliu diff --git a/components/kubeflow/launcher/build_image.sh b/components/kubeflow/launcher/build_image.sh index a1e22dd43c1..dc556368667 100755 --- a/components/kubeflow/launcher/build_image.sh +++ b/components/kubeflow/launcher/build_image.sh @@ -1,5 +1,5 @@ #!/bin/bash -e -# Copyright 2018 Google LLC +# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,13 +13,12 @@ # See the License for the specific language governing permissions and # limitations under the License. - while getopts ":hp:t:i:" opt; do case "${opt}" in h) echo "-p: project name" - echo "-t: tag name" - echo "-i: image name. If provided, project name and tag name are not necessary" - exit + echo "-t: tag name" + echo "-i: image name. If provided, project name and tag name are not necessary" + exit ;; p) PROJECT_ID=${OPTARG} ;; @@ -33,56 +32,28 @@ while getopts ":hp:t:i:" opt; do esac done -LOCAL_LAUNCHER_IMAGE_NAME=ml-pipeline-kubeflow-tf -LOCAL_TRAINER_IMAGE_NAME=ml-pipeline-kubeflow-tf-trainer - -if [ -z "${PROJECT_ID}" ]; then - PROJECT_ID=$(gcloud config config-helper --format "value(configuration.properties.core.project)") -fi - -if [ -z "${TAG_NAME}" ]; then - TAG_NAME=$(date +v%Y%m%d)-$(git describe --tags --always --dirty)-$(git diff | shasum -a256 | cut -c -6) -fi - mkdir -p ./build rsync -arvp ./src/ ./build/ +rsync -arvp ../common/ ./build/ cp ../../license.sh ./build cp ../../third_party_licenses.csv ./build -# Build the trainer image -if [ -z "${LAUNCHER_IMAGE_NAME}" ]; then - TRAINER_IMAGE_NAME=gcr.io/${PROJECT_ID}/${LOCAL_TRAINER_IMAGE_NAME}:${TAG_NAME} -else - # construct the trainer image name as "laucher_image_name"-trainer:"launcher_image_tag" - colon_index=`expr index "${LAUNCHER_IMAGE_NAME}" :` - if [ $colon_index == '0' ]; then - TRAINER_IMAGE_NAME=${LAUNCHER_IMAGE_NAME}-trainer - else - tag=${LAUNCHER_IMAGE_NAME:$colon_index} - TRAINER_IMAGE_NAME=${LAUNCHER_IMAGE_NAME:0:$colon_index-1}-trainer:${tag} - fi -fi +LOCAL_LAUNCHER_IMAGE_NAME=ml-pipeline-kubeflow-tfjob -bash_dir=`dirname $0` -bash_dir_abs=`realpath $bash_dir` -parent_dir=`dirname ${bash_dir_abs}` -trainer_dir=${parent_dir}/dnntrainer -cd ${trainer_dir} -if [ -z "${LAUNCHER_IMAGE_NAME}" ]; then - ./build_image.sh -p ${PROJECT_ID} -t ${TAG_NAME} -else - ./build_image.sh -i ${TRAINER_IMAGE_NAME} +docker build -t ${LOCAL_LAUNCHER_IMAGE_NAME} . +if [ -z "${TAG_NAME}" ]; then + TAG_NAME=$(date +v%Y%m%d)-$(git describe --tags --always --dirty)-$(git diff | shasum -a256 | cut -c -6) fi -cd - - -docker build -t ${LOCAL_LAUNCHER_IMAGE_NAME} . 
--build-arg TRAINER_IMAGE_NAME=${TRAINER_IMAGE_NAME} if [ -z "${LAUNCHER_IMAGE_NAME}" ]; then + if [ -z "${PROJECT_ID}" ]; then + PROJECT_ID=$(gcloud config config-helper --format "value(configuration.properties.core.project)") + fi docker tag ${LOCAL_LAUNCHER_IMAGE_NAME} gcr.io/${PROJECT_ID}/${LOCAL_LAUNCHER_IMAGE_NAME}:${TAG_NAME} docker push gcr.io/${PROJECT_ID}/${LOCAL_LAUNCHER_IMAGE_NAME}:${TAG_NAME} else - docker tag ${LOCAL_LAUNCHER_IMAGE_NAME} "${LAUNCHER_IMAGE_NAME}" - docker push "${LAUNCHER_IMAGE_NAME}" + docker tag ${LOCAL_LAUNCHER_IMAGE_NAME} ${LAUNCHER_IMAGE_NAME}:${TAG_NAME} + docker push ${LAUNCHER_IMAGE_NAME}:${TAG_NAME} fi rm -rf ./build diff --git a/components/kubeflow/launcher/component.yaml b/components/kubeflow/launcher/component.yaml new file mode 100644 index 00000000000..3bddf5ec497 --- /dev/null +++ b/components/kubeflow/launcher/component.yaml @@ -0,0 +1,35 @@ +name: Kubeflow - Launch TFJob +description: Kubeflow TFJob launcher +inputs: +- {name: Name, type: String, description: 'TFJob name.'} +- {name: Namespace, type: String, default: kubeflow, description: 'TFJob namespace.'} +- {name: Version, type: String, default: v1, description: 'TFJob version.'} +- {name: ActiveDeadlineSeconds, type: Integer, default: -1, description: 'Specifies the duration (in seconds) since startTime during which the job can remain active before it is terminated. Must be a positive integer. This setting applies only to pods where restartPolicy is OnFailure or Always.'} +- {name: BackoffLimit, type: Integer, default: -1, description: 'Number of retries before marking this job as failed.'} +- {name: ttl Seconds After Finished, type: Integer, default: -1, description: 'Defines the TTL for cleaning up finished TFJobs.'} +- {name: CleanPodPolicy, type: String, default: Running, description: 'Defines the policy for cleaning up pods after the TFJob completes.'} +- {name: PS Spec, type: JSON, default: '{}', description: 'TFJob ps replicaSpecs.'} +- {name: Worker Spec, type: JSON, default: '{}', description: 'TFJob worker replicaSpecs.'} +- {name: Chief Spec, type: JSON, default: '{}', description: 'TFJob chief replicaSpecs.'} +- {name: Evaluator Spec, type: JSON, default: '{}', description: 'TFJob evaluator replicaSpecs.'} +- {name: Tfjob Timeout Minutes, type: Integer, default: 1440, description: 'Time in minutes to wait for the TFJob to complete.'} +- {name: Delete Finished Tfjob, type: Bool, default: 'True' , description: 'Whether to delete the tfjob after it is finished.'} +implementation: + container: + image: liuhougangxa/kubeflow-tfjob-launcher:latest + command: [python, /ml/launch_tfjob.py] + args: [ + --name, {inputValue: Name}, + --namespace, {inputValue: Namespace}, + --version, {inputValue: Version}, + --activeDeadlineSeconds, {inputValue: ActiveDeadlineSeconds}, + --backoffLimit, {inputValue: BackoffLimit}, + --cleanPodPolicy, {inputValue: CleanPodPolicy}, + --ttlSecondsAfterFinished, {inputValue: ttl Seconds After Finished}, + --psSpec, {inputValue: PS Spec}, + --workerSpec, {inputValue: Worker Spec}, + --chiefSpec, {inputValue: Chief Spec}, + --evaluatorSpec, {inputValue: Evaluator Spec}, + --tfjobTimeoutMinutes, {inputValue: Tfjob Timeout Minutes}, + --deleteAfterDone, {inputValue: Delete Finished Tfjob}, + ] diff --git a/components/kubeflow/launcher/kubeflow_tfjob_launcher_op.py b/components/kubeflow/launcher/kubeflow_tfjob_launcher_op.py deleted file mode 100644 index b5a7c7fd357..00000000000 --- a/components/kubeflow/launcher/kubeflow_tfjob_launcher_op.py +++ /dev/null @@ 
-1,34 +0,0 @@ -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from kfp import dsl - -def kubeflow_tfjob_launcher_op(container_image, command, number_of_workers: int, number_of_parameter_servers: int, tfjob_timeout_minutes: int, output_dir=None, step_name='TFJob-launcher'): - return dsl.ContainerOp( - name = step_name, - image = 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf:57d9f7f1cfd458e945d297957621716062d89a49', - arguments = [ - '--workers', number_of_workers, - '--pss', number_of_parameter_servers, - '--tfjob-timeout-minutes', tfjob_timeout_minutes, - '--container-image', container_image, - '--output-dir', output_dir, - '--ui-metadata-type', 'tensorboard', - '--', - ] + command, - file_outputs = {'train': '/output.txt'}, - output_artifact_paths={ - 'mlpipeline-ui-metadata': '/mlpipeline-ui-metadata.json', - }, - ) diff --git a/components/kubeflow/launcher/sample.py b/components/kubeflow/launcher/sample.py new file mode 100644 index 00000000000..a4368190e9d --- /dev/null +++ b/components/kubeflow/launcher/sample.py @@ -0,0 +1,75 @@ +import json +from kfp import components +import kfp.dsl as dsl + +@dsl.pipeline( + name="Launch kubeflow tfjob", + description="An example to launch tfjob." +) +def mnist_train( + name="mnist", + namespace="kubeflow", + workerNum=3, + ttlSecondsAfterFinished=-1, + tfjobTimeoutMinutes=60, + deleteAfterDone=False): + tfjob_launcher_op = components.load_component_from_file("./component.yaml") + # tfjob_launcher_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/master/components/kubeflow/launcher/component.yaml') + + chief = { + "replicas": 1, + "restartPolicy": "OnFailure", + "template": { + "spec": { + "containers": [ + { + "command": [ + "python", + "/opt/model.py" + ], + "args": [ + "--tf-train-steps=6000" + ], + "image": "liuhougangxa/tf-estimator-mnist", + "name": "tensorflow", + } + ] + } + } + } + worker = {} + if workerNum > 0: + worker = { + "replicas": workerNum, + "restartPolicy": "OnFailure", + "template": { + "spec": { + "containers": [ + { + "command": [ + "python", + "/opt/model.py" + ], + "args": [ + "--tf-train-steps=6000" + ], + "image": "liuhougangxa/tf-estimator-mnist", + "name": "tensorflow", + } + ] + } + } + } + tfjob_launcher_op( + name=name, + namespace=namespace, + ttl_seconds_after_finished=ttlSecondsAfterFinished, + worker_spec=worker, + chief_spec=chief, + tfjob_timeout_minutes=tfjobTimeoutMinutes, + delete_finished_tfjob=deleteAfterDone + ) + +if __name__ == "__main__": + import kfp.compiler as compiler + compiler.Compiler().compile(mnist_train, __file__ + ".tar.gz") diff --git a/components/kubeflow/launcher/src/__init__.py b/components/kubeflow/launcher/src/__init__.py index 9251ef2616a..ddd71c00285 100644 --- a/components/kubeflow/launcher/src/__init__.py +++ b/components/kubeflow/launcher/src/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2018 Google LLC +# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not 
use this file except in compliance with the License. @@ -11,5 +11,3 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - -from .kubeflow_tfjob_launcher_op import kubeflow_tfjob_launcher_op diff --git a/components/kubeflow/launcher/src/launch_tf_job.py b/components/kubeflow/launcher/src/launch_tf_job.py deleted file mode 100644 index 414c15fd7c5..00000000000 --- a/components/kubeflow/launcher/src/launch_tf_job.py +++ /dev/null @@ -1,197 +0,0 @@ -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -""" -Usage: -python launch_tf_job.py - --workers=3 - --pss=1 - --container-image=gcr.io/${PROJECT_ID}/ml-pipeline-kubeflow-tf-trainer:${TAG_NAME} - --output-dir gs://ml-pipeline-playground/flower/trainer - --ui-metadata-type tensorboard - -- - python -m trainer.task - --job-dir=gs://ml-pipeline-playground/flower/trainer - --transformed-data-dir=gs://ml-pipeline-playground/flower/transformed - --schema=gs://ml-pipeline-playground/flower/schema.json - --target=label - --hidden-layer-size=100,50 - --steps=2000 -""" -# TODO: Add unit/integration tests - -import argparse -import datetime -import json -import os -import logging -import requests -import subprocess -import six -import time -import yaml -from py import tf_job_client -from kubernetes import client as k8s_client -from kubernetes import config - - -def _generate_train_yaml(src_filename, tfjob_ns, workers, pss, trainer_image, command): - """_generate_train_yaml generates train yaml files based on train.template.yaml""" - with open(src_filename, 'r') as f: - content = yaml.safe_load(f) - - content['metadata']['generateName'] = 'trainer-' - content['metadata']['namespace'] = tfjob_ns - - if workers and pss: - content['spec']['tfReplicaSpecs']['PS']['replicas'] = pss - content['spec']['tfReplicaSpecs']['PS']['template']['spec']['containers'][0]['image'] = trainer_image - content['spec']['tfReplicaSpecs']['PS']['template']['spec']['containers'][0]['command'] = command - content['spec']['tfReplicaSpecs']['Worker']['replicas'] = workers - content['spec']['tfReplicaSpecs']['Worker']['template']['spec']['containers'][0]['image'] = trainer_image - content['spec']['tfReplicaSpecs']['Worker']['template']['spec']['containers'][0]['command'] = command - content['spec']['tfReplicaSpecs']['MASTER']['template']['spec']['containers'][0]['image'] = trainer_image - content['spec']['tfReplicaSpecs']['MASTER']['template']['spec']['containers'][0]['command'] = command - else: - # If no workers and pss set, default is 1. 
- master_spec = content['spec']['tfReplicaSpecs']['MASTER'] - worker_spec = content['spec']['tfReplicaSpecs']['Worker'] - ps_spec = content['spec']['tfReplicaSpecs']['PS'] - master_spec['template']['spec']['containers'][0]['image'] = trainer_image - master_spec['template']['spec']['containers'][0]['command'] = command - worker_spec['template']['spec']['containers'][0]['image'] = trainer_image - worker_spec['template']['spec']['containers'][0]['command'] = command - ps_spec['template']['spec']['containers'][0]['image'] = trainer_image - ps_spec['template']['spec']['containers'][0]['command'] = command - - return content - -def main(argv=None): - parser = argparse.ArgumentParser(description='Kubeflow TFJob launcher') - parser.add_argument('--container-image', type=str, - help='''Container image to run using KubeFlow TFJob. The command line should be added after --.''') - parser.add_argument('--workers', type=int, default=0) - parser.add_argument('--pss', type=int, default=0) - parser.add_argument('--cluster', type=str, - help='GKE cluster set up for kubeflow. If set, zone must be provided. ' + - 'If not set, assuming this runs in a GKE container and current ' + - 'cluster is used.') - parser.add_argument('--zone', type=str, help='zone of the kubeflow cluster.') - parser.add_argument('--kfversion', type=str, - default='v1alpha2', - help='The version of the deployed kubeflow. ' + - 'If not set, the default version is v1alpha2') - parser.add_argument('--tfjob-ns', type=str, - default='default', - help='The namespace where the tfjob is submitted' + - 'If not set, the default namespace is default') - parser.add_argument('--tfjob-timeout-minutes', type=int, - default=10, - help='Time in minutes to wait for the TFJob to complete') - parser.add_argument('--output-dir', type=str) - parser.add_argument('--ui-metadata-type', type=str, default='tensorboard') - import sys - all_args = sys.argv[1:] - separator_idx = all_args.index('--') - launcher_args = all_args[:separator_idx] - remaining_args = all_args[separator_idx + 1:] - - args = parser.parse_args(launcher_args) - - logging.getLogger().setLevel(logging.INFO) - args_dict = vars(args) - if args.cluster and args.zone: - cluster = args_dict.pop('cluster') - zone = args_dict.pop('zone') - else: - # Get culster name and zone from metadata - metadata_server = "http://metadata/computeMetadata/v1/instance/" - metadata_flavor = {'Metadata-Flavor' : 'Google'} - cluster = requests.get(metadata_server + "attributes/cluster-name", - headers = metadata_flavor).text - zone = requests.get(metadata_server + "zone", - headers = metadata_flavor).text.split('/')[-1] - - logging.info('Getting credentials for GKE cluster %s.' 
% cluster) - subprocess.call(['gcloud', 'container', 'clusters', 'get-credentials', cluster, - '--zone', zone]) - - workers = args_dict.pop('workers') - pss = args_dict.pop('pss') - kf_version = args_dict.pop('kfversion') - tfjob_ns = args_dict.pop('tfjob_ns') - tfjob_timeout_minutes = args_dict.pop('tfjob_timeout_minutes') - trainer_image = args.container_image or os.environ['TRAINER_IMAGE_NAME'] - command=remaining_args - logging.info('Generating training template.') - template_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'train.template.yaml') - content_yaml = _generate_train_yaml(template_file, tfjob_ns, workers, pss, trainer_image, command) - - logging.info('Start training.') - # Set up handler for k8s clients - config.load_incluster_config() - api_client = k8s_client.ApiClient() - create_response = tf_job_client.create_tf_job(api_client, content_yaml, version=kf_version) - job_name = create_response['metadata']['name'] - - if args.output_dir: - # Create metadata.json file for visualization. - metadata = { - 'outputs' : [{ - 'type': args.ui_metadata_type, - 'source': args.output_dir, - }] - } - with open('/mlpipeline-ui-metadata.json', 'w') as f: - json.dump(metadata, f) - - wait_response = tf_job_client.wait_for_job( - api_client, tfjob_ns, job_name, kf_version, - timeout=datetime.timedelta(minutes=tfjob_timeout_minutes)) - succ = True - #TODO: update this failure checking after tf-operator has the condition checking function. - if 'Worker' in wait_response['status']['tfReplicaStatuses']: - if 'Failed' in wait_response['status']['tfReplicaStatuses']['Worker']: - logging.error('Training failed since workers failed.') - succ = False - if 'PS' in wait_response['status']['tfReplicaStatuses']: - if 'Failed' in wait_response['status']['tfReplicaStatuses']['PS']: - logging.error('Training failed since PSs failed.') - succ = False - if 'MASTER' in wait_response['status']['tfReplicaStatuses']: - if 'Failed' in wait_response['status']['tfReplicaStatuses']['MASTER']: - logging.error('Training failed since MASTER failed.') - succ = False - - #TODO: remove this after kubeflow fixes the wait_for_job issue - # because the wait_for_job returns when the worker finishes but the master might not be complete yet. - if 'MASTER' in wait_response['status']['tfReplicaStatuses'] and 'active' in wait_response['status']['tfReplicaStatuses']['MASTER']: - master_active = True - while master_active: - # Wait for master to finish - time.sleep(2) - wait_response = tf_job_client.wait_for_job(api_client, tfjob_ns, job_name, kf_version, - timeout=datetime.timedelta(minutes=tfjob_timeout_minutes)) - if 'active' not in wait_response['status']['tfReplicaStatuses']['MASTER']: - master_active = False - - if succ: - logging.info('Training success.') - - tf_job_client.delete_tf_job(api_client, tfjob_ns, job_name, version=kf_version) - with open('/output.txt', 'w') as f: - f.write(args.output_dir) - -if __name__== "__main__": - main() diff --git a/components/kubeflow/launcher/src/launch_tfjob.py b/components/kubeflow/launcher/src/launch_tfjob.py new file mode 100644 index 00000000000..30666d8d143 --- /dev/null +++ b/components/kubeflow/launcher/src/launch_tfjob.py @@ -0,0 +1,136 @@ +# Copyright 2019 kubeflow.org. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+import datetime
+from distutils.util import strtobool
+import json
+import os
+import logging
+import yaml
+import launch_crd
+
+from kubernetes import client as k8s_client
+from kubernetes import config
+
+def yamlOrJsonStr(value):
+    if value == "" or value is None:
+        return None
+    return yaml.safe_load(value)
+
+TFJobGroup = "kubeflow.org"
+TFJobPlural = "tfjobs"
+
+class TFJob(launch_crd.K8sCR):
+    def __init__(self, version="v1", client=None):
+        super(TFJob, self).__init__(TFJobGroup, TFJobPlural, version, client)
+
+    def is_expected_conditions(self, inst, expected_conditions):
+        conditions = inst.get('status', {}).get("conditions")
+        if not conditions:
+            return False, ""
+        if conditions[-1]["type"] in expected_conditions and conditions[-1]["status"] == "True":
+            return True, conditions[-1]["type"]
+        else:
+            return False, conditions[-1]["type"]
+
+def main(argv=None):
+    parser = argparse.ArgumentParser(description='Kubeflow TFJob launcher')
+    parser.add_argument('--name', type=str,
+                        help='TFJob name.')
+    parser.add_argument('--namespace', type=str,
+                        default='kubeflow',
+                        help='TFJob namespace.')
+    parser.add_argument('--version', type=str,
+                        default='v1',
+                        help='TFJob version.')
+    parser.add_argument('--activeDeadlineSeconds', type=int,
+                        default=-1,
+                        help='Specifies the duration (in seconds) since startTime during which the job can remain active before it is terminated. Must be a positive integer. This setting applies only to pods where restartPolicy is OnFailure or Always.')
+    parser.add_argument('--backoffLimit', type=int,
+                        default=-1,
+                        help='Number of retries before marking this job as failed.')
+    parser.add_argument('--cleanPodPolicy', type=str,
+                        default="Running",
+                        help='Defines the policy for cleaning up pods after the TFJob completes.')
+    parser.add_argument('--ttlSecondsAfterFinished', type=int,
+                        default=-1,
+                        help='Defines the TTL for cleaning up finished TFJobs.')
+    parser.add_argument('--psSpec', type=yamlOrJsonStr,
+                        default={},
+                        help='TFJob ps replicaSpecs.')
+    parser.add_argument('--workerSpec', type=yamlOrJsonStr,
+                        default={},
+                        help='TFJob worker replicaSpecs.')
+    parser.add_argument('--chiefSpec', type=yamlOrJsonStr,
+                        default={},
+                        help='TFJob chief replicaSpecs.')
+    parser.add_argument('--evaluatorSpec', type=yamlOrJsonStr,
+                        default={},
+                        help='TFJob evaluator replicaSpecs.')
+    parser.add_argument('--deleteAfterDone', type=strtobool,
+                        default=True,
+                        help='Whether to delete the TFJob automatically after it finishes.')
+    parser.add_argument('--tfjobTimeoutMinutes', type=int,
+                        default=60*24,
+                        help='Time in minutes to wait for the TFJob to complete.')
+
+    args = parser.parse_args()
+
+    logging.getLogger().setLevel(logging.INFO)
+
+    logging.info('Generating tfjob template.')
+
+    config.load_incluster_config()
+    api_client = k8s_client.ApiClient()
+    tfjob = TFJob(version=args.version, client=api_client)
+    inst = {
+        "apiVersion": "%s/%s" % (TFJobGroup, args.version),
+        "kind": "TFJob",
+        "metadata": {
+            "name": args.name,
+            "namespace": args.namespace,
+        },
+        "spec": {
+            "cleanPodPolicy": args.cleanPodPolicy,
+            "tfReplicaSpecs": {
+            },
+        },
+    }
+    if args.ttlSecondsAfterFinished >= 0:
+        inst["spec"]["ttlSecondsAfterFinished"] = args.ttlSecondsAfterFinished
+    if args.backoffLimit >= 0:
+        inst["spec"]["backoffLimit"] = args.backoffLimit
+    if args.activeDeadlineSeconds >= 0:
+        inst["spec"]["activeDeadlineSeconds"] = args.activeDeadlineSeconds
+    if args.psSpec:
+        inst["spec"]["tfReplicaSpecs"]["PS"] = args.psSpec
+    if args.chiefSpec:
+        inst["spec"]["tfReplicaSpecs"]["Chief"] = args.chiefSpec
+    if args.workerSpec:
+        inst["spec"]["tfReplicaSpecs"]["Worker"] = args.workerSpec
+    if args.evaluatorSpec:
+        inst["spec"]["tfReplicaSpecs"]["Evaluator"] = args.evaluatorSpec
+
+    create_response = tfjob.create(inst)
+
+    expected_conditions = ["Succeeded", "Failed"]
+    tfjob.wait_for_condition(
+        args.namespace, args.name, expected_conditions,
+        timeout=datetime.timedelta(minutes=args.tfjobTimeoutMinutes))
+    if args.deleteAfterDone:
+        tfjob.delete(args.name, args.namespace)
+
+if __name__ == "__main__":
+    main()
diff --git a/components/kubeflow/launcher/src/train.template.yaml b/components/kubeflow/launcher/src/train.template.yaml
deleted file mode 100644
index 72e04d63bd8..00000000000
--- a/components/kubeflow/launcher/src/train.template.yaml
+++ /dev/null
@@ -1,90 +0,0 @@
-# Copyright 2018 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
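launch_tfjob.py above assembles the TFJob body programmatically, replacing the static template that is deleted below. Here is a standalone sketch of the resulting custom resource for one chief and two workers; the image and names are placeholders, not values from this repository:

```python
# Sketch of the TFJob custom-resource body that launch_tfjob.py builds.
import json

# tf-operator expects the container to be named "tensorflow".
worker = {
    "replicas": 2,
    "restartPolicy": "OnFailure",
    "template": {"spec": {"containers": [{
        "name": "tensorflow",
        "image": "gcr.io/<project>/trainer:latest",  # placeholder image
        "command": ["python", "/opt/model.py"],
    }]}},
}
chief = dict(worker, replicas=1)  # same pod template, single replica

inst = {
    "apiVersion": "kubeflow.org/v1",
    "kind": "TFJob",
    "metadata": {"name": "mnist-train", "namespace": "kubeflow"},
    "spec": {"cleanPodPolicy": "Running",
             "tfReplicaSpecs": {"Chief": chief, "Worker": worker}},
}
print(json.dumps(inst, indent=2))
```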
- -apiVersion: kubeflow.org/v1alpha2 -kind: TFJob -metadata: - generateName: tfjob - namespace: default -spec: - tfReplicaSpecs: - PS: - replicas: 1 - restartPolicy: OnFailure - template: - spec: - containers: - - name: tensorflow - image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:57d9f7f1cfd458e945d297957621716062d89a49 - command: - - python - - -m - - trainer.task - env: - - name: GOOGLE_APPLICATION_CREDENTIALS - value: "/etc/secrets/user-gcp-sa.json" - volumeMounts: - - name: sa - mountPath: "/etc/secrets" - readOnly: true - volumes: - - name: sa - secret: - secretName: user-gcp-sa - Worker: - replicas: 1 - restartPolicy: OnFailure - template: - spec: - containers: - - name: tensorflow - image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:57d9f7f1cfd458e945d297957621716062d89a49 - command: - - python - - -m - - trainer.task - env: - - name: GOOGLE_APPLICATION_CREDENTIALS - value: "/etc/secrets/user-gcp-sa.json" - volumeMounts: - - name: sa - mountPath: "/etc/secrets" - readOnly: true - volumes: - - name: sa - secret: - secretName: user-gcp-sa - MASTER: - replicas: 1 - restartPolicy: OnFailure - template: - spec: - containers: - - name: tensorflow - image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:57d9f7f1cfd458e945d297957621716062d89a49 - command: - - python - - -m - - trainer.task - env: - - name: GOOGLE_APPLICATION_CREDENTIALS - value: "/etc/secrets/user-gcp-sa.json" - volumeMounts: - - name: sa - mountPath: "/etc/secrets" - readOnly: true - volumes: - - name: sa - secret: - secretName: user-gcp-sa diff --git a/components/kubeflow/launcher/test/test_launcher.py b/components/kubeflow/launcher/test/test_launcher.py deleted file mode 100644 index 4b1f0ab5029..00000000000 --- a/components/kubeflow/launcher/test/test_launcher.py +++ /dev/null @@ -1,60 +0,0 @@ -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- - -import launcher -from launcher import train -import os -import shutil -import subprocess -import sys -import tempfile -import unittest -import yaml - - -class TestLauncher(unittest.TestCase): - - def test_yaml_generation_basic(self): - """Test generating train yaml from templates""" - - test_data_dir = os.path.join(os.path.dirname(__file__), 'testdata') - train_template_file = os.path.join(test_data_dir, 'train.template.yaml') - tfjob_ns = 'default' - worker = 2 - pss = 1 - args_list = [] - args_list.append('--learning-rate=0.1') - generated_yaml = train._generate_train_yaml(train_template_file, tfjob_ns, worker, pss, args_list) - with open(os.path.join(test_data_dir, 'train_basic.yaml'), 'r') as f: - golden = yaml.safe_load(f) - self.assertEqual(golden, generated_yaml) - - def test_yaml_generation_advanced(self): - """Test generating train yaml with zero worker and specified tfjob namespace""" - - test_data_dir = os.path.join(os.path.dirname(__file__), 'testdata') - train_template_file = os.path.join(test_data_dir, 'train.template.yaml') - worker = 0 - pss = 0 - args_list = [] - tfjob_ns = 'kubeflow' - args_list.append('--learning-rate=0.1') - generated_yaml = train._generate_train_yaml(train_template_file, tfjob_ns, worker, pss, args_list) - with open(os.path.join(test_data_dir, 'train_zero_worker.yaml'), 'r') as f: - golden = yaml.safe_load(f) - self.assertEqual(golden, generated_yaml) - -if __name__ == '__main__': - unittest.main() diff --git a/components/kubeflow/launcher/test/testdata/train.template.yaml b/components/kubeflow/launcher/test/testdata/train.template.yaml deleted file mode 100644 index aeaa0b456e5..00000000000 --- a/components/kubeflow/launcher/test/testdata/train.template.yaml +++ /dev/null @@ -1,56 +0,0 @@ -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-apiVersion: kubeflow.org/v1alpha2 -kind: TFJob -metadata: - generateName: tfjob - namespace: default -spec: - tfReplicaSpecs: - PS: - replicas: 1 - restartPolicy: OnFailure - template: - spec: - containers: - - name: tensorflow - image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer - command: - - python - - -m - - trainer.task - Worker: - replicas: 1 - restartPolicy: OnFailure - template: - spec: - containers: - - name: tensorflow - image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer - command: - - python - - -m - - trainer.task - MASTER: - replicas: 1 - restartPolicy: OnFailure - template: - spec: - containers: - - name: tensorflow - image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer - command: - - python - - -m - - trainer.task diff --git a/components/kubeflow/launcher/test/testdata/train_basic.yaml b/components/kubeflow/launcher/test/testdata/train_basic.yaml deleted file mode 100644 index 3e2a5aa6ff3..00000000000 --- a/components/kubeflow/launcher/test/testdata/train_basic.yaml +++ /dev/null @@ -1,59 +0,0 @@ -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -apiVersion: kubeflow.org/v1alpha2 -kind: TFJob -metadata: - generateName: trainer- - namespace: default -spec: - tfReplicaSpecs: - PS: - replicas: 1 - restartPolicy: OnFailure - template: - spec: - containers: - - name: tensorflow - image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer - command: - - python - - -m - - trainer.task - - --learning-rate=0.1 - Worker: - replicas: 2 - restartPolicy: OnFailure - template: - spec: - containers: - - name: tensorflow - image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer - command: - - python - - -m - - trainer.task - - --learning-rate=0.1 - MASTER: - replicas: 1 - restartPolicy: OnFailure - template: - spec: - containers: - - name: tensorflow - image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer - command: - - python - - -m - - trainer.task - - --learning-rate=0.1 diff --git a/components/kubeflow/launcher/test/testdata/train_zero_worker.yaml b/components/kubeflow/launcher/test/testdata/train_zero_worker.yaml deleted file mode 100644 index 593a0a53ea1..00000000000 --- a/components/kubeflow/launcher/test/testdata/train_zero_worker.yaml +++ /dev/null @@ -1,59 +0,0 @@ -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-apiVersion: kubeflow.org/v1alpha2 -kind: TFJob -metadata: - generateName: trainer- - namespace: kubeflow -spec: - tfReplicaSpecs: - PS: - replicas: 1 - restartPolicy: OnFailure - template: - spec: - containers: - - name: tensorflow - image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer - command: - - python - - -m - - trainer.task - - --learning-rate=0.1 - Worker: - replicas: 1 - restartPolicy: OnFailure - template: - spec: - containers: - - name: tensorflow - image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer - command: - - python - - -m - - trainer.task - - --learning-rate=0.1 - MASTER: - replicas: 1 - restartPolicy: OnFailure - template: - spec: - containers: - - name: tensorflow - image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer - command: - - python - - -m - - trainer.task - - --learning-rate=0.1 diff --git a/components/local/confusion_matrix/component.yaml b/components/local/confusion_matrix/component.yaml index 025d4818ec1..022819575f6 100644 --- a/components/local/confusion_matrix/component.yaml +++ b/components/local/confusion_matrix/component.yaml @@ -9,7 +9,7 @@ outputs: - {name: MLPipeline Metrics, type: Metrics} implementation: container: - image: gcr.io/ml-pipeline/ml-pipeline-local-confusion-matrix:57d9f7f1cfd458e945d297957621716062d89a49 + image: gcr.io/ml-pipeline/ml-pipeline-local-confusion-matrix:ad9bd5648dd0453005225779f25d8cebebc7ca00 command: [python2, /ml/confusion_matrix.py] args: [ --predictions, {inputValue: Predictions}, diff --git a/components/local/roc/component.yaml b/components/local/roc/component.yaml index 9e46900f7b3..c5d62219772 100644 --- a/components/local/roc/component.yaml +++ b/components/local/roc/component.yaml @@ -11,7 +11,7 @@ outputs: - {name: MLPipeline Metrics, type: Metrics} implementation: container: - image: gcr.io/ml-pipeline/ml-pipeline-local-confusion-matrix:57d9f7f1cfd458e945d297957621716062d89a49 + image: gcr.io/ml-pipeline/ml-pipeline-local-confusion-matrix:ad9bd5648dd0453005225779f25d8cebebc7ca00 command: [python2, /ml/roc.py] args: [ --predictions, {inputValue: Predictions dir}, diff --git a/components/presto/query/Dockerfile b/components/presto/query/Dockerfile new file mode 100644 index 00000000000..75538d4d947 --- /dev/null +++ b/components/presto/query/Dockerfile @@ -0,0 +1,17 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +FROM python:3.7 + +RUN python3 -m pip install pyhive[presto] + +COPY ./src /pipelines/component/src diff --git a/components/presto/query/component.yaml b/components/presto/query/component.yaml new file mode 100644 index 00000000000..927425c9bd6 --- /dev/null +++ b/components/presto/query/component.yaml @@ -0,0 +1,54 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+name: Presto Query
+description: |
+  A Kubeflow Pipeline component to submit a query to Presto.
+inputs:
+  - name: host
+    type: String
+    description: 'Presto host.'
+  - name: catalog
+    type: String
+    description: 'The name of the catalog.'
+  - name: schema
+    type: String
+    description: 'The name of the schema.'
+  - name: query
+    type: String
+    description: 'The SQL query to execute in Presto.'
+  - name: user
+    type: String
+    description: 'The Presto user name.'
+  - name: pwd
+    type: String
+    description: 'The Presto password.'
+  - name: output
+    description: 'The path or name of the emitted output.'
+outputs:
+  - name: output
+    description: 'The path or name of the emitted output.'
+implementation:
+  container:
+    image: docker.io/mkavi/kubeflow-pipeline-presto:latest
+    command: [
+      python3, /pipelines/component/src/program.py,
+      --host, {inputValue: host},
+      --catalog, {inputValue: catalog},
+      --schema, {inputValue: schema},
+      --query, {inputValue: query},
+      --user, {inputValue: user},
+      --pwd, {inputValue: pwd},
+      --output, {inputValue: output}
+    ]
+    fileOutputs:
+      output: /output.txt
diff --git a/components/presto/query/src/program.py b/components/presto/query/src/program.py
new file mode 100644
index 00000000000..319fee2009b
--- /dev/null
+++ b/components/presto/query/src/program.py
@@ -0,0 +1,75 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+from pyhive import presto
+
+
+def get_conn(host=None, catalog=None, schema=None, user=None, pwd=None):
+    conn = presto.connect(
+        host=host,
+        port=443,
+        protocol="https",
+        catalog=catalog,
+        schema=schema,
+        username=user,
+        password=pwd,
+    )
+
+    return conn
+
+
+def query(conn, sql):
+    cursor = conn.cursor()
+    cursor.execute(sql)
+    cursor.fetchall()
+
+
+def main():
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--host", type=str, help="Presto host.")
+    parser.add_argument(
+        "--catalog", type=str, required=True, help="The name of the catalog."
+    )
+    parser.add_argument(
+        "--schema", type=str, required=True, help="The name of the schema."
+    )
+    parser.add_argument(
+        "--query",
+        type=str,
+        required=True,
+        help="The SQL query to execute in Presto.",
+    )
+    parser.add_argument(
+        "--user", type=str, required=True, help="The Presto user name."
+    )
+    parser.add_argument(
+        "--pwd", type=str, required=True, help="The Presto password."
+ ) + parser.add_argument( + "--output", + type=str, + required=True, + help="The path or name of the emitted output.", + ) + + args = parser.parse_args() + + conn = get_conn(args.host, args.catalog, args.schema, args.user, args.pwd) + query(conn, args.query) + + with open("/output.txt", "w+") as w: + w.write(args.output) + + +if __name__ == "__main__": + main() diff --git a/components/release.sh b/components/release.sh index 88d768689fb..58e8b9f5fdf 100755 --- a/components/release.sh +++ b/components/release.sh @@ -27,7 +27,7 @@ images=( "ml-pipeline-kubeflow-deployer" "ml-pipeline-kubeflow-tf-trainer" "ml-pipeline-kubeflow-tf-trainer-gpu" - "ml-pipeline-kubeflow-tf" + "ml-pipeline-kubeflow-tfjob" "ml-pipeline-dataproc-analyze" "ml-pipeline-dataproc-create-cluster" "ml-pipeline-dataproc-delete-cluster" diff --git a/components/sample/keras/train_classifier/README.md b/components/sample/keras/train_classifier/README.md index 3cc6b8d2dae..320a1f2c7e4 100644 --- a/components/sample/keras/train_classifier/README.md +++ b/components/sample/keras/train_classifier/README.md @@ -53,7 +53,7 @@ training_set_labels_gcs_path = os.path.join(input_data_gcs_dir, 'training_set_la gfile.Copy(training_set_features_local_path, training_set_features_gcs_path) gfile.Copy(training_set_labels_local_path, training_set_labels_gcs_path) -output_model_uri_template = os.path.join(output_data_gcs_dir, '{{workflow.uid}}/{{pod.name}}/output_model_uri/data') +output_model_uri_template = os.path.join(output_data_gcs_dir, kfp.dsl.EXECUTION_ID_PLACEHOLDER, 'output_model_uri', 'data') xor_model_config = requests.get(test_data_url_prefix + 'model_config.json').content diff --git a/components/tensorflow/tensorboard/prepare_tensorboard/component.yaml b/components/tensorflow/tensorboard/prepare_tensorboard/component.yaml new file mode 100644 index 00000000000..ef5741cbf30 --- /dev/null +++ b/components/tensorflow/tensorboard/prepare_tensorboard/component.yaml @@ -0,0 +1,35 @@ +name: Create Tensorboard visualization +description: | + Pre-creates Tensorboard visualization for a given Log dir URI. + This way the Tensorboard can be viewed before the training completes. + The output Log dir URI should be passed to a trainer component that will write Tensorboard logs to that directory. 
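The description above is implemented by the short shell script that follows; for reference, a Python sketch of the mlpipeline-ui-metadata payload it writes (the log dir URI is a placeholder, and the file name assumes the KFP UI metadata convention):

```python
# Equivalent of the shell snippet in this component: emit the
# tensorboard metadata that the Kubeflow Pipelines UI reads.
import json

log_dir = "gs://<bucket>/tensorboard/logs"  # placeholder Log dir URI
metadata = {"outputs": [{"type": "tensorboard", "source": log_dir}]}
with open("mlpipeline-ui-metadata.json", "w") as f:
    json.dump(metadata, f)
```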
+inputs: +- {name: Log dir URI} +outputs: +- {name: Log dir URI} +- {name: MLPipeline UI Metadata} +implementation: + container: + image: alpine + command: + - sh + - -ex + - -c + - | + log_dir="$0" + output_log_dir_path="$1" + output_metadata_path="$2" + mkdir -p "$(dirname "$output_log_dir_path")" + mkdir -p "$(dirname "$output_metadata_path")" + echo "$log_dir" > "$output_log_dir_path" + echo ' + { + "outputs" : [{ + "type": "tensorboard", + "source": "'"$log_dir"'" + }] + } + ' >"$output_metadata_path" + - {inputValue: Log dir URI} + - {outputPath: Log dir URI} + - {outputPath: MLPipeline UI Metadata} diff --git a/components/third_party_licenses.csv b/components/third_party_licenses.csv index a86a2338b80..6977f8ac8a4 100644 --- a/components/third_party_licenses.csv +++ b/components/third_party_licenses.csv @@ -65,6 +65,7 @@ google-api-python-client,https://raw.githubusercontent.com/google/google-api-pyt google-auth,https://raw.githubusercontent.com/GoogleCloudPlatform/google-auth-library-python/master/LICENSE,Apache 2.0 jupyterlab,https://raw.githubusercontent.com/jupyterlab/jupyterlab/master/LICENSE,BSD-3-clause grpcio,https://raw.githubusercontent.com/grpc/grpc/master/LICENSE,Apache 2.0 +grpcio-gcp,https://raw.githubusercontent.com/GoogleCloudPlatform/grpc-gcp-python/master/LICENSE,Apache 2.0 absl-py,https://raw.githubusercontent.com/abseil/abseil-py/master/LICENSE,Apache 2.0 protobuf,https://raw.githubusercontent.com/protocolbuffers/protobuf/master/LICENSE,BSD-3-clause tensorflow,https://raw.githubusercontent.com/tensorflow/tensorflow/master/LICENSE,Apache 2.0 @@ -87,10 +88,15 @@ funcsigs,https://raw.githubusercontent.com/aliles/funcsigs/master/LICENSE,Apache gapic-google-cloud-pubsub-v1,https://raw.githubusercontent.com/googleapis/api-client-staging/master/generated/python/gapic-google-cloud-pubsub-v1/LICENSE,Apache 2.0 google-apitools,https://raw.githubusercontent.com/google/apitools/master/LICENSE,Apache 2.0 google-auth-library-python-httplib2,https://raw.githubusercontent.com/GoogleCloudPlatform/google-auth-library-python-httplib2/master/LICENSE,Apache 2.0 -google-cloud-bigquery,https://raw.githubusercontent.com/GoogleCloudPlatform/google-cloud-python/master/bigquery/LICENSE,Apache 2.0 -google-cloud-bigtable,https://raw.githubusercontent.com/GoogleCloudPlatform/google-cloud-python/master/bigtable/LICENSE,Apache 2.0 -google-cloud-core,https://raw.githubusercontent.com/GoogleCloudPlatform/google-cloud-python/master/core/LICENSE,Apache 2.0 -google-cloud-pubsub,https://raw.githubusercontent.com/GoogleCloudPlatform/google-cloud-python/master/pubsub/LICENSE,Apache 2.0 +google-cloud-bigquery,https://raw.githubusercontent.com/googleapis/google-cloud-python/master/LICENSE,Apache 2.0 +google-cloud-bigtable,https://raw.githubusercontent.com/googleapis/google-cloud-python/master/LICENSE,Apache 2.0 +google-cloud-core,https://raw.githubusercontent.com/googleapis/google-cloud-python/master/LICENSE,Apache 2.0 +google-cloud-dlp,https://raw.githubusercontent.com/googleapis/python-dlp/master/LICENSE,Apache 2.0 +google-cloud-language,https://raw.githubusercontent.com/googleapis/google-cloud-python/master/LICENSE,Apache 2.0 +google-cloud-pubsub,https://raw.githubusercontent.com/googleapis/google-cloud-python/master/LICENSE,Apache 2.0 +google-cloud-spanner,https://raw.githubusercontent.com/googleapis/python-spanner/master/LICENSE,Apache 2.0 +google-cloud-videointelligence,https://raw.githubusercontent.com/googleapis/python-videointelligence/master/LICENSE,Apache 2.0 
+google-cloud-vision,https://raw.githubusercontent.com/googleapis/python-vision/master/LICENSE,Apache 2.0 google-gax,https://raw.githubusercontent.com/googleapis/gax-python/master/LICENSE,Apache 2.0 googleapis-common-protos,https://raw.githubusercontent.com/googleapis/api-common-protos/master/LICENSE,Apache 2.0 googledatastore,https://raw.githubusercontent.com/GoogleCloudPlatform/google-cloud-datastore/master/LICENSE,Apache 2.0 @@ -101,7 +107,7 @@ monotonic,https://raw.githubusercontent.com/atdt/monotonic/master/LICENSE,Apache oauth2client,https://raw.githubusercontent.com/google/oauth2client/master/LICENSE,Apache 2.0 ply,https://raw.githubusercontent.com/dabeaz/ply/master/README.md,BSD google-cloud-datastore-v1,https://raw.githubusercontent.com/GoogleCloudPlatform/google-cloud-datastore/master/LICENSE,Apache 2.0 -proto-google-cloud-pubsub-v1,https://raw.githubusercontent.com/GoogleCloudPlatform/google-cloud-python/master/pubsub/LICENSE,Apache 2.0 +proto-google-cloud-pubsub-v1,https://raw.githubusercontent.com/googleapis/google-cloud-python/master/LICENSE,Apache 2.0 pytz,https://raw.githubusercontent.com/newvem/pytz/master/LICENSE.txt,MIT PyVCF,https://raw.githubusercontent.com/jamescasbon/PyVCF/master/LICENSE,BSD requests,https://raw.githubusercontent.com/requests/requests/master/LICENSE,Apache 2.0 @@ -137,8 +143,8 @@ ipython-genutils,https://raw.githubusercontent.com/ipython/ipython_genutils/mast ipywidgets,https://raw.githubusercontent.com/jupyter-widgets/ipywidgets/master/LICENSE,3-Clause BSD Jinja2,https://raw.githubusercontent.com/pallets/jinja/master/LICENSE.rst,BSD jsonschema,https://raw.githubusercontent.com/Julian/jsonschema/master/COPYING,MIT -jupyter,https://raw.githubusercontent.com/jupyter/notebook/master/COPYING.md,BSD -notebook,https://raw.githubusercontent.com/jupyter/notebook/master/COPYING.md,BSD +jupyter,https://raw.githubusercontent.com/jupyter/notebook/master/LICENSE,BSD +notebook,https://raw.githubusercontent.com/jupyter/notebook/master/LICENSE,BSD jupyter-client,https://raw.githubusercontent.com/jupyter/jupyter_client/master/COPYING.md,BSD jupyter-console,https://raw.githubusercontent.com/jupyter/jupyter_console/master/COPYING.md,BSD jupyter-core,https://raw.githubusercontent.com/jupyter/jupyter_core/master/COPYING.md,BSD @@ -194,3 +200,19 @@ grpc-google-logging-v2,https://raw.githubusercontent.com/googleapis/googleapis/m grpc-google-pubsub-v1,https://raw.githubusercontent.com/googleapis/googleapis/master/LICENSE,Apache 2.0 google-cloud-datastore,https://raw.githubusercontent.com/GoogleCloudPlatform/google-cloud-datastore/master/LICENSE,Apache 2.0 pymongo,https://raw.githubusercontent.com/mongodb/mongo-python-driver/master/LICENSE,Apache 2.0 +google-auth-oauthlib,https://raw.githubusercontent.com/googleapis/google-auth-library-python-oauthlib/master/LICENSE,Apache 2.0 +google-pasta,https://raw.githubusercontent.com/google/pasta/master/LICENSE,Apache 2.0 +Keras-Preprocessing,https://raw.githubusercontent.com/keras-team/keras-preprocessing/master/LICENSE,MIT +ml-metadata,https://raw.githubusercontent.com/google/ml-metadata/master/LICENSE,Apache 2.0 +opt-einsum,https://raw.githubusercontent.com/dgasmith/opt_einsum/master/LICENSE,MIT +tensorflow-estimator,https://raw.githubusercontent.com/tensorflow/estimator/master/LICENSE,Apache 2.0 +wrapt,https://github.com/GrahamDumpleton/wrapt/blob/develop/LICENSE,2-Clause BSD +tensorflow-serving-api,https://raw.githubusercontent.com/tensorflow/serving/master/LICENSE,Apache 2.0 
+tfx-bsl,https://raw.githubusercontent.com/tensorflow/tfx-bsl/master/LICENSE,Apache 2.0
+keyring,https://raw.githubusercontent.com/jaraco/keyring/master/LICENSE,MIT
+keyrings.alt,https://raw.githubusercontent.com/jaraco/keyrings.alt/master/LICENSE,MIT
+pycrypto,https://raw.githubusercontent.com/dlitz/pycrypto/master/COPYRIGHT,Public Domain
+pygobject,https://raw.githubusercontent.com/GNOME/pygobject/mainline/COPYING,LGPL
+pyxdg,https://cgit.freedesktop.org/xdg/pyxdg/plain/COPYING,LGPL
+SecretStorage,https://raw.githubusercontent.com/mitya57/secretstorage/master/LICENSE,BSD-3
+typing-extensions,https://raw.githubusercontent.com/python/typing/master/typing_extensions/LICENSE,Python Software Foundation License
diff --git a/developer_guide.md b/developer_guide.md
index 3a51e0586e9..a9e6a37f4bb 100644
--- a/developer_guide.md
+++ b/developer_guide.md
@@ -11,7 +11,7 @@ The Pipeline system is included in kubeflow. See [Getting Started Guide](https:/
 ### GKE
 To be able to use GKE, the Docker images need to be uploaded to a public Docker repository, such as [GCR](https://cloud.google.com/container-registry/)
 
-To build the API server image and upload it to GCR:
+To build the API server image and upload it to GCR on x86_64 machines:
 ```bash
 # Run in the repository root directory
 $ docker build -t gcr.io/<project-id>/api-server:latest -f backend/Dockerfile .
 
@@ -20,6 +20,17 @@ $ gcloud auth configure-docker
 $ docker push gcr.io/<project-id>/api-server:latest
 ```
 
+To build the API server image and upload it to GCR on non-x86_64 machines (such as aarch64 machines):
+```bash
+# First, build the bazel image (e.g. version 0.24.0)
+$ docker build -t bazel:0.24.0 -f backend/Dockerfile.bazel .
+# Run in the repository root directory
+$ docker build -t gcr.io/<project-id>/api-server:latest -f backend/Dockerfile --build-arg BAZEL_IMAGE=bazel:0.24.0 .
+# Push to GCR
+$ gcloud auth configure-docker
+$ docker push gcr.io/<project-id>/api-server:latest
+```
+
 To build the scheduled workflow controller image and upload it to GCR:
 ```bash
 # Run in the repository root directory
@@ -69,7 +80,7 @@ $ docker build -t ml-pipeline-api-server -f backend/Dockerfile .
 
 Python based visualizations are a new method to visualize results within the Kubeflow Pipelines UI. For more information about Python based visualizations please visit the [documentation page](https://www.kubeflow.org/docs/pipelines/sdk/python-based-visualizations).
 
-To create predefine visualizations please check the [developer guide](https://github.com/kubeflow/pipelines/blob/master/backend/src/apiserver/visualization/developer_guide.md).
+To create predefined visualizations, please check the [developer guide](https://github.com/kubeflow/pipelines/blob/master/backend/src/apiserver/visualization/README.md).
 
 ## Unit test
diff --git a/docs/source/kfp.client.rst b/docs/source/kfp.client.rst
index a81b9538b33..e324beb810d 100644
--- a/docs/source/kfp.client.rst
+++ b/docs/source/kfp.client.rst
@@ -5,3 +5,43 @@ kfp.Client class
    :members:
    :undoc-members:
    :show-inheritance:
+
+kfp.Client().runs
+-----------------------------
+
+.. autoclass:: kfp_server_api.api.run_service_api.RunServiceApi
+   :members:
+   :undoc-members:
+   :show-inheritance:
+
+kfp.Client().pipelines
+-----------------------------
+
+.. autoclass:: kfp_server_api.api.pipeline_service_api.PipelineServiceApi
+   :members:
+   :undoc-members:
+   :show-inheritance:
+
+kfp.Client().experiments
+-----------------------------
+
+.. 
autoclass:: kfp_server_api.api.experiment_service_api.ExperimentServiceApi + :members: + :undoc-members: + :show-inheritance: + +kfp.Client().jobs +----------------------------- + +.. autoclass:: kfp_server_api.api.job_service_api.JobServiceApi + :members: + :undoc-members: + :show-inheritance: + +kfp.Client().pipeline_uploads +----------------------------- + +.. autoclass:: kfp_server_api.api.pipeline_upload_service_api.PipelineUploadServiceApi + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/source/kfp.dsl.rst b/docs/source/kfp.dsl.rst index 0a20e84849c..7dbc94d4d48 100644 --- a/docs/source/kfp.dsl.rst +++ b/docs/source/kfp.dsl.rst @@ -8,6 +8,10 @@ kfp.dsl package :imported-members: :exclude-members: Pipeline, OpsGroup, match_serialized_pipelineparam + .. py:data:: RUN_ID_PLACEHOLDER + + .. py:data:: EXECUTION_ID_PLACEHOLDER + .. toctree:: diff --git a/docs/source/kfp.rst b/docs/source/kfp.rst index fb6607415fd..c0cf566101f 100644 --- a/docs/source/kfp.rst +++ b/docs/source/kfp.rst @@ -14,3 +14,10 @@ kfp package kfp.client kfp.notebook kfp.extensions + +.. automodule:: kfp + :members: + :undoc-members: + :show-inheritance: + :imported-members: + :exclude-members: Client diff --git a/frontend/.eslintrc.yaml b/frontend/.eslintrc.yaml new file mode 100644 index 00000000000..4c2fdf628f7 --- /dev/null +++ b/frontend/.eslintrc.yaml @@ -0,0 +1,9 @@ +extends: + - react-app +ignorePatterns: + - node_modules/ + - src/apis # these are generated api clients + - '*.test.ts' + - '*.test.tsx' +rules: + react/jsx-no-target-blank: ['error', { "allowReferrer": true }] diff --git a/frontend/.gitignore b/frontend/.gitignore index 73f4e092e1e..93aed679931 100644 --- a/frontend/.gitignore +++ b/frontend/.gitignore @@ -8,8 +8,6 @@ swagger-codegen-cli.jar # testing /coverage -backstop_data/html_report/ -backstop_data/bitmaps_test/ # production /build @@ -24,3 +22,9 @@ backstop_data/bitmaps_test/ npm-debug.log* yarn-debug.log* yarn-error.log* + +# coverage reports +coverage + +# vscode +.vscode diff --git a/frontend/.nvmrc b/frontend/.nvmrc new file mode 100644 index 00000000000..cae54a258e6 --- /dev/null +++ b/frontend/.nvmrc @@ -0,0 +1 @@ +v12.14.1 diff --git a/frontend/.prettierignore b/frontend/.prettierignore new file mode 100644 index 00000000000..c83f90a2b01 --- /dev/null +++ b/frontend/.prettierignore @@ -0,0 +1 @@ +src/generated diff --git a/frontend/.prettierrc.yaml b/frontend/.prettierrc.yaml new file mode 100644 index 00000000000..960e71e6ab5 --- /dev/null +++ b/frontend/.prettierrc.yaml @@ -0,0 +1,4 @@ +trailingComma: all +singleQuote: true +jsxSingleQuote: true +printWidth: 100 diff --git a/frontend/Dockerfile b/frontend/Dockerfile index 9868a144b3a..c1662e16132 100644 --- a/frontend/Dockerfile +++ b/frontend/Dockerfile @@ -1,6 +1,10 @@ -FROM node:9.4.0 as build +FROM node:12.14.1 as build ARG COMMIT_HASH +ENV COMMIT_HASH=${COMMIT_HASH} +ARG TAG_NAME +ENV TAG_NAME=${TAG_NAME} + ARG DATE WORKDIR ./src @@ -9,19 +13,13 @@ COPY . . 
 WORKDIR ./frontend
 
-# Workaround for ppc64le since phantomjs does not support ppc64le
-RUN if [ "$(uname -m)" = "ppc64le" ]; then \
-    wget --no-verbose -O /tmp/phantomjs-2.1.1-linux-ppc64.tar.bz2 https://github.com/ibmsoe/phantomjs/releases/download/2.1.1/phantomjs-2.1.1-linux-ppc64.tar.bz2 \
-    && tar xf /tmp/phantomjs-2.1.1-linux-ppc64.tar.bz2 -C /usr/local/ \
-    && ln -s /usr/local/phantomjs-2.1.1-linux-ppc64/bin/phantomjs /usr/bin/phantomjs; \
-    fi
-
-RUN npm install && npm run postinstall
+RUN npm ci && npm run postinstall
 RUN npm run build
 
 RUN mkdir -p ./server/dist && \
     echo ${COMMIT_HASH} > ./server/dist/COMMIT_HASH && \
-    echo ${DATE} > ./server/dist/BUILD_DATE
+    echo ${DATE} > ./server/dist/BUILD_DATE && \
+    echo ${TAG_NAME} > ./server/dist/TAG_NAME
 
 # Generate the dependency licenses files (one for the UI and one for the webserver),
 # concatenate them to one file under ./src/server
@@ -29,7 +27,7 @@ RUN npm i -D license-checker
 RUN node gen_licenses . && node gen_licenses server && \
     cat dependency-licenses.txt >> server/dependency-licenses.txt
 
-FROM node:9.4.0-alpine
+FROM node:12.14.1-alpine
 
 COPY --from=build ./src/frontend/server /server
 COPY --from=build ./src/frontend/build /client
diff --git a/frontend/OWNERS b/frontend/OWNERS
index 98a0efa0014..e4115faf230 100644
--- a/frontend/OWNERS
+++ b/frontend/OWNERS
@@ -1,10 +1,6 @@
 approvers:
-  - rileyjbauer
   - bobgy
   - jingzhang36
   - rmgogogo
 reviewers:
-  - rileyjbauer
   - bobgy
-  - jingzhang36
-  - rmgogogo
diff --git a/frontend/README.md b/frontend/README.md
index 1a2f816d606..2890c088f64 100644
--- a/frontend/README.md
+++ b/frontend/README.md
@@ -1,47 +1,133 @@
 # Kubeflow Pipelines Management Frontend
 
-**Develop:**
-You need `npm`, install dependencies using `npm install`.
+## Tools you need
 
-If you made any changes to protos (see backend/README), you'll need to
-regenerate the Typescript client library from swagger. We use
-swagger-codegen-cli@2.4.7, which you can get
-[here](http://central.maven.org/maven2/io/swagger/swagger-codegen-cli/2.4.7/swagger-codegen-cli-2.4.7.jar).
-Make sure the jar file is somewhere on your path with the name
-swagger-codegen-cli.jar, then run `npm run apis`.
+You need `node v12` and `npm`.
+We recommend installing `node` and `npm` using https://github.com/nvm-sh/nvm. After installing nvm,
+you can install `node v12` by running `nvm install 12`.
+
+## Manage dev environment with npm
+
+### First time
+1. Clone this repo.
+2. Navigate to the frontend folder: `cd $KFP_SRC/frontend`.
+3. Install dependencies: `npm ci`.
+
+`npm ci` installs the exact dependency versions pinned in package-lock.json, so your environment
+matches everyone else's. (Usually you only need to run this once, but after others add new
+dependencies you need to run `npm ci` again to pick up the updates.)
+
+### Package management
+Run `npm install --save <package>` (or `npm i -S <package>` for short) to install runtime dependencies and save them to package.json.
+Run `npm install --save-dev <package>` (or `npm i -D <package>` for short) to install dev dependencies and save them to package.json.
+
+### Daily workflow
+You will see a lot of `npm run xxx` commands in the instructions below; the actual script being run is defined in the "scripts" field of [package.json](https://github.com/kubeflow/pipelines/blob/91db95a601fa7fffcb670cb744a5dcaeb08290ae/frontend/package.json#L32). Common development scripts are maintained in package.json, and we use npm to call them conveniently.
 
-You can then do `npm start` to run a static file server at port 3000 that
-watches the source files. This also adds a mock backend api server handler to
-webpack-dev-server so it can serve basic api calls, as well as a mock
-webserver to handle the Single Page App requests, which redirects api
-requests to the aforementioned mock api server. For example, requesting the
-pipelines page sends a fetch request to
+### Next steps with npm
+You can learn more about npm at https://docs.npmjs.com/about-npm/.
+
+## Start frontend development server
+
+You can then do `npm start` to run a webpack dev server at port 3000 that
+watches the source files. It also redirects api requests to localhost:3001. For
+example, requesting the pipelines page sends a fetch request to
 http://localhost:3000/apis/v1beta1/pipelines, which is proxied by the
-webserver to the api server at http://localhost:3001/apis/v1beta1/pipelines,
-which will return the list of pipelines currently defined in the mock
-database.
+webserver to http://localhost:3001/apis/v1beta1/pipelines,
+which should return the list of pipelines.
+
+Follow the next section to start an api mock/proxy server so that localhost:3001
+responds to api requests.
+
+## Start api mock/proxy server
+
+### Api mock server
+
+This is the easiest way to start developing, but it does not support all apis during
+development.
+
+Run `npm run mock:api` to start a mock backend api server handler so it can
+serve basic api calls with mock data.
+
+### Proxy to a real cluster
+
+This requires you to already have a real KFP cluster; you can then proxy requests to it.
+
+Before you start, configure your `kubectl` to talk to your KFP cluster.
 
-### Using a real cluster as backend
+Then it depends on what you want to develop:
 
-1. First configure your `kubectl` to talk to your kfp lite cluster.
-2. `npm run start:proxies` to start proxy servers that port forwards to your cluster.
-3. `npm start` to start a webpack dev server, it has already been configured to talk to aforementioned proxies.
+| What to develop?                     | Script to run                                               | Extra notes                                                        |
+| ------------------------------------ | ----------------------------------------------------------- | ------------------------------------------------------------------ |
+| Client UI                            | `NAMESPACE=kubeflow npm run start:proxy`                    |                                                                    |
+| Client UI + Node server              | `NAMESPACE=kubeflow npm run start:proxy-and-server`         | You need to rerun the script every time you edit node server code. |
+| Client UI + Node server (debug mode) | `NAMESPACE=kubeflow npm run start:proxy-and-server-inspect` | Same as above, plus you can use Chrome to debug the server.        |
 
-**Production Build:**
+## Unit testing FAQ
+There are a few types of tests during presubmit:
+* formatting, see the [Code Style section](#code-style)
+* linting, which you can also run locally with `npm run lint`
+* client UI unit tests, which you can run locally with `npm test`
+* UI node server unit tests, which you can run locally with `cd server && npm test`
+
+There is a special type of unit test, [snapshot tests](https://jestjs.io/docs/en/snapshot-testing). When
+snapshot tests fail, you can rerun all tests and update the snapshots automatically with `npm test -u`,
+then commit the snapshot changes.
+
+## Production Build
 You can do `npm run build` to build the frontend code for production, which
 creates a ./build directory with the minified bundle. You can test this bundle
 using `server/server.js`. Note you need to have an API server running, whose
 address (host + port) you can then feed as environment variables into
 `server.js`. See the usage instructions in that file for more.
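+
+For example, here is a minimal sketch of serving a production build locally. It assumes an
+API server is reachable at localhost:3001 (for instance via a `kubectl port-forward`), and
+that `server.js` reads the same environment variables the container build below uses:
+
+```bash
+# Build the minified production bundle into ./build.
+npm run build
+
+# Serve the bundle; the env vars tell the webserver where the API server lives.
+# See server/server.js for the exact arguments it expects.
+ML_PIPELINE_SERVICE_HOST=localhost \
+ML_PIPELINE_SERVICE_PORT=3001 \
+node server/server.js
+```
+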
-The mock API server and the mock webserver can still be used with the
-production UI code by running `npm run mock:api` and `npm run mock:server`.
-
-**Container Build:**
+## Container Build
 
 You can also do `npm run docker` if you have docker installed to build an
 image containing the production bundle and the server pieces. In order to run
 this image, you'll need to port forward 3000, and pass the environment
 variables `ML_PIPELINE_SERVICE_HOST` and
-`ML_PIPELINE_SERVICE_PORT` with the details of the API server, which
-you can run using `npm run api` separately.
+`ML_PIPELINE_SERVICE_PORT` with the details of the API server.
+
+## Code Style
+
+We use [prettier](https://prettier.io/) for code formatting; our prettier config
+is [here](https://github.com/kubeflow/pipelines/blob/master/frontend/.prettierrc.yaml).
+
+To learn more about what prettier is, see [What is Prettier](https://prettier.io/docs/en/index.html).
+
+### IDE Integration
+
+- For vscode, install the plugin "Prettier - Code formatter" and it will pick
+  up this project's config automatically.
+  We recommend setting the following in [settings.json](https://code.visualstudio.com/docs/getstarted/settings#_settings-file-locations) so that vscode autoformats on save.
+  ```
+  "[typescript]": {
+    "editor.formatOnSave": true,
+    "files.trimTrailingWhitespace": false,
+  },
+  "[typescriptreact]": {
+    "editor.formatOnSave": true,
+    "files.trimTrailingWhitespace": false,
+  },
+  ```
+  Also, vscode's builtin trailing-whitespace trimming [conflicts with jest inline snapshots](https://github.com/Microsoft/vscode/issues/52711), so we recommend disabling it.
+- For others, refer to https://prettier.io/docs/en/editors.html.
+
+### Format Code Manually
+
+Run `npm run format`.
+
+### Escape hatch
+
+If there's some code that you don't want formatted by prettier, follow the
+guide [here](https://prettier.io/docs/en/ignore.html). (Most likely you won't need this.)
+
+## Api client code generation
+
+If you made any changes to protos (see backend/README), you'll need to
+regenerate the Typescript client library from swagger. We use
+swagger-codegen-cli@2.4.7, which you can get
+[here](https://repo1.maven.org/maven2/io/swagger/swagger-codegen-cli/2.4.7/).
+Make sure the jar file is somewhere on your path with the name
+swagger-codegen-cli.jar, then run `npm run apis`.
diff --git a/frontend/analyze_bundle.js b/frontend/analyze_bundle.js
index 8f7e054a901..30191da4520 100644
--- a/frontend/analyze_bundle.js
+++ b/frontend/analyze_bundle.js
@@ -14,17 +14,16 @@
  * limitations under the License.
*/ -process.env.NODE_ENV = "production" -var BundleAnalyzerPlugin = require("webpack-bundle-analyzer") - .BundleAnalyzerPlugin +process.env.NODE_ENV = 'production'; +var BundleAnalyzerPlugin = require('webpack-bundle-analyzer').BundleAnalyzerPlugin; -const webpackConfigProd = require("react-scripts-ts/config/webpack.config.prod") +const webpackConfigProd = require('react-scripts/config/webpack.config.prod'); webpackConfigProd.plugins.push( new BundleAnalyzerPlugin({ - analyzerMode: "static", - reportFilename: "report.html", - }) -) + analyzerMode: 'static', + reportFilename: 'report.html', + }), +); -require("react-scripts-ts/scripts/build") +require('react-scripts/scripts/build'); diff --git a/frontend/backstop.json b/frontend/backstop.json deleted file mode 100644 index ec1dce4a4b6..00000000000 --- a/frontend/backstop.json +++ /dev/null @@ -1,62 +0,0 @@ -{ - "id": "pipelines", - "viewports": [ - { - "width": 1024, - "height": 768 - } - ], - "onReadyScript": "steps.js", - "scenarios": [ - { - "label": "initial state", - "url": "http://localhost:3000" - }, - { - "label": "hover on first row", - "url": "http://localhost:3000", - "steps": [ - { "action": "hover", "selector": ".row" } - ] - }, - { - "label": "select one row", - "url": "http://localhost:3000", - "steps": [ - { "action": "click", "selector": ".row" } - ] - }, - { - "label": "select multiple rows", - "url": "http://localhost:3000", - "steps": [ - { "action": "click", "selector": ".row" }, - { "action": "click", "selector": ".row:nth-of-type(2) td" }, - { "action": "click", "selector": ".row:nth-of-type(5) td" } - ] - }, - { - "label": "open upload dialog", - "url": "http://localhost:3000", - "steps": [ - { "action": "click", "selector": "#uploadBtn" } - ] - } - ], - "paths": { - "bitmaps_reference": "backstop_data/bitmaps_reference", - "bitmaps_test": "backstop_data/bitmaps_test", - "engine_scripts": "backstop_data/engine_scripts", - "html_report": "backstop_data/html_report", - "ci_report": "backstop_data/ci_report" - }, - "report": ["browser"], - "engine": "puppeteer", - "engineOptions": { - "args": ["--no-sandbox"] - }, - "asyncCaptureLimit": 5, - "asyncCompareLimit": 5, - "debug": false, - "debugWindow": false -} diff --git a/frontend/backstop.ts b/frontend/backstop.ts deleted file mode 100644 index 9d58c9b39f8..00000000000 --- a/frontend/backstop.ts +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Copyright 2018 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -// tslint:disable-next-line:no-var-requires -const backstopjs = require('backstopjs'); -const url = 'http://localhost:3000'; - -const config = { - asyncCaptureLimit: 10, - asyncCompareLimit: 50, - debug: false, - debugWindow: false, - engine: 'puppeteer', - engineOptions: { - args: ['--no-sandbox'] - }, - id: 'pipelines', - onReadyScript: 'steps.js', - paths: { - bitmaps_reference: 'backstop_data/bitmaps_reference', - bitmaps_test: 'backstop_data/bitmaps_test', - ci_report: 'backstop_data/ci_report', - engine_scripts: 'backstop_data/engine_scripts', - html_report: 'backstop_data/html_report', - }, - report: ['browser'], - scenarios: [ - { - label: 'initial state', - url, - }, - { - label: 'hover on first row', - steps: [ - { action: 'hover', selector: '.tableRow' }, - ], - url, - }, - { - label: 'select one row', - steps: [ - { action: 'click', selector: '.tableRow' } - ], - url, - }, - { - label: 'select multiple rows', - steps: [ - { action: 'click', selector: '.tableRow' }, - { action: 'click', selector: `.tableRow:nth-of-type(2)` }, - { action: 'click', selector: `.tableRow:nth-of-type(5)` } - ], - url, - }, - { - label: 'open upload dialog', - steps: [ - { action: 'click', selector: '#uploadBtn' }, - { action: 'pause' } - ], - url, - } - ], - viewports: [{ width: 1024, height: 768 }], -}; - -backstopjs('test', { config }); diff --git a/frontend/backstop_data/bitmaps_reference/pipelines_hover_on_first_row_0_document_0_.png b/frontend/backstop_data/bitmaps_reference/pipelines_hover_on_first_row_0_document_0_.png deleted file mode 100644 index 48e2e8c6b8c..00000000000 Binary files a/frontend/backstop_data/bitmaps_reference/pipelines_hover_on_first_row_0_document_0_.png and /dev/null differ diff --git a/frontend/backstop_data/bitmaps_reference/pipelines_initial_state_0_document_0_.png b/frontend/backstop_data/bitmaps_reference/pipelines_initial_state_0_document_0_.png deleted file mode 100644 index 89d23500391..00000000000 Binary files a/frontend/backstop_data/bitmaps_reference/pipelines_initial_state_0_document_0_.png and /dev/null differ diff --git a/frontend/backstop_data/bitmaps_reference/pipelines_open_upload_dialog_0_document_0_.png b/frontend/backstop_data/bitmaps_reference/pipelines_open_upload_dialog_0_document_0_.png deleted file mode 100644 index 0ddb7a6f96d..00000000000 Binary files a/frontend/backstop_data/bitmaps_reference/pipelines_open_upload_dialog_0_document_0_.png and /dev/null differ diff --git a/frontend/backstop_data/bitmaps_reference/pipelines_select_multiple_rows_0_document_0_.png b/frontend/backstop_data/bitmaps_reference/pipelines_select_multiple_rows_0_document_0_.png deleted file mode 100644 index e205ae5001d..00000000000 Binary files a/frontend/backstop_data/bitmaps_reference/pipelines_select_multiple_rows_0_document_0_.png and /dev/null differ diff --git a/frontend/backstop_data/bitmaps_reference/pipelines_select_one_row_0_document_0_.png b/frontend/backstop_data/bitmaps_reference/pipelines_select_one_row_0_document_0_.png deleted file mode 100644 index 85ce03ef24a..00000000000 Binary files a/frontend/backstop_data/bitmaps_reference/pipelines_select_one_row_0_document_0_.png and /dev/null differ diff --git a/frontend/backstop_data/engine_scripts/imageStub.jpg b/frontend/backstop_data/engine_scripts/imageStub.jpg deleted file mode 100644 index 3e526b4bbbc..00000000000 Binary files a/frontend/backstop_data/engine_scripts/imageStub.jpg and /dev/null differ diff --git a/frontend/backstop_data/engine_scripts/steps.js 
b/frontend/backstop_data/engine_scripts/steps.js
deleted file mode 100644
index d6b383c0de2..00000000000
--- a/frontend/backstop_data/engine_scripts/steps.js
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Copyright 2018 Google LLC
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-module.exports = async (page, scenario) => {
-  const steps = scenario.steps;
-  if (!steps || !steps.length) {
-    return;
-  }
-  for (const s of steps) {
-    console.log('performing action: ' + s.action + ' on: ' + s.selector);
-    switch (s.action) {
-      case 'click':
-        await page.click(s.selector);
-        break;
-      case 'hover':
-        await page.hover(s.selector);
-        break;
-      case 'waitFor':
-        await page.waitFor(s.selector);
-        break;
-      case 'pause':
-        await page.waitFor(300);
-        break;
-      default:
-        throw new Error('Unknown action: ' + s.action);
-    }
-  }
-};
diff --git a/frontend/gen_licenses.js b/frontend/gen_licenses.js
index 8ac38a3a785..78032248b0e 100644
--- a/frontend/gen_licenses.js
+++ b/frontend/gen_licenses.js
@@ -29,6 +29,11 @@ const path = require('path');
 const start = path.resolve(process.cwd(), process.argv[2]);
 let licenseMissing = 0;
 const whitelist = new Map([
+  ['@jest/environment', 'third_party/jest/LICENSE'],
+  ['@jest/reporters', 'third_party/jest/LICENSE'],
+  ['@jest/test-sequencer', 'third_party/jest/LICENSE'],
+  ['@jest/transform', 'third_party/jest/LICENSE'],
+  ['eslint-module-utils', 'third_party/eslint/LICENSE'],
   ['@kubernetes/client-node', 'third_party/@kubernetes/client-node/LICENSE'],
   ['expect', 'third_party/jest/LICENSE'],
   ['jest-config', 'third_party/jest/LICENSE'],
@@ -47,6 +52,7 @@ const whitelist = new Map([
   ['jest-runtime', 'third_party/jest/LICENSE'],
   ['jest-snapshot', 'third_party/jest/LICENSE'],
   ['jest-util', 'third_party/jest/LICENSE'],
+  ['mamacro', 'third_party/mamacro/LICENSE'],
 ]);
 
 licenseChecker.init({
diff --git a/frontend/global-setup.js b/frontend/global-setup.js
new file mode 100644
index 00000000000..0932e9d8652
--- /dev/null
+++ b/frontend/global-setup.js
@@ -0,0 +1,6 @@
+export default () => {
+  // This lets unit tests run in the UTC timezone consistently, regardless of
+  // the developer machine's timezone.
+  // Reference: https://stackoverflow.com/a/56482581
+  process.env.TZ = 'UTC';
+};
diff --git a/frontend/mock-backend/fixed-data.ts b/frontend/mock-backend/fixed-data.ts
index 8f5e273091d..25704bc4557 100644
--- a/frontend/mock-backend/fixed-data.ts
+++ b/frontend/mock-backend/fixed-data.ts
@@ -12,16 +12,16 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
+import { ApiExperiment } from '../src/apis/experiment'; +import { ApiJob } from '../src/apis/job'; +import { ApiPipeline } from '../src/apis/pipeline'; +import { ApiRelationship, ApiResourceType, ApiRunDetail, RunMetricFormat } from '../src/apis/run'; import helloWorldRun from './hello-world-runtime'; import helloWorldWithStepsRun from './hello-world-with-steps-runtime'; +import jsonRun from './json-runtime'; import coinflipRun from './mock-coinflip-runtime'; import errorRun from './mock-error-runtime'; import xgboostRun from './mock-xgboost-runtime'; -import jsonRun from './json-runtime'; -import { ApiExperiment } from '../src/apis/experiment'; -import { ApiJob } from '../src/apis/job'; -import { ApiPipeline } from '../src/apis/pipeline'; -import { ApiRunDetail, ApiResourceType, ApiRelationship, RunMetricFormat } from '../src/apis/run'; function padStartTwoZeroes(str: string): string { let padded = str || ''; @@ -51,8 +51,8 @@ const pipelines: ApiPipeline[] = [ }, { name: 'output', - } - ] + }, + ], }, { created_at: new Date('2018-04-02T20:59:29.000Z'), @@ -71,8 +71,8 @@ const pipelines: ApiPipeline[] = [ }, { name: 'output', - } - ] + }, + ], }, { created_at: new Date('2018-04-03T20:58:23.000Z'), @@ -102,8 +102,23 @@ const pipelines: ApiPipeline[] = [ }, { name: 'output', - } - ] + }, + ], + }, + { + created_at: new Date('2019-10-25T20:59:23.000Z'), + description: + 'A pipeline using [markdown](https://en.wikipedia.org/wiki/Markdown) for description.', + id: '8fbe3bd6-a01f-11e8-98d0-529269fb1461', + name: 'Markdown description', + parameters: [], + }, + { + created_at: new Date('2020-01-22T20:59:23.000Z'), + description: 'A pipeline with a long name', + id: '9fbe3bd6-a01f-11e8-98d0-529269fb1462', + name: 'A pipeline with a very very very very very very very long name', + parameters: [], }, ]; @@ -134,25 +149,27 @@ const jobs: ApiJob[] = [ { name: 'output', value: 'gs://path-to-my-project', - } + }, ], pipeline_id: pipelines[0].id, pipeline_name: pipelines[0].name, }, - resource_references: [{ - key: { - id: '275ea11d-ac63-4ce3-bc33-ec81981ed56a', - type: ApiResourceType.EXPERIMENT, + resource_references: [ + { + key: { + id: '275ea11d-ac63-4ce3-bc33-ec81981ed56a', + type: ApiResourceType.EXPERIMENT, + }, + relationship: ApiRelationship.OWNER, }, - relationship: ApiRelationship.OWNER, - }], + ], status: 'Failed:Succeeded', trigger: { cron_schedule: { cron: '30 1 * * * ?', end_time: new Date('2018-04-01T21:58:23.000Z'), start_time: new Date('2018-03-01T21:58:23.000Z'), - } + }, }, updated_at: new Date('2018-03-01T21:58:23.000Z'), }, @@ -176,18 +193,20 @@ const jobs: ApiJob[] = [ { name: 'output', value: 'some-output-path', - } + }, ], pipeline_id: pipelines[1].id, pipeline_name: pipelines[1].name, }, - resource_references: [{ - key: { - id: '275ea11d-ac63-4ce3-bc33-ec81981ed56a', - type: ApiResourceType.EXPERIMENT, + resource_references: [ + { + key: { + id: '275ea11d-ac63-4ce3-bc33-ec81981ed56a', + type: ApiResourceType.EXPERIMENT, + }, + relationship: ApiRelationship.OWNER, }, - relationship: ApiRelationship.OWNER, - }], + ], status: 'Succeeded', trigger: { cron_schedule: { @@ -221,24 +240,26 @@ const jobs: ApiJob[] = [ { name: 'output', value: 'gs://path-to-my-other-project', - } + }, ], pipeline_id: pipelines[2].id, pipeline_name: pipelines[2].name, }, - resource_references: [{ - key: { - id: '275ea11d-ac63-4ce3-bc33-ec81981ed56a', - type: ApiResourceType.EXPERIMENT, + resource_references: [ + { + key: { + id: '275ea11d-ac63-4ce3-bc33-ec81981ed56a', + type: ApiResourceType.EXPERIMENT, 
+ }, + relationship: ApiRelationship.OWNER, }, - relationship: ApiRelationship.OWNER, - }], + ], status: 'Succeeded', trigger: { periodic_schedule: { end_time: new Date('2018-03-03T23:58:23.000Z'), interval_second: '439652', - } + }, }, updated_at: new Date('2018-03-03T23:58:23.000Z'), }, @@ -258,11 +279,17 @@ const experiments: ApiExperiment[] = [ name: 'Kubeflow Pipelines Experiment', }, { - description: 'A different Pipeline experiment used to group runs. ' + + description: + 'A different Pipeline experiment used to group runs. ' + 'This experiment also has a very long description, which should overflow the container card.', id: 'a4d4f8c6-ce9c-4200-a92e-c48ec759b733', name: 'Experiment Number 2', }, + { + description: 'This experiment has a very very very long name', + id: 'z4d4f8c6-ce9c-4200-a92e-c48ec759b733', + name: 'Experiment with a very very very very very very very very very very very very long name', + }, ]; const runs: ApiRunDetail[] = [ @@ -287,7 +314,7 @@ const runs: ApiRunDetail[] = [ name: 'log_loss', node_id: 'coinflip-recursive-q7dqb', number_value: -0.573, - } + }, ], name: 'coinflip-recursive-run-lknlfs3', pipeline_spec: { @@ -298,13 +325,15 @@ const runs: ApiRunDetail[] = [ pipeline_id: pipelines[0].id, pipeline_name: pipelines[0].name, }, - resource_references: [{ - key: { - id: '275ea11d-ac63-4ce3-bc33-ec81981ed56a', - type: ApiResourceType.EXPERIMENT, + resource_references: [ + { + key: { + id: '275ea11d-ac63-4ce3-bc33-ec81981ed56a', + type: ApiResourceType.EXPERIMENT, + }, + relationship: ApiRelationship.OWNER, }, - relationship: ApiRelationship.OWNER, - }], + ], scheduled_at: new Date('2018-03-17T20:58:23.000Z'), status: 'Failed:Succeeded', }, @@ -337,13 +366,15 @@ const runs: ApiRunDetail[] = [ pipeline_id: pipelines[0].id, pipeline_name: pipelines[0].name, }, - resource_references: [{ - key: { - id: '275ea11d-ac63-4ce3-bc33-ec81981ed56a', - type: ApiResourceType.EXPERIMENT, + resource_references: [ + { + key: { + id: '275ea11d-ac63-4ce3-bc33-ec81981ed56a', + type: ApiResourceType.EXPERIMENT, + }, + relationship: ApiRelationship.OWNER, }, - relationship: ApiRelationship.OWNER, - }], + ], scheduled_at: new Date('2018-04-17T21:00:00.000Z'), status: 'Error', }, @@ -356,12 +387,14 @@ const runs: ApiRunDetail[] = [ created_at: new Date('2018-05-17T21:58:23.000Z'), description: 'A simple run with json input', id: '183ac01f-dc26-4ebf-b817-7b3f96fdc3ac', - metrics: [{ - format: RunMetricFormat.PERCENTAGE, - name: 'accuracy', - node_id: 'json-12abc', - number_value: 0.5423, - }], + metrics: [ + { + format: RunMetricFormat.PERCENTAGE, + name: 'accuracy', + node_id: 'json-12abc', + number_value: 0.5423, + }, + ], name: 'json-12abc', pipeline_spec: { parameters: [ @@ -371,16 +404,18 @@ const runs: ApiRunDetail[] = [ pipeline_id: pipelines[2].id, pipeline_name: pipelines[2].name, }, - resource_references: [{ - key: { - id: 'a4d4f8c6-ce9c-4200-a92e-c48ec759b733', - type: ApiResourceType.EXPERIMENT, + resource_references: [ + { + key: { + id: 'a4d4f8c6-ce9c-4200-a92e-c48ec759b733', + type: ApiResourceType.EXPERIMENT, + }, + relationship: ApiRelationship.OWNER, }, - relationship: ApiRelationship.OWNER, - }], + ], scheduled_at: new Date('2018-05-17T21:58:23.000Z'), status: 'Running', - } + }, }, { pipeline_runtime: { @@ -390,12 +425,14 @@ const runs: ApiRunDetail[] = [ created_at: new Date('2018-05-17T21:58:23.000Z'), description: 'A simple hello world run', id: 'fa5d897e-88d3-4dfc-b189-9dea6947c9bc', - metrics: [{ - format: RunMetricFormat.PERCENTAGE, - name: 'accuracy', - 
node_id: 'hello-world-7sm94', - number_value: 0.5423, - }], + metrics: [ + { + format: RunMetricFormat.PERCENTAGE, + name: 'accuracy', + node_id: 'hello-world-7sm94', + number_value: 0.5423, + }, + ], name: 'hello-world-7sm94', pipeline_spec: { parameters: [ @@ -405,13 +442,15 @@ const runs: ApiRunDetail[] = [ pipeline_id: pipelines[2].id, pipeline_name: pipelines[2].name, }, - resource_references: [{ - key: { - id: 'a4d4f8c6-ce9c-4200-a92e-c48ec759b733', - type: ApiResourceType.EXPERIMENT, + resource_references: [ + { + key: { + id: 'a4d4f8c6-ce9c-4200-a92e-c48ec759b733', + type: ApiResourceType.EXPERIMENT, + }, + relationship: ApiRelationship.OWNER, }, - relationship: ApiRelationship.OWNER, - }], + ], scheduled_at: new Date('2018-05-17T21:58:23.000Z'), status: 'Running', }, @@ -425,12 +464,14 @@ const runs: ApiRunDetail[] = [ description: 'A simple hello world run, but with steps. Not attached to any experiment', finished_at: new Date('2018-06-18T21:00:33.000Z'), id: '21afb688-7597-47e9-b6c3-35d3145fe5e1', - metrics: [{ - format: RunMetricFormat.PERCENTAGE, - name: 'accuracy', - node_id: 'hello-world-61985dbf-4299-458b-a183-1f2c2436c21c', - number_value: 0.43, - }], + metrics: [ + { + format: RunMetricFormat.PERCENTAGE, + name: 'accuracy', + node_id: 'hello-world-61985dbf-4299-458b-a183-1f2c2436c21c', + number_value: 0.43, + }, + ], name: 'hello-world-with-steps-kajnkv4', pipeline_spec: { parameters: [ @@ -487,13 +528,15 @@ const runs: ApiRunDetail[] = [ pipeline_id: pipelines[1].id, pipeline_name: pipelines[1].name, }, - resource_references: [{ - key: { - id: '275ea11d-ac63-4ce3-bc33-ec81981ed56a', - type: ApiResourceType.EXPERIMENT, + resource_references: [ + { + key: { + id: '275ea11d-ac63-4ce3-bc33-ec81981ed56a', + type: ApiResourceType.EXPERIMENT, + }, + relationship: ApiRelationship.OWNER, }, - relationship: ApiRelationship.OWNER, - }], + ], scheduled_at: new Date('2018-07-17T23:58:23.000Z'), status: 'Pending', }, @@ -504,14 +547,15 @@ const runs: ApiRunDetail[] = [ }, run: { created_at: new Date('2018-08-18T20:58:23.000Z'), - description: 'An xgboost evaluation run with a very long description that includes:' - + ' Lorem ipsum dolor sit amet, consectetur adipiscing elit. Praesent fermentum commodo' - + ' libero, a imperdiet ipsum cursus id. Nullam odio sem, ornare id sollicitudin ac,' - + ' rutrum in dolor. Integer interdum lacus in ex rutrum elementum. Mauris gravida feugiat' - + ' enim, ac dapibus augue rhoncus in. Integer vel tempus nulla. Cras sed ultrices dolor.' - + ' Ut nec dapibus eros, vitae iaculis nunc. In aliquet accumsan rhoncus. Donec vitae' - + ' ipsum a tellus fermentum pharetra in in neque. Pellentesque consequat felis non est' - + ' vulputate pellentesque. Aliquam eget cursus enim.', + description: + 'An xgboost evaluation run with a very long description that includes:' + + ' Lorem ipsum dolor sit amet, consectetur adipiscing elit. Praesent fermentum commodo' + + ' libero, a imperdiet ipsum cursus id. Nullam odio sem, ornare id sollicitudin ac,' + + ' rutrum in dolor. Integer interdum lacus in ex rutrum elementum. Mauris gravida feugiat' + + ' enim, ac dapibus augue rhoncus in. Integer vel tempus nulla. Cras sed ultrices dolor.' + + ' Ut nec dapibus eros, vitae iaculis nunc. In aliquet accumsan rhoncus. Donec vitae' + + ' ipsum a tellus fermentum pharetra in in neque. Pellentesque consequat felis non est' + + ' vulputate pellentesque. 
Aliquam eget cursus enim.', finished_at: new Date('2018-08-20T21:01:23.000Z'), id: '7fc01714-4a13-4c05-8044-a8a72c14253b', metrics: [ @@ -526,9 +570,10 @@ const runs: ApiRunDetail[] = [ name: 'log_loss', node_id: 'xgboost-training-gzkm9-2365787662', number_value: -0.123, - } + }, ], - name: 'xgboost-run-with-a-veeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeery-' + + name: + 'xgboost-run-with-a-veeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeery-' + 'loooooooooooooooooooooooooooong-name-aifk298', pipeline_spec: { parameters: [ @@ -538,13 +583,15 @@ const runs: ApiRunDetail[] = [ pipeline_id: pipelines[1].id, pipeline_name: pipelines[1].name, }, - resource_references: [{ - key: { - id: 'a4d4f8c6-ce9c-4200-a92e-c48ec759b733', - type: ApiResourceType.EXPERIMENT, + resource_references: [ + { + key: { + id: 'a4d4f8c6-ce9c-4200-a92e-c48ec759b733', + type: ApiResourceType.EXPERIMENT, + }, + relationship: ApiRelationship.OWNER, }, - relationship: ApiRelationship.OWNER, - }], + ], scheduled_at: new Date('2018-08-18T20:58:23.000Z'), status: 'Succeeded', }, @@ -570,7 +617,7 @@ const runs: ApiRunDetail[] = [ name: 'log_loss', node_id: 'hello-world-7sm94', number_value: -0.223, - } + }, ], name: 'hello-world-with-pipeline', pipeline_spec: { @@ -580,13 +627,15 @@ const runs: ApiRunDetail[] = [ ], workflow_manifest: JSON.stringify(helloWorldRun), }, - resource_references: [{ - key: { - id: 'a4d4f8c6-ce9c-4200-a92e-c48ec759b733', - type: ApiResourceType.EXPERIMENT, + resource_references: [ + { + key: { + id: 'a4d4f8c6-ce9c-4200-a92e-c48ec759b733', + type: ApiResourceType.EXPERIMENT, + }, + relationship: ApiRelationship.OWNER, }, - relationship: ApiRelationship.OWNER, - }], + ], scheduled_at: new Date('2018-08-18T20:58:23.000Z'), status: 'Succeeded', }, @@ -624,7 +673,9 @@ function generateNRuns(): ApiRunDetail[] { run: { created_at: new Date('2018-02-12T20:' + padStartTwoZeroes(i.toString()) + ':23.000Z'), description: 'The description of a dummy run', - finished_at: new Date('2018-02-12T20:' + padStartTwoZeroes(((2 * i) % 60).toString()) + ':25.000Z'), + finished_at: new Date( + '2018-02-12T20:' + padStartTwoZeroes(((2 * i) % 60).toString()) + ':25.000Z', + ), id: 'Some-run-id-' + i, metrics: [ { @@ -655,13 +706,15 @@ function generateNRuns(): ApiRunDetail[] { pipeline_id: 'Some-pipeline-id-' + i, pipeline_name: 'Kubeflow Pipeline number ' + i, }, - resource_references: [{ - key: { - id: '275ea11d-ac63-4ce3-bc33-ec81981ed56a', - type: ApiResourceType.EXPERIMENT, + resource_references: [ + { + key: { + id: '275ea11d-ac63-4ce3-bc33-ec81981ed56a', + type: ApiResourceType.EXPERIMENT, + }, + relationship: ApiRelationship.OWNER, }, - relationship: ApiRelationship.OWNER, - }], + ], scheduled_at: new Date('2018-02-12T20:' + padStartTwoZeroes(i.toString()) + ':23.000Z'), status: 'Succeeded', }, @@ -697,17 +750,19 @@ function generateNJobs(): ApiJob[] { { name: 'output', value: 'gs://path-to-my-project', - } + }, ], pipeline_id: pipelines[i % pipelines.length].id, }, - resource_references: [{ - key: { - id: '7fc01714-4a13-4c05-5902-a8a72c14253b', - type: ApiResourceType.EXPERIMENT, + resource_references: [ + { + key: { + id: '7fc01714-4a13-4c05-5902-a8a72c14253b', + type: ApiResourceType.EXPERIMENT, + }, + relationship: ApiRelationship.OWNER, }, - relationship: ApiRelationship.OWNER, - }], + ], status: 'Succeeded', trigger: undefined, updated_at: new Date('2018-02-01T20:' + padStartTwoZeroes(i.toString()) + ':23.000Z'), diff --git a/frontend/mock-backend/hello-world-runtime.ts b/frontend/mock-backend/hello-world-runtime.ts index 
02f554ee1f6..ab74a1d601a 100644 --- a/frontend/mock-backend/hello-world-runtime.ts +++ b/frontend/mock-backend/hello-world-runtime.ts @@ -24,8 +24,8 @@ export default { creationTimestamp: '2018-06-06T00:04:49Z', labels: { 'workflows.argoproj.io/completed': 'true', - 'workflows.argoproj.io/phase': 'Succeeded' - } + 'workflows.argoproj.io/phase': 'Succeeded', + }, }, spec: { templates: [ @@ -37,25 +37,21 @@ export default { container: { name: '', image: 'docker/whalesay:latest', - command: [ - 'cowsay' - ], - args: [ - '{{workflow.parameters.message}}' - ], - resources: {} - } - } + command: ['cowsay'], + args: ['{{workflow.parameters.message}}'], + resources: {}, + }, + }, ], entrypoint: 'whalesay1', arguments: { parameters: [ { name: 'message', - value: 'hello world' - } - ] - } + value: 'hello world', + }, + ], + }, }, status: { phase: 'Succeeded', @@ -70,8 +66,8 @@ export default { templateName: 'whalesay1', phase: 'Succeeded', startedAt: '2018-06-06T00:04:49Z', - finishedAt: '2018-06-06T00:05:23Z' - } - } - } + finishedAt: '2018-06-06T00:05:23Z', + }, + }, + }, }; diff --git a/frontend/mock-backend/hello-world-with-steps-runtime.ts b/frontend/mock-backend/hello-world-with-steps-runtime.ts index a7c672a87a7..3c581afe112 100644 --- a/frontend/mock-backend/hello-world-with-steps-runtime.ts +++ b/frontend/mock-backend/hello-world-with-steps-runtime.ts @@ -18,14 +18,15 @@ export default { name: 'hello-world-61985dbf-4299-458b-a183-1f2c2436c21c', namespace: 'default', // tslint:disable-next-line:max-line-length - selfLink: '/apis/argoproj.io/v1alpha1/namespaces/default/workflows/hello-world-61985dbf-4299-458b-a183-1f2c2436c21c', + selfLink: + '/apis/argoproj.io/v1alpha1/namespaces/default/workflows/hello-world-61985dbf-4299-458b-a183-1f2c2436c21c', uid: 'ef2a4a61-6e84-11e8-bba7-42010a8a0fc2', resourceVersion: '10690686', creationTimestamp: '2018-06-12T21:09:46Z', labels: { 'workflows.argoproj.io/completed': 'true', - 'workflows.argoproj.io/phase': 'Succeeded' - } + 'workflows.argoproj.io/phase': 'Succeeded', + }, }, spec: { templates: [ @@ -39,10 +40,10 @@ export default { { name: 'say', template: 'say', - arguments: {} - } - ] - ] + arguments: {}, + }, + ], + ], }, { name: 'say', @@ -52,18 +53,14 @@ export default { container: { name: '', image: 'docker/whalesay:latest', - command: [ - 'cowsay' - ], - args: [ - 'hello world' - ], - resources: {} - } - } + command: ['cowsay'], + args: ['hello world'], + resources: {}, + }, + }, ], entrypoint: 'whalesay', - arguments: {} + arguments: {}, }, status: { phase: 'Succeeded', @@ -79,9 +76,7 @@ export default { phase: 'Succeeded', startedAt: '2018-06-12T21:09:46Z', finishedAt: '2018-06-12T21:09:47Z', - children: [ - 'hello-world-61985dbf-4299-458b-a183-1f2c2436c21c-2303694156' - ] + children: ['hello-world-61985dbf-4299-458b-a183-1f2c2436c21c-2303694156'], }, 'hello-world-61985dbf-4299-458b-a183-1f2c2436c21c-2303694156': { id: 'hello-world-61985dbf-4299-458b-a183-1f2c2436c21c-2303694156', @@ -92,9 +87,7 @@ export default { boundaryID: 'hello-world-61985dbf-4299-458b-a183-1f2c2436c21c', startedAt: '2018-06-12T21:09:46Z', finishedAt: '2018-06-12T21:09:47Z', - children: [ - 'hello-world-61985dbf-4299-458b-a183-1f2c2436c21c-3584189705' - ] + children: ['hello-world-61985dbf-4299-458b-a183-1f2c2436c21c-3584189705'], }, 'hello-world-61985dbf-4299-458b-a183-1f2c2436c21c-3584189705': { id: 'hello-world-61985dbf-4299-458b-a183-1f2c2436c21c-3584189705', @@ -105,8 +98,8 @@ export default { phase: 'Succeeded', boundaryID: 
'hello-world-61985dbf-4299-458b-a183-1f2c2436c21c', startedAt: '2018-06-12T21:09:46Z', - finishedAt: '2018-06-12T21:09:47Z' - } - } - } + finishedAt: '2018-06-12T21:09:47Z', + }, + }, + }, }; diff --git a/frontend/mock-backend/integration-test-runtime.ts b/frontend/mock-backend/integration-test-runtime.ts index 6716f345487..b63cd071bee 100644 --- a/frontend/mock-backend/integration-test-runtime.ts +++ b/frontend/mock-backend/integration-test-runtime.ts @@ -17,7 +17,8 @@ export default { metadata: { name: 'job-cloneofhelloworldls94q-1-3667110102', namespace: 'kubeflow', - selfLink: '/apis/argoproj.io/v1alpha1/namespaces/kubeflow/workflows/job-cloneofhelloworldls94q-1-3667110102', + selfLink: + '/apis/argoproj.io/v1alpha1/namespaces/kubeflow/workflows/job-cloneofhelloworldls94q-1-3667110102', uid: '55dc2b6d-d688-11e8-83db-42010a800093', resourceVersion: '128069', creationTimestamp: '2018-10-23T05:56:07Z', @@ -27,7 +28,7 @@ export default { 'scheduledworkflows.kubeflow.org/workflowEpoch': '1540274157', 'scheduledworkflows.kubeflow.org/workflowIndex': '1', 'workflows.argoproj.io/completed': 'true', - 'workflows.argoproj.io/phase': 'Succeeded' + 'workflows.argoproj.io/phase': 'Succeeded', }, ownerReferences: [ { @@ -36,9 +37,9 @@ export default { name: 'job-cloneofhelloworldls94q', uid: '4fac8e0f-d688-11e8-83db-42010a800093', controller: true, - blockOwnerDeletion: true - } - ] + blockOwnerDeletion: true, + }, + ], }, spec: { templates: [ @@ -56,10 +57,10 @@ export default { parameters: [ { name: 'message', - value: '{{workflow.parameters.message}} from node: A' - } - ] - } + value: '{{workflow.parameters.message}} from node: A', + }, + ], + }, }, { name: 'B', @@ -68,13 +69,11 @@ export default { parameters: [ { name: 'message', - value: '{{workflow.parameters.message}} from node: B' - } - ] + value: '{{workflow.parameters.message}} from node: B', + }, + ], }, - dependencies: [ - 'A' - ] + dependencies: ['A'], }, { name: 'C', @@ -83,13 +82,11 @@ export default { parameters: [ { name: 'message', - value: '{{workflow.parameters.message}} from node: C' - } - ] + value: '{{workflow.parameters.message}} from node: C', + }, + ], }, - dependencies: [ - 'A' - ] + dependencies: ['A'], }, { name: 'D', @@ -98,49 +95,43 @@ export default { parameters: [ { name: 'message', - value: '{{workflow.parameters.message}} from node: D' - } - ] + value: '{{workflow.parameters.message}} from node: D', + }, + ], }, - dependencies: [ - 'B', - 'C' - ] - } - ] - } + dependencies: ['B', 'C'], + }, + ], + }, }, { name: 'echo', inputs: { parameters: [ { - name: 'message' - } - ] + name: 'message', + }, + ], }, outputs: {}, metadata: {}, container: { name: '', image: 'alpine:3.7', - command: [ - 'echo', - '{{inputs.parameters.message}}' - ], - resources: {} - } - } + command: ['echo', '{{inputs.parameters.message}}'], + resources: {}, + }, + }, ], entrypoint: 'diamond', arguments: { parameters: [ { name: 'message', - value: 'hello world' - } - ] - } + value: 'hello world', + }, + ], + }, }, status: { phase: 'Succeeded', @@ -156,12 +147,8 @@ export default { phase: 'Succeeded', startedAt: '2018-10-23T05:56:07Z', finishedAt: '2018-10-23T05:56:25Z', - children: [ - 'job-cloneofhelloworldls94q-1-3667110102-3867833025' - ], - outboundNodes: [ - 'job-cloneofhelloworldls94q-1-3667110102-3918165882' - ] + children: ['job-cloneofhelloworldls94q-1-3667110102-3867833025'], + outboundNodes: ['job-cloneofhelloworldls94q-1-3667110102-3918165882'], }, 'job-cloneofhelloworldls94q-1-3667110102-3817500168': { id: 
'job-cloneofhelloworldls94q-1-3667110102-3817500168', @@ -177,13 +164,11 @@ export default { parameters: [ { name: 'message', - value: 'hello world from node: B' - } - ] + value: 'hello world from node: B', + }, + ], }, - children: [ - 'job-cloneofhelloworldls94q-1-3667110102-3918165882' - ] + children: ['job-cloneofhelloworldls94q-1-3667110102-3918165882'], }, 'job-cloneofhelloworldls94q-1-3667110102-3834277787': { id: 'job-cloneofhelloworldls94q-1-3667110102-3834277787', @@ -199,13 +184,11 @@ export default { parameters: [ { name: 'message', - value: 'hello world from node: C' - } - ] + value: 'hello world from node: C', + }, + ], }, - children: [ - 'job-cloneofhelloworldls94q-1-3667110102-3918165882' - ] + children: ['job-cloneofhelloworldls94q-1-3667110102-3918165882'], }, 'job-cloneofhelloworldls94q-1-3667110102-3867833025': { id: 'job-cloneofhelloworldls94q-1-3667110102-3867833025', @@ -221,14 +204,14 @@ export default { parameters: [ { name: 'message', - value: 'hello world from node: A' - } - ] + value: 'hello world from node: A', + }, + ], }, children: [ 'job-cloneofhelloworldls94q-1-3667110102-3817500168', - 'job-cloneofhelloworldls94q-1-3667110102-3834277787' - ] + 'job-cloneofhelloworldls94q-1-3667110102-3834277787', + ], }, 'job-cloneofhelloworldls94q-1-3667110102-3918165882': { id: 'job-cloneofhelloworldls94q-1-3667110102-3918165882', @@ -244,11 +227,11 @@ export default { parameters: [ { name: 'message', - value: 'hello world from node: D' - } - ] - } - } - } - } + value: 'hello world from node: D', + }, + ], + }, + }, + }, + }, }; diff --git a/frontend/mock-backend/json-runtime.ts b/frontend/mock-backend/json-runtime.ts index f4f8351e107..f68c4ff98e0 100644 --- a/frontend/mock-backend/json-runtime.ts +++ b/frontend/mock-backend/json-runtime.ts @@ -24,8 +24,8 @@ export default { creationTimestamp: '2018-06-06T00:04:49Z', labels: { 'workflows.argoproj.io/completed': 'true', - 'workflows.argoproj.io/phase': 'Succeeded' - } + 'workflows.argoproj.io/phase': 'Succeeded', + }, }, spec: { templates: [ @@ -37,25 +37,21 @@ export default { container: { name: '', image: 'docker/whalesay:latest', - command: [ - 'cowsay' - ], - args: [ - '{{workflow.parameters.message}}' - ], - resources: {} - } - } + command: ['cowsay'], + args: ['{{workflow.parameters.message}}'], + resources: {}, + }, + }, ], entrypoint: 'whalesay1', arguments: { parameters: [ { name: 'message', - value: 'hello world' - } - ] - } + value: 'hello world', + }, + ], + }, }, status: { phase: 'Succeeded', @@ -76,24 +72,20 @@ export default { { name: 'JSON Data', value: JSON.stringify({ - 'string1': 'a', - 'string2': 'b', - 'number1': 1, - 'number2': 2.2, - 'object': { - 'string': 'a', - 'number': 2 + string1: 'a', + string2: 'b', + number1: 1, + number2: 2.2, + object: { + string: 'a', + number: 2, }, - 'array': [ - 'a', - 'b', - 'c' - ] - }) - } - ] - } - } - } - } + array: ['a', 'b', 'c'], + }), + }, + ], + }, + }, + }, + }, }; diff --git a/frontend/mock-backend/mock-api-middleware.ts b/frontend/mock-backend/mock-api-middleware.ts index 4b4d6e0f3c3..c05f205b199 100644 --- a/frontend/mock-backend/mock-api-middleware.ts +++ b/frontend/mock-backend/mock-api-middleware.ts @@ -17,7 +17,7 @@ import * as express from 'express'; import * as fs from 'fs'; import RunUtils from '../src/lib/RunUtils'; import helloWorldRuntime from './integration-test-runtime'; -import proxyMiddleware from '../server/proxy-middleware'; +import proxyMiddleware from './proxy-middleware'; import { ApiFilter, PredicateOp } from '../src/apis/filter'; 
import { ApiListExperimentsResponse, ApiExperiment } from '../src/apis/experiment'; import { ApiListJobsResponse, ApiJob } from '../src/apis/job'; @@ -53,7 +53,6 @@ interface BaseResource { // tslint:disable-next-line:no-default-export export default (app: express.Application) => { - app.use((req, _, next) => { // tslint:disable-next-line:no-console console.info(req.method + ' ' + req.originalUrl); @@ -71,7 +70,7 @@ export default (app: express.Application) => { apiServerCommitHash: 'd3c4add0a95e930c70a330466d0923827784eb9a', apiServerReady: true, buildDate: 'Wed Jan 9 19:40:24 UTC 2019', - frontendCommitHash: '8efb2fcff9f666ba5b101647e909dc9c6889cecb' + frontendCommitHash: '8efb2fcff9f666ba5b101647e909dc9c6889cecb', }); }); @@ -79,7 +78,10 @@ export default (app: express.Application) => { res.sendStatus(200); }); - function getSortKeyAndOrder(defaultSortKey: string, queryParam?: string): { desc: boolean, key: string } { + function getSortKeyAndOrder( + defaultSortKey: string, + queryParam?: string, + ): { desc: boolean; key: string } { let key = defaultSortKey; let desc = false; @@ -88,8 +90,10 @@ export default (app: express.Application) => { key = keyParts[0]; // Check that the key is properly formatted. - if (keyParts.length > 2 || - (keyParts.length === 2 && keyParts[1] !== 'asc' && keyParts[1] !== 'desc')) { + if ( + keyParts.length > 2 || + (keyParts.length === 2 && keyParts[1] !== 'asc' && keyParts[1] !== 'desc') + ) { throw new Error(`Invalid sort string: ${queryParam}`); } @@ -124,7 +128,7 @@ export default (app: express.Application) => { return result * (desc ? -1 : 1); }); - const start = (req.query.page_token ? +req.query.page_token : 0); + const start = req.query.page_token ? +req.query.page_token : 0; const end = start + (+req.query.page_size || 20); response.jobs = jobs.slice(start, end); @@ -161,7 +165,7 @@ export default (app: express.Application) => { return result * (desc ? -1 : 1); }); - const start = (req.query.pageToken ? +req.query.pageToken : 0); + const start = req.query.pageToken ? 
+req.query.pageToken : 0; const end = start + (+req.query.pageSize || 20); response.experiments = experiments.slice(start, end); @@ -175,7 +179,7 @@ export default (app: express.Application) => { app.post(v1beta1Prefix + '/experiments', (req, res) => { const experiment: ApiExperiment = req.body; if (fixedData.experiments.find(e => e.name!.toLowerCase() === experiment.name!.toLowerCase())) { - res.status(404).send('An experiment with teh same name already exists'); + res.status(404).send('An experiment with the same name already exists'); return; } experiment.id = 'new-experiment-' + (fixedData.experiments.length + 1); @@ -186,10 +190,9 @@ export default (app: express.Application) => { }, 1000); }); - app.get(v1beta1Prefix + '/experiments/:eid', (req, res) => { res.header('Content-Type', 'application/json'); - const experiment = fixedData.experiments.find((exp) => exp.id === req.params.eid); + const experiment = fixedData.experiments.find(exp => exp.id === req.params.eid); if (!experiment) { res.status(404).send(`No experiment was found with ID: ${req.params.eid}`); return; @@ -227,7 +230,7 @@ export default (app: express.Application) => { res.header('Content-Type', 'application/json'); switch (req.method) { case 'DELETE': - const i = fixedData.jobs.findIndex((j) => j.id === req.params.jid); + const i = fixedData.jobs.findIndex(j => j.id === req.params.jid); if (fixedData.jobs[i].name!.startsWith('Cannot be deleted')) { res.status(502).send(`Deletion failed for job: '${fixedData.jobs[i].name}'`); } else { @@ -237,7 +240,7 @@ export default (app: express.Application) => { } break; case 'GET': - const job = fixedData.jobs.find((j) => j.id === req.params.jid); + const job = fixedData.jobs.find(j => j.id === req.params.jid); if (job) { res.json(job); } else { @@ -257,15 +260,20 @@ export default (app: express.Application) => { runs: [], }; - let runs: ApiRun[] = fixedData.runs.map((r) => r.run!); + let runs: ApiRun[] = fixedData.runs.map(r => r.run!); if (req.query.filter) { runs = filterResources(runs, req.query.filter); } if (req.query['resource_reference_key.type'] === ApiResourceType.EXPERIMENT) { - runs = runs.filter((r) => RunUtils.getAllExperimentReferences(r) - .some((ref) => ref.key && ref.key.id && ref.key.id === req.query['resource_reference_key.id'] || false)); + runs = runs.filter(r => + RunUtils.getAllExperimentReferences(r).some( + ref => + (ref.key && ref.key.id && ref.key.id === req.query['resource_reference_key.id']) || + false, + ), + ); } const { desc, key } = getSortKeyAndOrder(RunSortKeys.CREATED_AT, req.query.sort_by); @@ -281,7 +289,7 @@ export default (app: express.Application) => { return result * (desc ? -1 : 1); }); - const start = (req.query.page_token ? +req.query.page_token : 0); + const start = req.query.page_token ? +req.query.page_token : 0; const end = start + (+req.query.page_size || 20); response.runs = runs.slice(start, end); @@ -294,7 +302,7 @@ export default (app: express.Application) => { app.get(v1beta1Prefix + '/runs/:rid', (req, res) => { const rid = req.params.rid; - const run = fixedData.runs.find((r) => r.run!.id === rid); + const run = fixedData.runs.find(r => r.run!.id === rid); if (!run) { res.status(404).send('Cannot find a run with id: ' + rid); return; @@ -327,8 +335,8 @@ export default (app: express.Application) => { } const runDetail = fixedData.runs.find(r => r.run!.id === req.params.rid); if (runDetail) { - runDetail.run!.storage_state = req.params.method === 'archive' ? 
- RunStorageState.ARCHIVED : RunStorageState.AVAILABLE; + runDetail.run!.storage_state = + req.params.method === 'archive' ? RunStorageState.ARCHIVED : RunStorageState.AVAILABLE; res.json({}); } else { res.status(500).send('Cannot find a run with id ' + req.params.rid); @@ -337,7 +345,7 @@ export default (app: express.Application) => { app.post(v1beta1Prefix + '/jobs/:jid/enable', (req, res) => { setTimeout(() => { - const job = fixedData.jobs.find((j) => j.id === req.params.jid); + const job = fixedData.jobs.find(j => j.id === req.params.jid); if (job) { job.enabled = true; res.json({}); @@ -349,7 +357,7 @@ export default (app: express.Application) => { app.post(v1beta1Prefix + '/jobs/:jid/disable', (req, res) => { setTimeout(() => { - const job = fixedData.jobs.find((j) => j.id === req.params.jid); + const job = fixedData.jobs.find(j => j.id === req.params.jid); if (job) { job.enabled = false; res.json({}); @@ -369,15 +377,22 @@ export default (app: express.Application) => { switch (p.op) { case PredicateOp.EQUALS: if (p.key === 'name') { - return r.name && r.name.toLocaleLowerCase() === (p.string_value || '').toLocaleLowerCase(); + return ( + r.name && r.name.toLocaleLowerCase() === (p.string_value || '').toLocaleLowerCase() + ); } else if (p.key === 'storage_state') { - return (r as ApiRun).storage_state && (r as ApiRun).storage_state!.toString() === p.string_value; + return ( + (r as ApiRun).storage_state && + (r as ApiRun).storage_state!.toString() === p.string_value + ); } else { throw new Error(`Key: ${p.key} is not yet supported by the mock API server`); } case PredicateOp.NOTEQUALS: if (p.key === 'name') { - return r.name && r.name.toLocaleLowerCase() !== (p.string_value || '').toLocaleLowerCase(); + return ( + r.name && r.name.toLocaleLowerCase() !== (p.string_value || '').toLocaleLowerCase() + ); } else if (p.key === 'storage_state') { return ((r as ApiRun).storage_state || {}).toString() !== p.string_value; } else { @@ -387,7 +402,10 @@ export default (app: express.Application) => { if (p.key !== 'name') { throw new Error(`Key: ${p.key} is not yet supported by the mock API server`); } - return r.name && r.name.toLocaleLowerCase().includes((p.string_value || '').toLocaleLowerCase()); + return ( + r.name && + r.name.toLocaleLowerCase().includes((p.string_value || '').toLocaleLowerCase()) + ); case PredicateOp.NOTEQUALS: // Fall through case PredicateOp.GREATERTHAN: @@ -432,7 +450,7 @@ export default (app: express.Application) => { return result * (desc ? -1 : 1); }); - const start = (req.query.page_token ? +req.query.page_token : 0); + const start = req.query.page_token ? 
+req.query.page_token : 0; const end = start + (+req.query.page_size || 20); response.pipelines = pipelines.slice(start, end); @@ -445,7 +463,7 @@ export default (app: express.Application) => { app.delete(v1beta1Prefix + '/pipelines/:pid', (req, res) => { res.header('Content-Type', 'application/json'); - const i = fixedData.pipelines.findIndex((p) => p.id === req.params.pid); + const i = fixedData.pipelines.findIndex(p => p.id === req.params.pid); if (i === -1) { res.status(404).send(`No pipelines was found with ID: ${req.params.pid}`); @@ -463,7 +481,7 @@ export default (app: express.Application) => { app.get(v1beta1Prefix + '/pipelines/:pid', (req, res) => { res.header('Content-Type', 'application/json'); - const pipeline = fixedData.pipelines.find((p) => p.id === req.params.pid); + const pipeline = fixedData.pipelines.find(p => p.id === req.params.pid); if (!pipeline) { res.status(404).send(`No pipeline was found with ID: ${req.params.pid}`); return; @@ -473,7 +491,7 @@ export default (app: express.Application) => { app.get(v1beta1Prefix + '/pipelines/:pid/templates', (req, res) => { res.header('Content-Type', 'text/x-yaml'); - const pipeline = fixedData.pipelines.find((p) => p.id === req.params.pid); + const pipeline = fixedData.pipelines.find(p => p.id === req.params.pid); if (!pipeline) { res.status(404).send(`No pipeline was found with ID: ${req.params.pid}`); return; @@ -492,9 +510,10 @@ export default (app: express.Application) => { function mockCreatePipeline(res: Response, name: string, body?: any): void { res.header('Content-Type', 'application/json'); // Don't allow uploading multiple pipelines with the same name - if (fixedData.pipelines.find((p) => p.name === name)) { - res.status(502).send( - `A Pipeline named: "${name}" already exists. Please choose a different name.`); + if (fixedData.pipelines.find(p => p.name === name)) { + res + .status(502) + .send(`A Pipeline named: "${name}" already exists. Please choose a different name.`); } else { const pipeline = body || {}; pipeline.id = 'new-pipeline-' + (fixedData.pipelines.length + 1); @@ -504,13 +523,13 @@ export default (app: express.Application) => { 'TODO: the mock middleware does not actually use the uploaded pipeline'; pipeline.parameters = [ { - name: 'output' + name: 'output', }, { - name: 'param-1' + name: 'param-1', }, { - name: 'param-2' + name: 'param-2', }, ]; fixedData.pipelines.push(pipeline); @@ -586,6 +605,20 @@ export default (app: express.Application) => { res.send(true); }); + // Uncomment this instead to test 404 endpoints. + // app.get('/system/cluster-name', (_, res) => { + // res.status(404).send('404 Not Found'); + // }); + // app.get('/system/project-id', (_, res) => { + // res.status(404).send('404 Not Found'); + // }); + app.get('/system/cluster-name', (_, res) => { + res.send('mock-cluster-name'); + }); + app.get('/system/project-id', (_, res) => { + res.send('mock-project-id'); + }); + app.all(v1beta1Prefix + '*', (req, res) => { res.status(404).send('Bad request endpoint.'); }); diff --git a/frontend/mock-backend/mock-api-server.ts b/frontend/mock-backend/mock-api-server.ts index acc3e714867..24c8a9b6803 100644 --- a/frontend/mock-backend/mock-api-server.ts +++ b/frontend/mock-backend/mock-api-server.ts @@ -12,13 +12,16 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-import * as express from 'express'; +import express from 'express'; import mockApiMiddleware from './mock-api-middleware'; const app = express(); const port = process.argv[2] || 3001; -app.use((_, res, next) => { +// Uncomment the following line to get 1000ms delay to all requests +// app.use((req, res, next) => { setTimeout(next, 1000); }); + +app.use((_: any, res: any, next: any) => { res.header('Access-Control-Allow-Origin', '*'); res.header('Access-Control-Allow-Headers', 'X-Requested-With, content-type'); res.header('Access-Control-Allow-Methods', 'GET, POST, OPTIONS, PUT, DELETE'); diff --git a/frontend/mock-backend/mock-coinflip-runtime.ts b/frontend/mock-backend/mock-coinflip-runtime.ts index 6f7dc1e6869..e8d2d4abce4 100644 --- a/frontend/mock-backend/mock-coinflip-runtime.ts +++ b/frontend/mock-backend/mock-coinflip-runtime.ts @@ -24,8 +24,8 @@ export default { creationTimestamp: '2018-04-17T20:58:23Z', labels: { 'workflows.argoproj.io/completed': 'true', - 'workflows.argoproj.io/phase': 'Succeeded' - } + 'workflows.argoproj.io/phase': 'Succeeded', + }, }, spec: { templates: [ @@ -39,24 +39,24 @@ export default { { name: 'flip-coin', template: 'flip-coin', - arguments: {} - } + arguments: {}, + }, ], [ { name: 'heads', template: 'heads', arguments: {}, - when: '{{steps.flip-coin.outputs.result}} == heads' + when: '{{steps.flip-coin.outputs.result}} == heads', }, { name: 'tails', template: 'coinflip', arguments: {}, - when: '{{steps.flip-coin.outputs.result}} == tails' - } - ] - ] + when: '{{steps.flip-coin.outputs.result}} == tails', + }, + ], + ], }, { name: 'flip-coin', @@ -66,13 +66,12 @@ export default { script: { name: '', image: 'python:alpine3.6', - command: [ - 'python' - ], + command: ['python'], resources: {}, // tslint:disable-next-line:max-line-length - source: 'import random\nresult = "heads" if random.randint(0,1) == 0 else "tails"\nprint(result)\n' - } + source: + 'import random\nresult = "heads" if random.randint(0,1) == 0 else "tails"\nprint(result)\n', + }, }, { name: 'heads', @@ -82,30 +81,25 @@ export default { container: { name: '', image: 'alpine:3.6', - command: [ - 'sh', - '-c' - ], - args: [ - 'echo "it was heads"' - ], - resources: {} - } - } + command: ['sh', '-c'], + args: ['echo "it was heads"'], + resources: {}, + }, + }, ], entrypoint: 'coinflip', arguments: { parameters: [ { name: 'x', - value: 10 + value: 10, }, { name: 'y', - value: 20 - } - ] - } + value: 20, + }, + ], + }, }, status: { phase: 'Succeeded', @@ -121,78 +115,74 @@ export default { phase: 'Succeeded', startedAt: '2018-04-17T20:58:23Z', finishedAt: '2018-04-17T20:58:38Z', - children: [ - 'coinflip-recursive-q7dqb-1787723858', - 'coinflip-recursive-q7dqb-1720466287' - ], - outboundNodes: [ - 'coinflip-recursive-q7dqb-3721646052' - ] + children: ['coinflip-recursive-q7dqb-1787723858', 'coinflip-recursive-q7dqb-1720466287'], + outboundNodes: ['coinflip-recursive-q7dqb-3721646052'], }, 'coinflip-recursive-q7dqb-1720466287': { id: 'coinflip-recursive-q7dqb-1720466287', name: 'coinflip-recursive-q7dqb[1]', displayName: '[1]', outputs: { - artifacts: [{ - name: 'mlpipeline-ui-metadata', - s3: { - bucket: 'somebucket', - key: 'staging', + artifacts: [ + { + name: 'mlpipeline-ui-metadata', + s3: { + bucket: 'somebucket', + key: 'staging', + }, }, - }], + ], }, type: 'StepGroup', phase: 'Succeeded', boundaryID: 'coinflip-recursive-q7dqb', startedAt: '2018-04-17T20:58:28Z', finishedAt: '2018-04-17T20:58:38Z', - children: [ - 'coinflip-recursive-q7dqb-4011569486', - 
'coinflip-recursive-q7dqb-3266226990' - ] + children: ['coinflip-recursive-q7dqb-4011569486', 'coinflip-recursive-q7dqb-3266226990'], }, 'coinflip-recursive-q7dqb-1787723858': { id: 'coinflip-recursive-q7dqb-1787723858', name: 'coinflip-recursive-q7dqb[0]', displayName: '[0]', outputs: { - artifacts: [{ - name: 'mlpipeline-ui-metadata', - s3: { - bucket: 'somebucket', - key: 'analysis2', + artifacts: [ + { + name: 'mlpipeline-ui-metadata', + s3: { + bucket: 'somebucket', + key: 'analysis2', + }, }, - }], + ], }, type: 'StepGroup', phase: 'Succeeded', boundaryID: 'coinflip-recursive-q7dqb', startedAt: '2018-04-17T20:58:23Z', finishedAt: '2018-04-17T20:58:28Z', - children: [ - 'coinflip-recursive-q7dqb-311338607' - ] + children: ['coinflip-recursive-q7dqb-311338607'], }, 'coinflip-recursive-q7dqb-2934726852': { id: 'coinflip-recursive-q7dqb-2934726852', name: 'coinflip-recursive-q7dqb[1].tails[1].tails', displayName: 'tails', outputs: { - artifacts: [{ - name: 'mlpipeline-ui-metadata', - s3: { - bucket: 'somebucket', - key: 'transform', + artifacts: [ + { + name: 'mlpipeline-ui-metadata', + s3: { + bucket: 'somebucket', + key: 'transform', + }, }, - }], + ], }, type: 'Skipped', phase: 'Skipped', boundaryID: 'coinflip-recursive-q7dqb-3266226990', - message: 'when \'heads == tails\' evaluated false', + message: "when 'heads == tails' evaluated false", startedAt: '2018-04-17T20:58:34Z', - finishedAt: '2018-04-17T20:58:34Z' + finishedAt: '2018-04-17T20:58:34Z', }, 'coinflip-recursive-q7dqb-311338607': { id: 'coinflip-recursive-q7dqb-311338607', @@ -205,23 +195,23 @@ export default { startedAt: '2018-04-17T20:58:23Z', finishedAt: '2018-04-17T20:58:28Z', outputs: { - artifacts: [{ - name: 'mlpipeline-ui-metadata', - s3: { - bucket: 'somebucket', - key: 'model2', + artifacts: [ + { + name: 'mlpipeline-ui-metadata', + s3: { + bucket: 'somebucket', + key: 'model2', + }, }, - }], + ], parameters: [ { name: 'result', - value: 'tails' - } - ] + value: 'tails', + }, + ], }, - children: [ - 'coinflip-recursive-q7dqb-1720466287' - ] + children: ['coinflip-recursive-q7dqb-1720466287'], }, 'coinflip-recursive-q7dqb-3266226990': { id: 'coinflip-recursive-q7dqb-3266226990', @@ -233,13 +223,8 @@ export default { boundaryID: 'coinflip-recursive-q7dqb', startedAt: '2018-04-17T20:58:28Z', finishedAt: '2018-04-17T20:58:38Z', - children: [ - 'coinflip-recursive-q7dqb-4010083248', - 'coinflip-recursive-q7dqb-855846949' - ], - outboundNodes: [ - 'coinflip-recursive-q7dqb-3721646052' - ] + children: ['coinflip-recursive-q7dqb-4010083248', 'coinflip-recursive-q7dqb-855846949'], + outboundNodes: ['coinflip-recursive-q7dqb-3721646052'], }, 'coinflip-recursive-q7dqb-3466727817': { id: 'coinflip-recursive-q7dqb-3466727817', @@ -255,13 +240,11 @@ export default { parameters: [ { name: 'result', - value: 'heads' - } - ] + value: 'heads', + }, + ], }, - children: [ - 'coinflip-recursive-q7dqb-855846949' - ] + children: ['coinflip-recursive-q7dqb-855846949'], }, 'coinflip-recursive-q7dqb-3721646052': { id: 'coinflip-recursive-q7dqb-3721646052', @@ -272,7 +255,7 @@ export default { phase: 'Succeeded', boundaryID: 'coinflip-recursive-q7dqb-3266226990', startedAt: '2018-04-17T20:58:34Z', - finishedAt: '2018-04-17T20:58:37Z' + finishedAt: '2018-04-17T20:58:37Z', }, 'coinflip-recursive-q7dqb-4010083248': { id: 'coinflip-recursive-q7dqb-4010083248', @@ -283,9 +266,7 @@ export default { boundaryID: 'coinflip-recursive-q7dqb-3266226990', startedAt: '2018-04-17T20:58:28Z', finishedAt: '2018-04-17T20:58:34Z', - children: [ - 
'coinflip-recursive-q7dqb-3466727817' - ] + children: ['coinflip-recursive-q7dqb-3466727817'], }, 'coinflip-recursive-q7dqb-4011569486': { id: 'coinflip-recursive-q7dqb-4011569486', @@ -294,9 +275,9 @@ export default { type: 'Skipped', phase: 'Skipped', boundaryID: 'coinflip-recursive-q7dqb', - message: 'when \'tails == heads\' evaluated false', + message: "when 'tails == heads' evaluated false", startedAt: '2018-04-17T20:58:28Z', - finishedAt: '2018-04-17T20:58:28Z' + finishedAt: '2018-04-17T20:58:28Z', }, 'coinflip-recursive-q7dqb-855846949': { id: 'coinflip-recursive-q7dqb-855846949', @@ -307,11 +288,8 @@ export default { boundaryID: 'coinflip-recursive-q7dqb-3266226990', startedAt: '2018-04-17T20:58:34Z', finishedAt: '2018-04-17T20:58:38Z', - children: [ - 'coinflip-recursive-q7dqb-3721646052', - 'coinflip-recursive-q7dqb-2934726852' - ] - } - } - } + children: ['coinflip-recursive-q7dqb-3721646052', 'coinflip-recursive-q7dqb-2934726852'], + }, + }, + }, }; diff --git a/frontend/mock-backend/mock-error-runtime.ts b/frontend/mock-backend/mock-error-runtime.ts index bc46971dbfb..05c93e03f0e 100644 --- a/frontend/mock-backend/mock-error-runtime.ts +++ b/frontend/mock-backend/mock-error-runtime.ts @@ -18,14 +18,15 @@ export default { name: 'coinflip-error-nklng2', namespace: 'default', // tslint:disable-next-line:max-line-length - selfLink: '/apis/argoproj.io/v1alpha1/namespaces/default/workflows/coinflip-heads-c085010d-771a-4cdf-979c-257e991501b5', + selfLink: + '/apis/argoproj.io/v1alpha1/namespaces/default/workflows/coinflip-heads-c085010d-771a-4cdf-979c-257e991501b5', uid: '47a3d09c-7db4-4788-ac55-3f8d908574aa', resourceVersion: '10527150', creationTimestamp: '2018-06-11T22:49:26Z', labels: { 'workflows.argoproj.io/completed': 'true', - 'workflows.argoproj.io/phase': 'Failed' - } + 'workflows.argoproj.io/phase': 'Failed', + }, }, spec: { templates: [ @@ -40,10 +41,10 @@ export default { name: 'heads', template: 'heads', arguments: {}, - when: '{{steps.flip-coin.outputs.result}} == heads' - } - ] - ] + when: '{{steps.flip-coin.outputs.result}} == heads', + }, + ], + ], }, { name: 'heads', @@ -53,25 +54,21 @@ export default { container: { name: '', image: 'alpine:3.6', - command: [ - 'sh', - '-c' - ], - args: [ - 'echo "it was heads"' - ], - resources: {} - } - } + command: ['sh', '-c'], + args: ['echo "it was heads"'], + resources: {}, + }, + }, ], entrypoint: 'coinflip', - arguments: {} + arguments: {}, }, status: { phase: 'Failed', startedAt: '2018-06-11T22:49:26Z', finishedAt: '2018-06-11T22:49:26Z', // tslint:disable-next-line:max-line-length - message: 'invalid spec: templates.coinflip.steps[0].heads failed to resolve {{steps.flip-coin.outputs.result}}' - } + message: + 'invalid spec: templates.coinflip.steps[0].heads failed to resolve {{steps.flip-coin.outputs.result}}', + }, }; diff --git a/frontend/mock-backend/mock-xgboost-runtime.ts b/frontend/mock-backend/mock-xgboost-runtime.ts index 928dac43c02..a25784eadc5 100644 --- a/frontend/mock-backend/mock-xgboost-runtime.ts +++ b/frontend/mock-backend/mock-xgboost-runtime.ts @@ -24,8 +24,8 @@ export default { creationTimestamp: '2018-04-16T23:37:48Z', labels: { 'workflows.argoproj.io/completed': 'true', - 'workflows.argoproj.io/phase': 'Succeeded' - } + 'workflows.argoproj.io/phase': 'Succeeded', + }, }, spec: { templates: [ @@ -34,15 +34,15 @@ export default { inputs: { parameters: [ { - name: 'project' + name: 'project', }, { - name: 'region' + name: 'region', }, { - name: 'cluster' - } - ] + name: 'cluster', + }, + ], }, outputs: 
{}, metadata: {}, @@ -55,63 +55,63 @@ export default { parameters: [ { name: 'project', - value: '{{inputs.parameters.project}}' + value: '{{inputs.parameters.project}}', }, { name: 'region', - value: '{{inputs.parameters.region}}' + value: '{{inputs.parameters.region}}', }, { name: 'name', - value: '{{inputs.parameters.cluster}}' - } - ] - } - } - ] - ] + value: '{{inputs.parameters.cluster}}', + }, + ], + }, + }, + ], + ], }, { name: 'xgboost-training', inputs: { parameters: [ { - name: 'project' + name: 'project', }, { - name: 'region' + name: 'region', }, { - name: 'cluster' + name: 'cluster', }, { - name: 'output' + name: 'output', }, { - name: 'train' + name: 'train', }, { - name: 'eval' + name: 'eval', }, { - name: 'schema' + name: 'schema', }, { - name: 'target' + name: 'target', }, { - name: 'package' + name: 'package', }, { - name: 'workers' + name: 'workers', }, { - name: 'rounds' + name: 'rounds', }, { - name: 'conf' - } - ] + name: 'conf', + }, + ], }, outputs: {}, metadata: {}, @@ -124,23 +124,23 @@ export default { parameters: [ { name: 'project', - value: '{{inputs.parameters.project}}' + value: '{{inputs.parameters.project}}', }, { name: 'region', - value: '{{inputs.parameters.region}}' + value: '{{inputs.parameters.region}}', }, { name: 'name', - value: '{{inputs.parameters.cluster}}' + value: '{{inputs.parameters.cluster}}', }, { name: 'staging', - value: '{{inputs.parameters.output}}/{{workflow.name}}/staging' - } - ] - } - } + value: '{{inputs.parameters.output}}/{{workflow.name}}/staging', + }, + ], + }, + }, ], [ { @@ -150,31 +150,31 @@ export default { parameters: [ { name: 'project', - value: '{{inputs.parameters.project}}' + value: '{{inputs.parameters.project}}', }, { name: 'region', - value: '{{inputs.parameters.region}}' + value: '{{inputs.parameters.region}}', }, { name: 'cluster', - value: '{{inputs.parameters.cluster}}' + value: '{{inputs.parameters.cluster}}', }, { name: 'output', - value: '{{inputs.parameters.output}}/{{workflow.name}}/analysis' + value: '{{inputs.parameters.output}}/{{workflow.name}}/analysis', }, { name: 'train', - value: '{{inputs.parameters.train}}' + value: '{{inputs.parameters.train}}', }, { name: 'schema', - value: '{{inputs.parameters.schema}}' - } - ] - } - } + value: '{{inputs.parameters.schema}}', + }, + ], + }, + }, ], [ { @@ -184,39 +184,39 @@ export default { parameters: [ { name: 'project', - value: '{{inputs.parameters.project}}' + value: '{{inputs.parameters.project}}', }, { name: 'region', - value: '{{inputs.parameters.region}}' + value: '{{inputs.parameters.region}}', }, { name: 'cluster', - value: '{{inputs.parameters.cluster}}' + value: '{{inputs.parameters.cluster}}', }, { name: 'output', - value: '{{inputs.parameters.output}}/{{workflow.name}}/transform' + value: '{{inputs.parameters.output}}/{{workflow.name}}/transform', }, { name: 'train', - value: '{{inputs.parameters.train}}' + value: '{{inputs.parameters.train}}', }, { name: 'eval', - value: '{{inputs.parameters.eval}}' + value: '{{inputs.parameters.eval}}', }, { name: 'target', - value: '{{inputs.parameters.target}}' + value: '{{inputs.parameters.target}}', }, { name: 'analysis', - value: '{{inputs.parameters.output}}/{{workflow.name}}/analysis' - } - ] - } - } + value: '{{inputs.parameters.output}}/{{workflow.name}}/analysis', + }, + ], + }, + }, ], [ { @@ -226,55 +226,55 @@ export default { parameters: [ { name: 'project', - value: '{{inputs.parameters.project}}' + value: '{{inputs.parameters.project}}', }, { name: 'region', - value: 
'{{inputs.parameters.region}}' + value: '{{inputs.parameters.region}}', }, { name: 'cluster', - value: '{{inputs.parameters.cluster}}' + value: '{{inputs.parameters.cluster}}', }, { name: 'output', - value: '{{inputs.parameters.output}}/{{workflow.name}}/model' + value: '{{inputs.parameters.output}}/{{workflow.name}}/model', }, { name: 'train', - value: '{{inputs.parameters.output}}/{{workflow.name}}/transform/train/part-*' + value: '{{inputs.parameters.output}}/{{workflow.name}}/transform/train/part-*', }, { name: 'eval', - value: '{{inputs.parameters.output}}/{{workflow.name}}/transform/eval/part-*' + value: '{{inputs.parameters.output}}/{{workflow.name}}/transform/eval/part-*', }, { name: 'target', - value: '{{inputs.parameters.target}}' + value: '{{inputs.parameters.target}}', }, { name: 'analysis', - value: '{{inputs.parameters.output}}/{{workflow.name}}/analysis' + value: '{{inputs.parameters.output}}/{{workflow.name}}/analysis', }, { name: 'package', - value: '{{inputs.parameters.package}}' + value: '{{inputs.parameters.package}}', }, { name: 'workers', - value: '{{inputs.parameters.workers}}' + value: '{{inputs.parameters.workers}}', }, { name: 'rounds', - value: '{{inputs.parameters.rounds}}' + value: '{{inputs.parameters.rounds}}', }, { name: 'conf', - value: '{{inputs.parameters.conf}}' - } - ] - } - } + value: '{{inputs.parameters.conf}}', + }, + ], + }, + }, ], [ { @@ -284,43 +284,43 @@ export default { parameters: [ { name: 'project', - value: '{{inputs.parameters.project}}' + value: '{{inputs.parameters.project}}', }, { name: 'region', - value: '{{inputs.parameters.region}}' + value: '{{inputs.parameters.region}}', }, { name: 'cluster', - value: '{{inputs.parameters.cluster}}' + value: '{{inputs.parameters.cluster}}', }, { name: 'output', - value: '{{inputs.parameters.output}}/{{workflow.name}}/batchpredict' + value: '{{inputs.parameters.output}}/{{workflow.name}}/batchpredict', }, { name: 'eval', - value: '{{inputs.parameters.output}}/{{workflow.name}}/transform/eval/part-*' + value: '{{inputs.parameters.output}}/{{workflow.name}}/transform/eval/part-*', }, { name: 'target', - value: '{{inputs.parameters.target}}' + value: '{{inputs.parameters.target}}', }, { name: 'analysis', - value: '{{inputs.parameters.output}}/{{workflow.name}}/analysis' + value: '{{inputs.parameters.output}}/{{workflow.name}}/analysis', }, { name: 'package', - value: '{{inputs.parameters.package}}' + value: '{{inputs.parameters.package}}', }, { name: 'model', - value: '{{inputs.parameters.output}}/{{workflow.name}}/model' - } - ] - } - } + value: '{{inputs.parameters.output}}/{{workflow.name}}/model', + }, + ], + }, + }, ], [ { @@ -330,383 +330,363 @@ export default { parameters: [ { name: 'output', - value: '{{inputs.parameters.output}}/{{workflow.name}}/confusionmatrix' + value: '{{inputs.parameters.output}}/{{workflow.name}}/confusionmatrix', }, { name: 'predictions', - value: '{{inputs.parameters.output}}/{{workflow.name}}/batchpredict/part-*.csv' + value: '{{inputs.parameters.output}}/{{workflow.name}}/batchpredict/part-*.csv', }, { name: 'analysis', - value: '{{inputs.parameters.output}}/{{workflow.name}}/analysis' + value: '{{inputs.parameters.output}}/{{workflow.name}}/analysis', }, { name: 'target', - value: '{{inputs.parameters.target}}' - } - ] - } - } - ] - ] + value: '{{inputs.parameters.target}}', + }, + ], + }, + }, + ], + ], }, { name: 'createcluster', inputs: { parameters: [ { - name: 'project' + name: 'project', }, { - name: 'region' + name: 'region', }, { - name: 'name' + name: 
'name', }, { - name: 'staging' - } - ] + name: 'staging', + }, + ], }, outputs: {}, metadata: {}, container: { name: '', image: 'gcr.io/ml-pipeline/ml-pipeline-dataproc-xgboost', - command: [ - 'sh', - '-c' - ], + command: ['sh', '-c'], args: [ 'python /ml/create_cluster.py --project {{inputs.parameters.project}} ' + - '--region {{inputs.parameters.region}} --name {{inputs.parameters.name}} ' + - '--staging {{inputs.parameters.staging}}' + '--region {{inputs.parameters.region}} --name {{inputs.parameters.name}} ' + + '--staging {{inputs.parameters.staging}}', ], - resources: {} - } + resources: {}, + }, }, { name: 'analyze', inputs: { parameters: [ { - name: 'project' + name: 'project', }, { - name: 'region' + name: 'region', }, { - name: 'cluster' + name: 'cluster', }, { - name: 'output' + name: 'output', }, { - name: 'train' + name: 'train', }, { - name: 'schema' - } - ] + name: 'schema', + }, + ], }, outputs: {}, metadata: {}, container: { name: '', image: 'gcr.io/ml-pipeline/ml-pipeline-dataproc-xgboost', - command: [ - 'sh', - '-c' - ], + command: ['sh', '-c'], args: [ 'python /ml/analyze.py --project {{inputs.parameters.project}} ' + - '--region {{inputs.parameters.region}} --cluster ' + - '{{inputs.parameters.cluster}} --output ' + - '{{inputs.parameters.output}} --train {{inputs.parameters.train}} ' + - '--schema {{inputs.parameters.schema}}' + '--region {{inputs.parameters.region}} --cluster ' + + '{{inputs.parameters.cluster}} --output ' + + '{{inputs.parameters.output}} --train {{inputs.parameters.train}} ' + + '--schema {{inputs.parameters.schema}}', ], - resources: {} - } + resources: {}, + }, }, { name: 'transform', inputs: { parameters: [ { - name: 'project' + name: 'project', }, { - name: 'region' + name: 'region', }, { - name: 'cluster' + name: 'cluster', }, { - name: 'output' + name: 'output', }, { - name: 'train' + name: 'train', }, { - name: 'eval' + name: 'eval', }, { - name: 'target' + name: 'target', }, { - name: 'analysis' - } - ] + name: 'analysis', + }, + ], }, outputs: {}, metadata: {}, container: { name: '', image: 'gcr.io/ml-pipeline/ml-pipeline-dataproc-xgboost', - command: [ - 'sh', - '-c' - ], + command: ['sh', '-c'], args: [ 'python /ml/transform.py --project {{inputs.parameters.project}} ' + - '--region {{inputs.parameters.region}} --cluster ' + - '{{inputs.parameters.cluster}} --output ' + - '{{inputs.parameters.output}} --train {{inputs.parameters.train}} ' + - '--eval {{inputs.parameters.eval}} --target ' + - '{{inputs.parameters.target}} --analysis ' + - '{{inputs.parameters.analysis}}' + '--region {{inputs.parameters.region}} --cluster ' + + '{{inputs.parameters.cluster}} --output ' + + '{{inputs.parameters.output}} --train {{inputs.parameters.train}} ' + + '--eval {{inputs.parameters.eval}} --target ' + + '{{inputs.parameters.target}} --analysis ' + + '{{inputs.parameters.analysis}}', ], - resources: {} - } + resources: {}, + }, }, { name: 'train', inputs: { parameters: [ { - name: 'project' + name: 'project', }, { - name: 'region' + name: 'region', }, { - name: 'cluster' + name: 'cluster', }, { - name: 'output' + name: 'output', }, { - name: 'train' + name: 'train', }, { - name: 'eval' + name: 'eval', }, { - name: 'target' + name: 'target', }, { - name: 'analysis' + name: 'analysis', }, { - name: 'package' + name: 'package', }, { - name: 'workers' + name: 'workers', }, { - name: 'rounds' + name: 'rounds', }, { - name: 'conf' - } - ] + name: 'conf', + }, + ], }, outputs: {}, metadata: {}, container: { name: '', image: 
'gcr.io/ml-pipeline/ml-pipeline-dataproc-xgboost', - command: [ - 'sh', - '-c' - ], + command: ['sh', '-c'], args: [ // tslint:disable-next-line:max-line-length - 'python /ml/train.py --project {{inputs.parameters.project}} --region {{inputs.parameters.region}} --cluster {{inputs.parameters.cluster}} --output {{inputs.parameters.output}} --train {{inputs.parameters.train}} --eval {{inputs.parameters.eval}} --target {{inputs.parameters.target}} --analysis {{inputs.parameters.analysis}} --package {{inputs.parameters.package}} --workers {{inputs.parameters.workers}} --rounds {{inputs.parameters.rounds}} --conf {{inputs.parameters.conf}}' + 'python /ml/train.py --project {{inputs.parameters.project}} --region {{inputs.parameters.region}} --cluster {{inputs.parameters.cluster}} --output {{inputs.parameters.output}} --train {{inputs.parameters.train}} --eval {{inputs.parameters.eval}} --target {{inputs.parameters.target}} --analysis {{inputs.parameters.analysis}} --package {{inputs.parameters.package}} --workers {{inputs.parameters.workers}} --rounds {{inputs.parameters.rounds}} --conf {{inputs.parameters.conf}}', ], - resources: {} - } + resources: {}, + }, }, { name: 'batchpredict', inputs: { parameters: [ { - name: 'project' + name: 'project', }, { - name: 'region' + name: 'region', }, { - name: 'cluster' + name: 'cluster', }, { - name: 'output' + name: 'output', }, { - name: 'eval' + name: 'eval', }, { - name: 'model' + name: 'model', }, { - name: 'target' + name: 'target', }, { - name: 'package' + name: 'package', }, { - name: 'analysis' - } - ] + name: 'analysis', + }, + ], }, outputs: {}, metadata: {}, container: { name: '', image: 'gcr.io/ml-pipeline/ml-pipeline-dataproc-xgboost', - command: [ - 'sh', - '-c' - ], + command: ['sh', '-c'], args: [ // tslint:disable-next-line:max-line-length - 'python /ml/predict.py --project {{inputs.parameters.project}} --region {{inputs.parameters.region}} --cluster {{inputs.parameters.cluster}} --output {{inputs.parameters.output}} --predict {{inputs.parameters.eval}} --analysis {{inputs.parameters.analysis}} --target {{inputs.parameters.target}} --model {{inputs.parameters.model}} --package {{inputs.parameters.package}} ' + 'python /ml/predict.py --project {{inputs.parameters.project}} --region {{inputs.parameters.region}} --cluster {{inputs.parameters.cluster}} --output {{inputs.parameters.output}} --predict {{inputs.parameters.eval}} --analysis {{inputs.parameters.analysis}} --target {{inputs.parameters.target}} --model {{inputs.parameters.model}} --package {{inputs.parameters.package}} ', ], - resources: {} - } + resources: {}, + }, }, { name: 'confusionmatrix', inputs: { parameters: [ { - name: 'output' + name: 'output', }, { - name: 'analysis' + name: 'analysis', }, { - name: 'predictions' + name: 'predictions', }, { - name: 'target' - } - ] + name: 'target', + }, + ], }, outputs: {}, metadata: {}, container: { name: '', image: 'gcr.io/ml-pipeline/ml-pipeline-local', - command: [ - 'sh', - '-c' - ], + command: ['sh', '-c'], args: [ // tslint:disable-next-line:max-line-length - 'python /ml/confusion_matrix.py --output {{inputs.parameters.output}} --predictions {{inputs.parameters.predictions}} --analysis {{inputs.parameters.analysis}} --target {{inputs.parameters.target}}' + 'python /ml/confusion_matrix.py --output {{inputs.parameters.output}} --predictions {{inputs.parameters.predictions}} --analysis {{inputs.parameters.analysis}} --target {{inputs.parameters.target}}', ], - resources: {} - } + resources: {}, + }, }, { name: 'deletecluster', 
inputs: { parameters: [ { - name: 'project' + name: 'project', }, { - name: 'region' + name: 'region', }, { - name: 'name' - } - ] + name: 'name', + }, + ], }, outputs: {}, metadata: {}, container: { name: '', image: 'gcr.io/ml-pipeline/ml-pipeline-dataproc-xgboost', - command: [ - 'sh', - '-c' - ], + command: ['sh', '-c'], args: [ // tslint:disable-next-line:max-line-length - 'python /ml/delete_cluster.py --project {{inputs.parameters.project}} --region {{inputs.parameters.region}} --name {{inputs.parameters.name}}' + 'python /ml/delete_cluster.py --project {{inputs.parameters.project}} --region {{inputs.parameters.region}} --name {{inputs.parameters.name}}', ], - resources: {} - } - } + resources: {}, + }, + }, ], entrypoint: 'xgboost-training', arguments: { parameters: [ { name: 'project', - value: 'ml-pipeline' + value: 'ml-pipeline', }, { name: 'region', - value: 'us-central1' + value: 'us-central1', }, { name: 'cluster', - value: 'xgboost-spark-{{workflow.name}}' + value: 'xgboost-spark-{{workflow.name}}', }, { name: 'output', - value: 'gs://sample-xgbbost-cm-output' + value: 'gs://sample-xgbbost-cm-output', }, { name: 'train', - value: 'gs://ml-pipeline-playground/newsgroup/train.csv' + value: 'gs://ml-pipeline-playground/newsgroup/train.csv', }, { name: 'eval', - value: 'gs://ml-pipeline-playground/newsgroup/eval.csv' + value: 'gs://ml-pipeline-playground/newsgroup/eval.csv', }, { name: 'schema', - value: 'gs://ml-pipeline-playground/newsgroup/schema.json' + value: 'gs://ml-pipeline-playground/newsgroup/schema.json', }, { name: 'target', - value: 'news_label' + value: 'news_label', }, { name: 'package', - // tslint:disable-next-line:max-line-length - value: 'gs://ml-pipeline-playground/xgboost4j-example-0.8-SNAPSHOT-jar-with-dependencies.jar' + // tslint:disable-next-line:max-line-length + value: + 'gs://ml-pipeline-playground/xgboost4j-example-0.8-SNAPSHOT-jar-with-dependencies.jar', }, { name: 'workers', - value: '2' + value: '2', }, { name: 'rounds', - value: '200' + value: '200', }, { name: 'conf', - value: 'gs://ml-pipeline-playground/trainconfcla.json ' - } - ] + value: 'gs://ml-pipeline-playground/trainconfcla.json ', + }, + ], }, - onExit: 'exit-handler' + onExit: 'exit-handler', }, status: { phase: 'Succeeded', @@ -726,54 +706,55 @@ export default { parameters: [ { name: 'project', - value: 'ml-pipeline' + value: 'ml-pipeline', }, { name: 'region', - value: 'us-central1' + value: 'us-central1', }, { name: 'cluster', - value: 'xgboost-spark-xgboost-training-gzkm9' + value: 'xgboost-spark-xgboost-training-gzkm9', }, { name: 'output', - value: 'gs://sample-xgbbost-cm-output' + value: 'gs://sample-xgbbost-cm-output', }, { name: 'train', - value: 'gs://ml-pipeline-playground/newsgroup/train.csv' + value: 'gs://ml-pipeline-playground/newsgroup/train.csv', }, { name: 'eval', - value: 'gs://ml-pipeline-playground/newsgroup/eval.csv' + value: 'gs://ml-pipeline-playground/newsgroup/eval.csv', }, { name: 'schema', - value: 'gs://ml-pipeline-playground/newsgroup/schema.json' + value: 'gs://ml-pipeline-playground/newsgroup/schema.json', }, { name: 'target', - value: 'news_label' + value: 'news_label', }, { name: 'package', - // tslint:disable-next-line:max-line-length - value: 'gs://ml-pipeline-playground/xgboost4j-example-0.8-SNAPSHOT-jar-with-dependencies.jar' + // tslint:disable-next-line:max-line-length + value: + 'gs://ml-pipeline-playground/xgboost4j-example-0.8-SNAPSHOT-jar-with-dependencies.jar', }, { name: 'workers', - value: '2' + value: '2', }, { name: 'rounds', - value: 
'200' + value: '200', }, { name: 'conf', - value: 'gs://ml-pipeline-playground/trainconfcla.json ' - } - ] + value: 'gs://ml-pipeline-playground/trainconfcla.json ', + }, + ], }, children: [ 'xgboost-training-gzkm9-4204210601', @@ -781,11 +762,9 @@ export default { 'xgboost-training-gzkm9-915503087', 'xgboost-training-gzkm9-982760658', 'xgboost-training-gzkm9-4204798981', - 'xgboost-training-gzkm9-916635920' + 'xgboost-training-gzkm9-916635920', ], - outboundNodes: [ - 'xgboost-training-gzkm9-2203328319' - ] + outboundNodes: ['xgboost-training-gzkm9-2203328319'], }, 'xgboost-training-gzkm9-1253553084': { id: 'xgboost-training-gzkm9-1253553084', @@ -794,31 +773,28 @@ export default { type: 'Steps', templateName: 'exit-handler', phase: 'Pending', - message: 'ImagePullBackOff: Back-off pulling image "gcr.io/ml-pipeline/ml-pipeline-dataproc-create-cluster"', + message: + 'ImagePullBackOff: Back-off pulling image "gcr.io/ml-pipeline/ml-pipeline-dataproc-create-cluster"', startedAt: '2018-04-17T00:10:06Z', finishedAt: '2018-04-17T00:12:01Z', inputs: { parameters: [ { name: 'project', - value: 'ml-pipeline' + value: 'ml-pipeline', }, { name: 'region', - value: 'us-central1' + value: 'us-central1', }, { name: 'cluster', - value: 'xgboost-spark-xgboost-training-gzkm9' - } - ] + value: 'xgboost-spark-xgboost-training-gzkm9', + }, + ], }, - children: [ - 'xgboost-training-gzkm9-3439262870' - ], - outboundNodes: [ - 'xgboost-training-gzkm9-3721733163' - ] + children: ['xgboost-training-gzkm9-3439262870'], + outboundNodes: ['xgboost-training-gzkm9-3721733163'], }, 'xgboost-training-gzkm9-1761585008': { id: 'xgboost-training-gzkm9-1761585008', @@ -834,46 +810,45 @@ export default { parameters: [ { name: 'project', - value: 'ml-pipeline' + value: 'ml-pipeline', }, { name: 'region', - value: 'us-central1' + value: 'us-central1', }, { name: 'cluster', - value: 'xgboost-spark-xgboost-training-gzkm9' + value: 'xgboost-spark-xgboost-training-gzkm9', }, { name: 'output', - value: 'gs://sample-xgbbost-cm-output/xgboost-training-gzkm9/batchpredict' + value: 'gs://sample-xgbbost-cm-output/xgboost-training-gzkm9/batchpredict', }, { name: 'eval', - value: 'gs://sample-xgbbost-cm-output/xgboost-training-gzkm9/transform/eval/part-*' + value: 'gs://sample-xgbbost-cm-output/xgboost-training-gzkm9/transform/eval/part-*', }, { name: 'model', - value: 'gs://sample-xgbbost-cm-output/xgboost-training-gzkm9/model' + value: 'gs://sample-xgbbost-cm-output/xgboost-training-gzkm9/model', }, { name: 'target', - value: 'news_label' + value: 'news_label', }, { name: 'package', - // tslint:disable-next-line:max-line-length - value: 'gs://ml-pipeline-playground/xgboost4j-example-0.8-SNAPSHOT-jar-with-dependencies.jar' + // tslint:disable-next-line:max-line-length + value: + 'gs://ml-pipeline-playground/xgboost4j-example-0.8-SNAPSHOT-jar-with-dependencies.jar', }, { name: 'analysis', - value: 'gs://sample-xgbbost-cm-output/xgboost-training-gzkm9/analysis' - } - ] + value: 'gs://sample-xgbbost-cm-output/xgboost-training-gzkm9/analysis', + }, + ], }, - children: [ - 'xgboost-training-gzkm9-916635920' - ] + children: ['xgboost-training-gzkm9-916635920'], }, 'xgboost-training-gzkm9-2203328319': { id: 'xgboost-training-gzkm9-2203328319', @@ -890,26 +865,28 @@ export default { parameters: [ { name: 'analysis', - value: 'gs://sample-xgbbost-cm-output/xgboost-training-gzkm9/analysis' + value: 'gs://sample-xgbbost-cm-output/xgboost-training-gzkm9/analysis', }, { name: 'predictions', - value: 
'gs://sample-xgbbost-cm-output/xgboost-training-gzkm9/batchpredict/part-*.csv' + value: 'gs://sample-xgbbost-cm-output/xgboost-training-gzkm9/batchpredict/part-*.csv', }, { name: 'target', - value: 'news_label' - } - ] + value: 'news_label', + }, + ], }, outputs: { - artifacts: [{ - name: 'mlpipeline-ui-metadata', - s3: { - bucket: 'somebucket', - key: 'confusionmatrix', + artifacts: [ + { + name: 'mlpipeline-ui-metadata', + s3: { + bucket: 'somebucket', + key: 'confusionmatrix', + }, }, - }], + ], }, }, 'xgboost-training-gzkm9-2365787662': { @@ -926,63 +903,64 @@ export default { parameters: [ { name: 'project', - value: 'ml-pipeline' + value: 'ml-pipeline', }, { name: 'region', - value: 'us-central1' + value: 'us-central1', }, { name: 'cluster', - value: 'xgboost-spark-xgboost-training-gzkm9' + value: 'xgboost-spark-xgboost-training-gzkm9', }, { name: 'train', - value: 'gs://sample-xgbbost-cm-output/xgboost-training-gzkm9/transform/train/part-*' + value: 'gs://sample-xgbbost-cm-output/xgboost-training-gzkm9/transform/train/part-*', }, { name: 'eval', - value: 'gs://sample-xgbbost-cm-output/xgboost-training-gzkm9/transform/eval/part-*' + value: 'gs://sample-xgbbost-cm-output/xgboost-training-gzkm9/transform/eval/part-*', }, { name: 'target', - value: 'news_label' + value: 'news_label', }, { name: 'analysis', - value: 'gs://sample-xgbbost-cm-output/xgboost-training-gzkm9/analysis' + value: 'gs://sample-xgbbost-cm-output/xgboost-training-gzkm9/analysis', }, { name: 'package', - // tslint:disable-next-line:max-line-length - value: 'gs://ml-pipeline-playground/xgboost4j-example-0.8-SNAPSHOT-jar-with-dependencies.jar' + // tslint:disable-next-line:max-line-length + value: + 'gs://ml-pipeline-playground/xgboost4j-example-0.8-SNAPSHOT-jar-with-dependencies.jar', }, { name: 'workers', - value: '2' + value: '2', }, { name: 'rounds', - value: '200' + value: '200', }, { name: 'conf', - value: 'gs://ml-pipeline-playground/trainconfcla.json ' - } - ] + value: 'gs://ml-pipeline-playground/trainconfcla.json ', + }, + ], }, outputs: { - artifacts: [{ - name: 'mlpipeline-ui-metadata', - s3: { - bucket: 'somebucket', - key: 'model', + artifacts: [ + { + name: 'mlpipeline-ui-metadata', + s3: { + bucket: 'somebucket', + key: 'model', + }, }, - }], + ], }, - children: [ - 'xgboost-training-gzkm9-4204798981' - ] + children: ['xgboost-training-gzkm9-4204798981'], }, 'xgboost-training-gzkm9-2411879589': { id: 'xgboost-training-gzkm9-2411879589', @@ -998,25 +976,23 @@ export default { parameters: [ { name: 'project', - value: 'ml-pipeline' + value: 'ml-pipeline', }, { name: 'region', - value: 'us-central1' + value: 'us-central1', }, { name: 'name', - value: 'xgboost-spark-xgboost-training-gzkm9' + value: 'xgboost-spark-xgboost-training-gzkm9', }, { name: 'staging', - value: 'gs://sample-xgbbost-cm-output/xgboost-training-gzkm9/staging' - } - ] + value: 'gs://sample-xgbbost-cm-output/xgboost-training-gzkm9/staging', + }, + ], }, - children: [ - 'xgboost-training-gzkm9-916047540' - ] + children: ['xgboost-training-gzkm9-916047540'], }, 'xgboost-training-gzkm9-2457131397': { id: 'xgboost-training-gzkm9-2457131397', @@ -1032,46 +1008,46 @@ export default { parameters: [ { name: 'project', - value: 'ml-pipeline' + value: 'ml-pipeline', }, { name: 'region', - value: 'us-central1' + value: 'us-central1', }, { name: 'cluster', - value: 'xgboost-spark-xgboost-training-gzkm9' + value: 'xgboost-spark-xgboost-training-gzkm9', }, { name: 'train', - value: 'gs://ml-pipeline-playground/newsgroup/train.csv' + value: 
'gs://ml-pipeline-playground/newsgroup/train.csv', }, { name: 'eval', - value: 'gs://ml-pipeline-playground/newsgroup/eval.csv' + value: 'gs://ml-pipeline-playground/newsgroup/eval.csv', }, { name: 'target', - value: 'news_label' + value: 'news_label', }, { name: 'analysis', - value: 'gs://sample-xgbbost-cm-output/xgboost-training-gzkm9/analysis' - } - ] + value: 'gs://sample-xgbbost-cm-output/xgboost-training-gzkm9/analysis', + }, + ], }, outputs: { - artifacts: [{ - name: 'mlpipeline-ui-metadata', - s3: { - bucket: 'somebucket', - key: 'transform', + artifacts: [ + { + name: 'mlpipeline-ui-metadata', + s3: { + bucket: 'somebucket', + key: 'transform', + }, }, - }], + ], }, - children: [ - 'xgboost-training-gzkm9-982760658' - ] + children: ['xgboost-training-gzkm9-982760658'], }, 'xgboost-training-gzkm9-3439262870': { id: 'xgboost-training-gzkm9-3439262870', @@ -1082,9 +1058,7 @@ export default { boundaryID: 'xgboost-training-gzkm9-1253553084', startedAt: '2018-04-17T00:10:06Z', finishedAt: '2018-04-17T00:12:01Z', - children: [ - 'xgboost-training-gzkm9-3721733163' - ] + children: ['xgboost-training-gzkm9-3721733163'], }, 'xgboost-training-gzkm9-3636935406': { id: 'xgboost-training-gzkm9-3636935406', @@ -1100,38 +1074,38 @@ export default { parameters: [ { name: 'project', - value: 'ml-pipeline' + value: 'ml-pipeline', }, { name: 'region', - value: 'us-central1' + value: 'us-central1', }, { name: 'cluster', - value: 'xgboost-spark-xgboost-training-gzkm9' + value: 'xgboost-spark-xgboost-training-gzkm9', }, { name: 'train', - value: 'gs://ml-pipeline-playground/newsgroup/train.csv' + value: 'gs://ml-pipeline-playground/newsgroup/train.csv', }, { name: 'schema', - value: 'gs://ml-pipeline-playground/newsgroup/schema.json' - } - ] + value: 'gs://ml-pipeline-playground/newsgroup/schema.json', + }, + ], }, outputs: { - artifacts: [{ - name: 'mlpipeline-ui-metadata', - s3: { - bucket: 'somebucket', - key: 'analysis', + artifacts: [ + { + name: 'mlpipeline-ui-metadata', + s3: { + bucket: 'somebucket', + key: 'analysis', + }, }, - }], + ], }, - children: [ - 'xgboost-training-gzkm9-915503087' - ] + children: ['xgboost-training-gzkm9-915503087'], }, 'xgboost-training-gzkm9-3721733163': { id: 'xgboost-training-gzkm9-3721733163', @@ -1147,18 +1121,18 @@ export default { parameters: [ { name: 'project', - value: 'ml-pipeline' + value: 'ml-pipeline', }, { name: 'region', - value: 'us-central1' + value: 'us-central1', }, { name: 'name', - value: 'xgboost-spark-xgboost-training-gzkm9' - } - ] - } + value: 'xgboost-spark-xgboost-training-gzkm9', + }, + ], + }, }, 'xgboost-training-gzkm9-4204210601': { id: 'xgboost-training-gzkm9-4204210601', @@ -1169,9 +1143,7 @@ export default { boundaryID: 'xgboost-training-gzkm9', startedAt: '2018-04-16T23:37:48Z', finishedAt: '2018-04-16T23:39:56Z', - children: [ - 'xgboost-training-gzkm9-2411879589' - ] + children: ['xgboost-training-gzkm9-2411879589'], }, 'xgboost-training-gzkm9-4204798981': { id: 'xgboost-training-gzkm9-4204798981', @@ -1182,9 +1154,7 @@ export default { boundaryID: 'xgboost-training-gzkm9', startedAt: '2018-04-17T00:08:06Z', finishedAt: '2018-04-17T00:08:59Z', - children: [ - 'xgboost-training-gzkm9-1761585008' - ] + children: ['xgboost-training-gzkm9-1761585008'], }, 'xgboost-training-gzkm9-915503087': { id: 'xgboost-training-gzkm9-915503087', @@ -1195,9 +1165,7 @@ export default { boundaryID: 'xgboost-training-gzkm9', startedAt: '2018-04-16T23:41:24Z', finishedAt: '2018-04-16T23:49:28Z', - children: [ - 'xgboost-training-gzkm9-2457131397' - ] + 
children: ['xgboost-training-gzkm9-2457131397'], }, 'xgboost-training-gzkm9-916047540': { id: 'xgboost-training-gzkm9-916047540', @@ -1208,9 +1176,7 @@ export default { boundaryID: 'xgboost-training-gzkm9', startedAt: '2018-04-16T23:39:56Z', finishedAt: '2018-04-16T23:41:24Z', - children: [ - 'xgboost-training-gzkm9-3636935406' - ] + children: ['xgboost-training-gzkm9-3636935406'], }, 'xgboost-training-gzkm9-916635920': { id: 'xgboost-training-gzkm9-916635920', @@ -1221,9 +1187,7 @@ export default { boundaryID: 'xgboost-training-gzkm9', startedAt: '2018-04-17T00:08:59Z', finishedAt: '2018-04-17T00:10:06Z', - children: [ - 'xgboost-training-gzkm9-2203328319' - ] + children: ['xgboost-training-gzkm9-2203328319'], }, 'xgboost-training-gzkm9-982760658': { id: 'xgboost-training-gzkm9-982760658', @@ -1234,10 +1198,8 @@ export default { boundaryID: 'xgboost-training-gzkm9', startedAt: '2018-04-16T23:49:28Z', finishedAt: '2018-04-17T00:08:06Z', - children: [ - 'xgboost-training-gzkm9-2365787662' - ] - } - } - } + children: ['xgboost-training-gzkm9-2365787662'], + }, + }, + }, }; diff --git a/frontend/mock-backend/mock-xgboost-small-runtime.ts b/frontend/mock-backend/mock-xgboost-small-runtime.ts index d30db340133..650a70f0776 100644 --- a/frontend/mock-backend/mock-xgboost-small-runtime.ts +++ b/frontend/mock-backend/mock-xgboost-small-runtime.ts @@ -17,7 +17,8 @@ export default { metadata: { name: 'job-xgboosttrainingm7t2r-1-2537408167', namespace: 'default', - selfLink: '/apis/argoproj.io/v1alpha1/namespaces/default/workflows/job-xgboosttrainingm7t2r-1-2537408167', + selfLink: + '/apis/argoproj.io/v1alpha1/namespaces/default/workflows/job-xgboosttrainingm7t2r-1-2537408167', uid: '3333210c-cdef-11e8-8c17-42010a8a0078', resourceVersion: '24210', creationTimestamp: '2018-10-12T07:19:47Z', @@ -26,7 +27,7 @@ export default { 'scheduledworkflows.kubeflow.org/scheduledWorkflowName': 'job-xgboosttrainingm7t2r', 'scheduledworkflows.kubeflow.org/workflowEpoch': '1539328777', 'scheduledworkflows.kubeflow.org/workflowIndex': '1', - 'workflows.argoproj.io/phase': 'Running' + 'workflows.argoproj.io/phase': 'Running', }, ownerReferences: [ { @@ -35,9 +36,9 @@ export default { name: 'job-xgboosttrainingm7t2r', uid: '2d3b0ed1-cdef-11e8-8c17-42010a8a0078', controller: true, - blockOwnerDeletion: true - } - ] + blockOwnerDeletion: true, + }, + ], }, spec: { templates: [ @@ -46,25 +47,25 @@ export default { inputs: { parameters: [ { - name: 'create-cluster-output' + name: 'create-cluster-output', }, { - name: 'output' + name: 'output', }, { - name: 'project' - } - ] + name: 'project', + }, + ], }, outputs: { parameters: [ { name: 'analyze-output', valueFrom: { - path: '/output.txt' - } - } - ] + path: '/output.txt', + }, + }, + ], }, metadata: {}, container: { @@ -82,22 +83,22 @@ export default { '--train', 'gs://ml-pipeline-playground/sfpd/train.csv', '--output', - '{{inputs.parameters.output}}/{{workflow.name}}/analysis' + '{{inputs.parameters.output}}/{{workflow.name}}/analysis', ], - resources: {} - } + resources: {}, + }, }, { name: 'confusion-matrix', inputs: { parameters: [ { - name: 'output' + name: 'output', }, { - name: 'predict-output' - } - ] + name: 'predict-output', + }, + ], }, outputs: {}, metadata: {}, @@ -108,32 +109,32 @@ export default { '--output', '{{inputs.parameters.output}}/{{workflow.name}}/confusionmatrix', '--predictions', - '{{inputs.parameters.predict-output}}' + '{{inputs.parameters.predict-output}}', ], - resources: {} - } + resources: {}, + }, }, { name: 'create-cluster', inputs: { 
parameters: [ { - name: 'output' + name: 'output', }, { - name: 'project' - } - ] + name: 'project', + }, + ], }, outputs: { parameters: [ { name: 'create-cluster-output', valueFrom: { - path: '/output.txt' - } - } - ] + path: '/output.txt', + }, + }, + ], }, metadata: {}, container: { @@ -147,19 +148,19 @@ export default { '--name', 'xgb-{{workflow.name}}', '--staging', - '{{inputs.parameters.output}}' + '{{inputs.parameters.output}}', ], - resources: {} - } + resources: {}, + }, }, { name: 'delete-cluster', inputs: { parameters: [ { - name: 'project' - } - ] + name: 'project', + }, + ], }, outputs: {}, metadata: {}, @@ -172,22 +173,22 @@ export default { '--region', 'us-central1', '--name', - 'xgb-{{workflow.name}}' + 'xgb-{{workflow.name}}', ], - resources: {} - } + resources: {}, + }, }, { name: 'exit-handler-1', inputs: { parameters: [ { - name: 'output' + name: 'output', }, { - name: 'project' - } - ] + name: 'project', + }, + ], }, outputs: {}, metadata: {}, @@ -200,21 +201,19 @@ export default { parameters: [ { name: 'create-cluster-output', - value: '{{tasks.create-cluster.outputs.parameters.create-cluster-output}}' + value: '{{tasks.create-cluster.outputs.parameters.create-cluster-output}}', }, { name: 'output', - value: '{{inputs.parameters.output}}' + value: '{{inputs.parameters.output}}', }, { name: 'project', - value: '{{inputs.parameters.project}}' - } - ] + value: '{{inputs.parameters.project}}', + }, + ], }, - dependencies: [ - 'create-cluster' - ] + dependencies: ['create-cluster'], }, { name: 'confusion-matrix', @@ -223,17 +222,15 @@ export default { parameters: [ { name: 'output', - value: '{{inputs.parameters.output}}' + value: '{{inputs.parameters.output}}', }, { name: 'predict-output', - value: '{{tasks.predict.outputs.parameters.predict-output}}' - } - ] + value: '{{tasks.predict.outputs.parameters.predict-output}}', + }, + ], }, - dependencies: [ - 'predict' - ] + dependencies: ['predict'], }, { name: 'create-cluster', @@ -242,14 +239,14 @@ export default { parameters: [ { name: 'output', - value: '{{inputs.parameters.output}}' + value: '{{inputs.parameters.output}}', }, { name: 'project', - value: '{{inputs.parameters.project}}' - } - ] - } + value: '{{inputs.parameters.project}}', + }, + ], + }, }, { name: 'predict', @@ -258,36 +255,31 @@ export default { parameters: [ { name: 'analyze-output', - value: '{{tasks.analyze.outputs.parameters.analyze-output}}' + value: '{{tasks.analyze.outputs.parameters.analyze-output}}', }, { name: 'create-cluster-output', - value: '{{tasks.create-cluster.outputs.parameters.create-cluster-output}}' + value: '{{tasks.create-cluster.outputs.parameters.create-cluster-output}}', }, { name: 'output', - value: '{{inputs.parameters.output}}' + value: '{{inputs.parameters.output}}', }, { name: 'project', - value: '{{inputs.parameters.project}}' + value: '{{inputs.parameters.project}}', }, { name: 'train-output', - value: '{{tasks.train.outputs.parameters.train-output}}' + value: '{{tasks.train.outputs.parameters.train-output}}', }, { name: 'transform-eval', - value: '{{tasks.transform.outputs.parameters.transform-eval}}' - } - ] + value: '{{tasks.transform.outputs.parameters.transform-eval}}', + }, + ], }, - dependencies: [ - 'analyze', - 'create-cluster', - 'train', - 'transform' - ] + dependencies: ['analyze', 'create-cluster', 'train', 'transform'], }, { name: 'roc', @@ -296,17 +288,15 @@ export default { parameters: [ { name: 'output', - value: '{{inputs.parameters.output}}' + value: '{{inputs.parameters.output}}', }, { name: 
'predict-output', - value: '{{tasks.predict.outputs.parameters.predict-output}}' - } - ] + value: '{{tasks.predict.outputs.parameters.predict-output}}', + }, + ], }, - dependencies: [ - 'predict' - ] + dependencies: ['predict'], }, { name: 'train', @@ -315,35 +305,31 @@ export default { parameters: [ { name: 'analyze-output', - value: '{{tasks.analyze.outputs.parameters.analyze-output}}' + value: '{{tasks.analyze.outputs.parameters.analyze-output}}', }, { name: 'create-cluster-output', - value: '{{tasks.create-cluster.outputs.parameters.create-cluster-output}}' + value: '{{tasks.create-cluster.outputs.parameters.create-cluster-output}}', }, { name: 'output', - value: '{{inputs.parameters.output}}' + value: '{{inputs.parameters.output}}', }, { name: 'project', - value: '{{inputs.parameters.project}}' + value: '{{inputs.parameters.project}}', }, { name: 'transform-eval', - value: '{{tasks.transform.outputs.parameters.transform-eval}}' + value: '{{tasks.transform.outputs.parameters.transform-eval}}', }, { name: 'transform-train', - value: '{{tasks.transform.outputs.parameters.transform-train}}' - } - ] + value: '{{tasks.transform.outputs.parameters.transform-train}}', + }, + ], }, - dependencies: [ - 'analyze', - 'create-cluster', - 'transform' - ] + dependencies: ['analyze', 'create-cluster', 'transform'], }, { name: 'transform', @@ -352,63 +338,60 @@ export default { parameters: [ { name: 'analyze-output', - value: '{{tasks.analyze.outputs.parameters.analyze-output}}' + value: '{{tasks.analyze.outputs.parameters.analyze-output}}', }, { name: 'create-cluster-output', - value: '{{tasks.create-cluster.outputs.parameters.create-cluster-output}}' + value: '{{tasks.create-cluster.outputs.parameters.create-cluster-output}}', }, { name: 'output', - value: '{{inputs.parameters.output}}' + value: '{{inputs.parameters.output}}', }, { name: 'project', - value: '{{inputs.parameters.project}}' - } - ] + value: '{{inputs.parameters.project}}', + }, + ], }, - dependencies: [ - 'analyze', - 'create-cluster' - ] - } - ] - } + dependencies: ['analyze', 'create-cluster'], + }, + ], + }, }, { name: 'predict', inputs: { parameters: [ { - name: 'analyze-output' + name: 'analyze-output', }, { - name: 'create-cluster-output' + name: 'create-cluster-output', }, { - name: 'output' + name: 'output', }, { - name: 'project' + name: 'project', }, { - name: 'train-output' + name: 'train-output', }, { - name: 'transform-eval' - } - ] + name: 'transform-eval', + }, + ], }, outputs: { parameters: [ { name: 'predict-output', valueFrom: { - path: '/output.txt' - } - } - ] + path: '/output.txt', + }, + }, + ], }, metadata: {}, container: { @@ -432,22 +415,22 @@ export default { '--model', '{{inputs.parameters.train-output}}', '--output', - '{{inputs.parameters.output}}/{{workflow.name}}/predict' + '{{inputs.parameters.output}}/{{workflow.name}}/predict', ], - resources: {} - } + resources: {}, + }, }, { name: 'roc', inputs: { parameters: [ { - name: 'output' + name: 'output', }, { - name: 'predict-output' - } - ] + name: 'predict-output', + }, + ], }, outputs: {}, metadata: {}, @@ -460,44 +443,44 @@ export default { '--predictions', '{{inputs.parameters.predict-output}}', '--trueclass', - 'ACTION' + 'ACTION', ], - resources: {} - } + resources: {}, + }, }, { name: 'train', inputs: { parameters: [ { - name: 'analyze-output' + name: 'analyze-output', }, { - name: 'create-cluster-output' + name: 'create-cluster-output', }, { - name: 'output' + name: 'output', }, { - name: 'project' + name: 'project', }, { - name: 'transform-eval' 
+ name: 'transform-eval', }, { - name: 'transform-train' - } - ] + name: 'transform-train', + }, + ], }, outputs: { parameters: [ { name: 'train-output', valueFrom: { - path: '/output.txt' - } - } - ] + path: '/output.txt', + }, + }, + ], }, metadata: {}, container: { @@ -527,44 +510,44 @@ export default { '--conf', 'gs://ml-pipeline-playground/trainconfcla.json', '--output', - '{{inputs.parameters.output}}/{{workflow.name}}/model' + '{{inputs.parameters.output}}/{{workflow.name}}/model', ], - resources: {} - } + resources: {}, + }, }, { name: 'transform', inputs: { parameters: [ { - name: 'analyze-output' + name: 'analyze-output', }, { - name: 'create-cluster-output' + name: 'create-cluster-output', }, { - name: 'output' + name: 'output', }, { - name: 'project' - } - ] + name: 'project', + }, + ], }, outputs: { parameters: [ { name: 'transform-eval', valueFrom: { - path: '/output_eval.txt' - } + path: '/output_eval.txt', + }, }, { name: 'transform-train', valueFrom: { - path: '/output_train.txt' - } - } - ] + path: '/output_train.txt', + }, + }, + ], }, metadata: {}, container: { @@ -586,22 +569,22 @@ export default { '--target', 'resolution', '--output', - '{{inputs.parameters.output}}/{{workflow.name}}/transform' + '{{inputs.parameters.output}}/{{workflow.name}}/transform', ], - resources: {} - } + resources: {}, + }, }, { name: 'xgboosttrainer', inputs: { parameters: [ { - name: 'output' + name: 'output', }, { - name: 'project' - } - ] + name: 'project', + }, + ], }, outputs: {}, metadata: {}, @@ -614,65 +597,65 @@ export default { parameters: [ { name: 'output', - value: '{{inputs.parameters.output}}' + value: '{{inputs.parameters.output}}', }, { name: 'project', - value: '{{inputs.parameters.project}}' - } - ] - } - } - ] - } - } + value: '{{inputs.parameters.project}}', + }, + ], + }, + }, + ], + }, + }, ], entrypoint: 'xgboosttrainer', arguments: { parameters: [ { name: 'output', - value: 'gs://yelsayed-2/xgboost' + value: 'gs://yelsayed-2/xgboost', }, { name: 'project', - value: 'yelsayed-2' + value: 'yelsayed-2', }, { name: 'region', - value: 'us-central1' + value: 'us-central1', }, { name: 'train-data', - value: 'gs://ml-pipeline-playground/sfpd/train.csv' + value: 'gs://ml-pipeline-playground/sfpd/train.csv', }, { name: 'eval-data', - value: 'gs://ml-pipeline-playground/sfpd/eval.csv' + value: 'gs://ml-pipeline-playground/sfpd/eval.csv', }, { name: 'schema', - value: 'gs://ml-pipeline-playground/sfpd/schema.json' + value: 'gs://ml-pipeline-playground/sfpd/schema.json', }, { name: 'target', - value: 'resolution' + value: 'resolution', }, { name: 'rounds', - value: '200' + value: '200', }, { name: 'workers', - value: '2' + value: '2', }, { name: 'true-label', - value: 'ACTION' - } - ] + value: 'ACTION', + }, + ], }, - onExit: 'delete-cluster' + onExit: 'delete-cluster', }, status: { phase: 'Running', @@ -692,17 +675,15 @@ export default { parameters: [ { name: 'output', - value: 'gs://yelsayed-2/xgboost' + value: 'gs://yelsayed-2/xgboost', }, { name: 'project', - value: 'yelsayed-2' - } - ] + value: 'yelsayed-2', + }, + ], }, - children: [ - 'job-xgboosttrainingm7t2r-1-2537408167-3348277322' - ] + children: ['job-xgboosttrainingm7t2r-1-2537408167-3348277322'], }, 'job-xgboosttrainingm7t2r-1-2537408167-294182655': { id: 'job-xgboosttrainingm7t2r-1-2537408167-294182655', @@ -712,21 +693,22 @@ export default { templateName: 'create-cluster', phase: 'Pending', boundaryID: 'job-xgboosttrainingm7t2r-1-2537408167-3348277322', - message: 'ImagePullBackOff: Back-off pulling image 
"gcr.io/ml-pipeline/ml-pipeline-dataproc-create-cluster"', + message: + 'ImagePullBackOff: Back-off pulling image "gcr.io/ml-pipeline/ml-pipeline-dataproc-create-cluster"', startedAt: '2018-10-12T07:19:47Z', finishedAt: null, inputs: { parameters: [ { name: 'output', - value: 'gs://yelsayed-2/xgboost' + value: 'gs://yelsayed-2/xgboost', }, { name: 'project', - value: 'yelsayed-2' - } - ] - } + value: 'yelsayed-2', + }, + ], + }, }, 'job-xgboosttrainingm7t2r-1-2537408167-3348277322': { id: 'job-xgboosttrainingm7t2r-1-2537408167-3348277322', @@ -742,18 +724,16 @@ export default { parameters: [ { name: 'output', - value: 'gs://yelsayed-2/xgboost' + value: 'gs://yelsayed-2/xgboost', }, { name: 'project', - value: 'yelsayed-2' - } - ] - }, - children: [ - 'job-xgboosttrainingm7t2r-1-2537408167-294182655' - ] - } - } - } + value: 'yelsayed-2', + }, + ], + }, + children: ['job-xgboosttrainingm7t2r-1-2537408167-294182655'], + }, + }, + }, }; diff --git a/frontend/mock-backend/proxy-middleware.ts b/frontend/mock-backend/proxy-middleware.ts new file mode 100644 index 00000000000..0a4cdf8b111 --- /dev/null +++ b/frontend/mock-backend/proxy-middleware.ts @@ -0,0 +1,85 @@ +// Copyright 2018 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import * as express from 'express'; +import proxy from 'http-proxy-middleware'; +import { URL, URLSearchParams } from 'url'; + +export function _extractUrlFromReferer(proxyPrefix: string, referer = ''): string { + const index = referer.indexOf(proxyPrefix); + return index > -1 ? referer.substr(index + proxyPrefix.length) : ''; +} + +export function _trimProxyPrefix(proxyPrefix: string, path: string): string { + return path.indexOf(proxyPrefix) === 0 ? (path = path.substr(proxyPrefix.length)) : path; +} + +export function _routePathWithReferer(proxyPrefix: string, path: string, referer = ''): string { + // If a referer header is included, extract the referer URL, otherwise + // just trim out the /_proxy/ prefix. Use the origin of the resulting URL. + const proxiedUrlInReferer = _extractUrlFromReferer(proxyPrefix, referer); + let decodedPath = decodeURIComponent(proxiedUrlInReferer || _trimProxyPrefix(proxyPrefix, path)); + if (!decodedPath.startsWith('http://') && !decodedPath.startsWith('https://')) { + decodedPath = 'http://' + decodedPath; + } + return new URL(decodedPath).origin; +} + +export function _rewritePath(proxyPrefix: string, path: string, query: string): string { + // Trim the proxy prefix if exists. It won't exist for any requests made + // to absolute paths by the proxied resource. + const querystring = new URLSearchParams(query).toString(); + const decodedPath = decodeURIComponent(path); + return _trimProxyPrefix(proxyPrefix, decodedPath) + (querystring && '?' 
+ querystring); +} + +export default (app: express.Application, apisPrefix: string) => { + const proxyPrefix = apisPrefix + '/_proxy/'; + + app.use((req, _, next) => { + // For any request that has a proxy referer header but no proxy prefix, + // prepend the proxy prefix to it and redirect. + if (req.headers.referer) { + const refererUrl = _extractUrlFromReferer(proxyPrefix, req.headers.referer as string); + if (refererUrl && req.url.indexOf(proxyPrefix) !== 0) { + let proxiedUrl = decodeURIComponent( + _extractUrlFromReferer(proxyPrefix, req.headers.referer as string), + ); + if (!proxiedUrl.startsWith('http://') && !proxiedUrl.startsWith('https://')) { + proxiedUrl = 'http://' + proxiedUrl; + } + const proxiedOrigin = new URL(proxiedUrl).origin; + req.url = proxyPrefix + encodeURIComponent(proxiedOrigin + req.url); + } + } + next(); + }); + + app.all( + proxyPrefix + '*', + proxy({ + changeOrigin: true, + logLevel: 'debug', + target: 'http://127.0.0.1', + + router: (req: any) => { + return _routePathWithReferer(proxyPrefix, req.path, req.headers.referer as string); + }, + + pathRewrite: (_: any, req: any) => { + return _rewritePath(proxyPrefix, req.path, req.query); + }, + }), + ); +}; diff --git a/frontend/mock-backend/tsconfig.json b/frontend/mock-backend/tsconfig.json new file mode 100644 index 00000000000..a3a7b2aa4be --- /dev/null +++ b/frontend/mock-backend/tsconfig.json @@ -0,0 +1,24 @@ +{ + "compilerOptions": { + "allowSyntheticDefaultImports": true, + "baseUrl": ".", + "outDir": "build/dist", + "module": "esnext", + "target": "es5", + "lib": ["es6", "dom", "es2019"], + "sourceMap": true, + "allowJs": true, + "resolveJsonModule": true, + "jsx": "react", + "moduleResolution": "node", + "noImplicitReturns": true, + "noImplicitThis": true, + "noImplicitAny": true, + "strictBindCallApply": true, + "strictNullChecks": true, + "suppressImplicitAnyIndexErrors": true, + "skipLibCheck": true, + "esModuleInterop": true + }, + "exclude": ["dist", "coverage"] +} diff --git a/frontend/package-lock.json b/frontend/package-lock.json index d939c24ca74..b8a8d373a67 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -8,3581 +8,6515 @@ "version": "7.0.0", "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.0.0.tgz", "integrity": "sha512-OfC2uemaknXr87bdLUkWog7nYuliM9Ij5HUcajsVcMCpQrcLmtxRbVFTIqmcSkSeYRBFBRxs2FiUqFJDLdiebA==", + "dev": true, "requires": { "@babel/highlight": "^7.0.0" } }, - "@babel/highlight": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.0.0.tgz", - "integrity": "sha512-UFMC4ZeFC48Tpvj7C8UgLvtkaUuovQX+5xNWrsIoMG8o2z+XFKjKaN9iVmS84dPwVN00W4wPmqvYoZF3EGAsfw==", + "@babel/compat-data": { + "version": "7.9.0", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.9.0.tgz", + "integrity": "sha512-zeFQrr+284Ekvd9e7KAX954LkapWiOmQtsfHirhxqfdlX6MEC32iRE+pqUGlYIBchdevaCwvzxWGSy/YBNI85g==", + "dev": true, "requires": { - "chalk": "^2.0.0", - "esutils": "^2.0.2", - "js-tokens": "^4.0.0" + "browserslist": "^4.9.1", + "invariant": "^2.2.4", + "semver": "^5.5.0" }, "dependencies": { - "chalk": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.1.tgz", - "integrity": "sha512-ObN6h1v2fTJSmUXoS3nMQ92LbDK9be4TV+6G+omQlGJFdcUX5heKi1LZ1YnRMIgwTLEj3E24bT6tYni50rlCfQ==", + "browserslist": { + "version": "4.11.1", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.11.1.tgz", + "integrity": 
"sha512-DCTr3kDrKEYNw6Jb9HFxVLQNaue8z+0ZfRBRjmCunKDEXEBajKDj2Y+Uelg+Pi29OnvaSGwjOsnRyNEkXzHg5g==", + "dev": true, "requires": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" + "caniuse-lite": "^1.0.30001038", + "electron-to-chromium": "^1.3.390", + "node-releases": "^1.1.53", + "pkg-up": "^2.0.0" } + }, + "caniuse-lite": { + "version": "1.0.30001039", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001039.tgz", + "integrity": "sha512-SezbWCTT34eyFoWHgx8UWso7YtvtM7oosmFoXbCkdC6qJzRfBTeTgE9REtKtiuKXuMwWTZEvdnFNGAyVMorv8Q==", + "dev": true + }, + "electron-to-chromium": { + "version": "1.3.399", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.3.399.tgz", + "integrity": "sha512-+NBhK0/v17pls7CSh3Cx5Ir3tsGmtLPMMAO4Nz272bre2wzdykLEsev5wjOd3rYMt2/kSS681ufFT7Dywxq1sw==", + "dev": true + }, + "node-releases": { + "version": "1.1.53", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-1.1.53.tgz", + "integrity": "sha512-wp8zyQVwef2hpZ/dJH7SfSrIPD6YoJz6BDQDpGEkcA0s3LpAQoxBIYmfIq6QAhC1DhwsyCgTaTTcONwX8qzCuQ==", + "dev": true } } }, - "@babel/polyfill": { - "version": "7.4.4", - "resolved": "https://registry.npmjs.org/@babel/polyfill/-/polyfill-7.4.4.tgz", - "integrity": "sha512-WlthFLfhQQhh+A2Gn5NSFl0Huxz36x86Jn+E9OW7ibK8edKPq+KLy4apM1yDpQ8kJOVi1OVjpP4vSDLdrI04dg==", + "@babel/core": { + "version": "7.9.0", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.9.0.tgz", + "integrity": "sha512-kWc7L0fw1xwvI0zi8OKVBuxRVefwGOrKSQMvrQ3dW+bIIavBY3/NpXmpjMy7bQnLgwgzWQZ8TlM57YHpHNHz4w==", + "dev": true, "requires": { - "core-js": "^2.6.5", - "regenerator-runtime": "^0.13.2" + "@babel/code-frame": "^7.8.3", + "@babel/generator": "^7.9.0", + "@babel/helper-module-transforms": "^7.9.0", + "@babel/helpers": "^7.9.0", + "@babel/parser": "^7.9.0", + "@babel/template": "^7.8.6", + "@babel/traverse": "^7.9.0", + "@babel/types": "^7.9.0", + "convert-source-map": "^1.7.0", + "debug": "^4.1.0", + "gensync": "^1.0.0-beta.1", + "json5": "^2.1.2", + "lodash": "^4.17.13", + "resolve": "^1.3.2", + "semver": "^5.4.1", + "source-map": "^0.5.0" }, "dependencies": { - "core-js": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/core-js/-/core-js-2.6.9.tgz", - "integrity": "sha512-HOpZf6eXmnl7la+cUdMnLvUxKNqLUzJvgIziQ0DiF3JwSImNphIqdGqzj6hIKyX04MmV0poclQ7+wjWvxQyR2A==" + "@babel/code-frame": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.8.3.tgz", + "integrity": "sha512-a9gxpmdXtZEInkCSHUJDLHZVBgb1QS0jhss4cPP93EW7s+uC5bikET2twEF3KV+7rDblJcmNvTR7VJejqd2C2g==", + "dev": true, + "requires": { + "@babel/highlight": "^7.8.3" + } }, - "regenerator-runtime": { - "version": "0.13.3", - "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.3.tgz", - "integrity": "sha512-naKIZz2GQ8JWh///G7L3X6LaQUAMp2lvb1rvwwsURe/VXwD6VMfr+/1NuNw3ag8v2kY1aQ/go5SNn79O9JU7yw==" + "@babel/highlight": { + "version": "7.9.0", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.9.0.tgz", + "integrity": "sha512-lJZPilxX7Op3Nv/2cvFdnlepPXDxi29wxteT57Q965oc5R9v86ztx0jfxVrTcBk8C2kcPkkDa2Z4T3ZsPPVWsQ==", + "dev": true, + "requires": { + "@babel/helper-validator-identifier": "^7.9.0", + "chalk": "^2.0.0", + "js-tokens": "^4.0.0" + } + }, + "@babel/types": { + "version": "7.9.5", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.9.5.tgz", + "integrity": 
"sha512-XjnvNqenk818r5zMaba+sLQjnbda31UfUURv3ei0qPQw4u+j2jMyJ5b11y8ZHYTRSI3NnInQkkkRT4fLqqPdHg==", + "dev": true, + "requires": { + "@babel/helper-validator-identifier": "^7.9.5", + "lodash": "^4.17.13", + "to-fast-properties": "^2.0.0" + } + }, + "debug": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", + "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", + "dev": true, + "requires": { + "ms": "^2.1.1" + } + }, + "json5": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/json5/-/json5-2.1.3.tgz", + "integrity": "sha512-KXPvOm8K9IJKFM0bmdn8QXh7udDh1g/giieX0NLCaMnb4hEiVFqnop2ImTXCc5e0/oHz3LTqmHGtExn5hfMkOA==", + "dev": true, + "requires": { + "minimist": "^1.2.5" + } + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true } } }, - "@babel/runtime": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.2.0.tgz", - "integrity": "sha512-oouEibCbHMVdZSDlJBO6bZmID/zA/G/Qx3H1d3rSNPTD+L8UNKvCat7aKWSJ74zYbm5zWGh0GQN0hKj8zYFTCg==", + "@babel/generator": { + "version": "7.9.5", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.9.5.tgz", + "integrity": "sha512-GbNIxVB3ZJe3tLeDm1HSn2AhuD/mVcyLDpgtLXa5tplmWrJdF/elxB56XNqCuD6szyNkDi6wuoKXln3QeBmCHQ==", + "dev": true, "requires": { - "regenerator-runtime": "^0.12.0" + "@babel/types": "^7.9.5", + "jsesc": "^2.5.1", + "lodash": "^4.17.13", + "source-map": "^0.5.0" + }, + "dependencies": { + "@babel/types": { + "version": "7.9.5", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.9.5.tgz", + "integrity": "sha512-XjnvNqenk818r5zMaba+sLQjnbda31UfUURv3ei0qPQw4u+j2jMyJ5b11y8ZHYTRSI3NnInQkkkRT4fLqqPdHg==", + "dev": true, + "requires": { + "@babel/helper-validator-identifier": "^7.9.5", + "lodash": "^4.17.13", + "to-fast-properties": "^2.0.0" + } + } } }, - "@material-ui/core": { - "version": "3.7.1", - "resolved": "https://registry.npmjs.org/@material-ui/core/-/core-3.7.1.tgz", - "integrity": "sha512-CjIGwvzn84BgzXWzC9M/Tz2gDI7AfUe3G1JXkZQAVy+ddPikh+iZwn5snnElfcjuC+ahXxaIyK49ARt3NM49vQ==", + "@babel/helper-annotate-as-pure": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.8.3.tgz", + "integrity": "sha512-6o+mJrZBxOoEX77Ezv9zwW7WV8DdluouRKNY/IR5u/YTMuKHgugHOzYWlYvYLpLA9nPsQCAAASpCIbjI9Mv+Uw==", + "dev": true, "requires": { - "@babel/runtime": "7.2.0", - "@material-ui/utils": "^3.0.0-alpha.1", - "@types/jss": "^9.5.6", - "@types/react-transition-group": "^2.0.8", - "brcast": "^3.0.1", - "classnames": "^2.2.5", - "csstype": "^2.5.2", - "debounce": "^1.1.0", - "deepmerge": "^3.0.0", - "dom-helpers": "^3.2.1", - "hoist-non-react-statics": "^3.2.1", - "is-plain-object": "^2.0.4", - "jss": "^9.8.7", - "jss-camel-case": "^6.0.0", - "jss-default-unit": "^8.0.2", - "jss-global": "^3.0.0", - "jss-nested": "^6.0.1", - "jss-props-sort": "^6.0.0", - "jss-vendor-prefixer": "^7.0.0", - "keycode": "^2.1.9", - "normalize-scroll-left": "^0.1.2", - "popper.js": "^1.14.1", - "prop-types": "^15.6.0", - "react-event-listener": "^0.6.2", - "react-transition-group": "^2.2.1", - "recompose": "0.28.0 - 0.30.0", - "warning": "^4.0.1" + "@babel/types": "^7.8.3" + }, + "dependencies": { + "@babel/types": { + "version": "7.9.5", + "resolved": 
"https://registry.npmjs.org/@babel/types/-/types-7.9.5.tgz", + "integrity": "sha512-XjnvNqenk818r5zMaba+sLQjnbda31UfUURv3ei0qPQw4u+j2jMyJ5b11y8ZHYTRSI3NnInQkkkRT4fLqqPdHg==", + "dev": true, + "requires": { + "@babel/helper-validator-identifier": "^7.9.5", + "lodash": "^4.17.13", + "to-fast-properties": "^2.0.0" + } + } } }, - "@material-ui/icons": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/@material-ui/icons/-/icons-3.0.1.tgz", - "integrity": "sha512-1kNcxYiIT1x8iDPEAlgmKrfRTIV8UyK6fLVcZ9kMHIKGWft9I451V5mvSrbCjbf7MX1TbLWzZjph0aVCRf9MqQ==", + "@babel/helper-builder-binary-assignment-operator-visitor": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.8.3.tgz", + "integrity": "sha512-5eFOm2SyFPK4Rh3XMMRDjN7lBH0orh3ss0g3rTYZnBQ+r6YPj7lgDyCvPphynHvUrobJmeMignBr6Acw9mAPlw==", + "dev": true, "requires": { - "@babel/runtime": "7.0.0", - "recompose": "^0.29.0" + "@babel/helper-explode-assignable-expression": "^7.8.3", + "@babel/types": "^7.8.3" }, "dependencies": { - "@babel/runtime": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.0.0.tgz", - "integrity": "sha512-7hGhzlcmg01CvH1EHdSPVXYX1aJ8KCEyz6I9xYIi/asDtzBPMyMhVibhM/K6g/5qnKBwjZtp10bNZIEFTRW1MA==", - "requires": { - "regenerator-runtime": "^0.12.0" - } - }, - "hoist-non-react-statics": { - "version": "2.5.5", - "resolved": "https://registry.npmjs.org/hoist-non-react-statics/-/hoist-non-react-statics-2.5.5.tgz", - "integrity": "sha512-rqcy4pJo55FTTLWt+bU8ukscqHeE/e9KWvsOW2b/a3afxQZhwkQdT1rPPCJ0rYXdj4vNcasY8zHTH+jF/qStxw==" - }, - "recompose": { - "version": "0.29.0", - "resolved": "https://registry.npmjs.org/recompose/-/recompose-0.29.0.tgz", - "integrity": "sha512-J/qLXNU4W+AeHCDR70ajW8eMd1uroqZaECTj6qqDLPMILz3y0EzpYlvrnxKB9DnqcngWrtGwjXY9JeXaW9kS1A==", + "@babel/types": { + "version": "7.9.5", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.9.5.tgz", + "integrity": "sha512-XjnvNqenk818r5zMaba+sLQjnbda31UfUURv3ei0qPQw4u+j2jMyJ5b11y8ZHYTRSI3NnInQkkkRT4fLqqPdHg==", + "dev": true, "requires": { - "@babel/runtime": "^7.0.0", - "change-emitter": "^0.1.2", - "fbjs": "^0.8.1", - "hoist-non-react-statics": "^2.3.1", - "react-lifecycles-compat": "^3.0.2", - "symbol-observable": "^1.0.4" + "@babel/helper-validator-identifier": "^7.9.5", + "lodash": "^4.17.13", + "to-fast-properties": "^2.0.0" } } } }, - "@material-ui/utils": { - "version": "3.0.0-alpha.2", - "resolved": "https://registry.npmjs.org/@material-ui/utils/-/utils-3.0.0-alpha.2.tgz", - "integrity": "sha512-dnxwCXMSLFFXiQm3EK/Ikxm4oYvr3WxxVoPZ0Uh4CBqZNr0J8nzDNlDcGP/0UeC134UUz2ZwgbTL/smxrGZ8sg==", + "@babel/helper-builder-react-jsx": { + "version": "7.9.0", + "resolved": "https://registry.npmjs.org/@babel/helper-builder-react-jsx/-/helper-builder-react-jsx-7.9.0.tgz", + "integrity": "sha512-weiIo4gaoGgnhff54GQ3P5wsUQmnSwpkvU0r6ZHq6TzoSzKy4JxHEgnxNytaKbov2a9z/CVNyzliuCOUPEX3Jw==", + "dev": true, "requires": { - "@babel/runtime": "7.2.0", - "prop-types": "^15.6.0", - "react-is": "^16.6.3" + "@babel/helper-annotate-as-pure": "^7.8.3", + "@babel/types": "^7.9.0" + }, + "dependencies": { + "@babel/types": { + "version": "7.9.5", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.9.5.tgz", + "integrity": "sha512-XjnvNqenk818r5zMaba+sLQjnbda31UfUURv3ei0qPQw4u+j2jMyJ5b11y8ZHYTRSI3NnInQkkkRT4fLqqPdHg==", + "dev": true, + "requires": { + "@babel/helper-validator-identifier": 
"^7.9.5", + "lodash": "^4.17.13", + "to-fast-properties": "^2.0.0" + } + } } }, - "@types/body-parser": { - "version": "1.17.0", - "resolved": "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.17.0.tgz", - "integrity": "sha512-a2+YeUjPkztKJu5aIF2yArYFQQp8d51wZ7DavSHjFuY1mqVgidGyzEQ41JIVNy82fXj8yPgy2vJmfIywgESW6w==", + "@babel/helper-builder-react-jsx-experimental": { + "version": "7.9.5", + "resolved": "https://registry.npmjs.org/@babel/helper-builder-react-jsx-experimental/-/helper-builder-react-jsx-experimental-7.9.5.tgz", + "integrity": "sha512-HAagjAC93tk748jcXpZ7oYRZH485RCq/+yEv9SIWezHRPv9moZArTnkUNciUNzvwHUABmiWKlcxJvMcu59UwTg==", "dev": true, "requires": { - "@types/connect": "*", - "@types/node": "*" + "@babel/helper-annotate-as-pure": "^7.8.3", + "@babel/helper-module-imports": "^7.8.3", + "@babel/types": "^7.9.5" + }, + "dependencies": { + "@babel/types": { + "version": "7.9.5", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.9.5.tgz", + "integrity": "sha512-XjnvNqenk818r5zMaba+sLQjnbda31UfUURv3ei0qPQw4u+j2jMyJ5b11y8ZHYTRSI3NnInQkkkRT4fLqqPdHg==", + "dev": true, + "requires": { + "@babel/helper-validator-identifier": "^7.9.5", + "lodash": "^4.17.13", + "to-fast-properties": "^2.0.0" + } + } } }, - "@types/cheerio": { - "version": "0.22.10", - "resolved": "https://registry.npmjs.org/@types/cheerio/-/cheerio-0.22.10.tgz", - "integrity": "sha512-fOM/Jhv51iyugY7KOBZz2ThfT1gwvsGCfWxpLpZDgkGjpEO4Le9cld07OdskikLjDUQJ43dzDaVRSFwQlpdqVg==", - "dev": true - }, - "@types/connect": { - "version": "3.4.32", - "resolved": "https://registry.npmjs.org/@types/connect/-/connect-3.4.32.tgz", - "integrity": "sha512-4r8qa0quOvh7lGD0pre62CAb1oni1OO6ecJLGCezTmhQ8Fz50Arx9RUszryR8KlgK6avuSXvviL6yWyViQABOg==", + "@babel/helper-compilation-targets": { + "version": "7.8.7", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.8.7.tgz", + "integrity": "sha512-4mWm8DCK2LugIS+p1yArqvG1Pf162upsIsjE7cNBjez+NjliQpVhj20obE520nao0o14DaTnFJv+Fw5a0JpoUw==", "dev": true, "requires": { - "@types/node": "*" + "@babel/compat-data": "^7.8.6", + "browserslist": "^4.9.1", + "invariant": "^2.2.4", + "levenary": "^1.1.1", + "semver": "^5.5.0" + }, + "dependencies": { + "browserslist": { + "version": "4.11.1", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.11.1.tgz", + "integrity": "sha512-DCTr3kDrKEYNw6Jb9HFxVLQNaue8z+0ZfRBRjmCunKDEXEBajKDj2Y+Uelg+Pi29OnvaSGwjOsnRyNEkXzHg5g==", + "dev": true, + "requires": { + "caniuse-lite": "^1.0.30001038", + "electron-to-chromium": "^1.3.390", + "node-releases": "^1.1.53", + "pkg-up": "^2.0.0" + } + }, + "caniuse-lite": { + "version": "1.0.30001039", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001039.tgz", + "integrity": "sha512-SezbWCTT34eyFoWHgx8UWso7YtvtM7oosmFoXbCkdC6qJzRfBTeTgE9REtKtiuKXuMwWTZEvdnFNGAyVMorv8Q==", + "dev": true + }, + "electron-to-chromium": { + "version": "1.3.399", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.3.399.tgz", + "integrity": "sha512-+NBhK0/v17pls7CSh3Cx5Ir3tsGmtLPMMAO4Nz272bre2wzdykLEsev5wjOd3rYMt2/kSS681ufFT7Dywxq1sw==", + "dev": true + }, + "node-releases": { + "version": "1.1.53", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-1.1.53.tgz", + "integrity": "sha512-wp8zyQVwef2hpZ/dJH7SfSrIPD6YoJz6BDQDpGEkcA0s3LpAQoxBIYmfIq6QAhC1DhwsyCgTaTTcONwX8qzCuQ==", + "dev": true + } } }, - "@types/core-js": { - "version": "0.9.46", - "resolved": 
"https://registry.npmjs.org/@types/core-js/-/core-js-0.9.46.tgz", - "integrity": "sha512-LooLR6XHes9V+kNYRz1Qm8w3atw9QMn7XeZUmIpUelllF9BdryeUKd/u0Wh5ErcjpWfG39NrToU9MF7ngsTFVw==", - "dev": true - }, - "@types/d3": { - "version": "5.5.0", - "resolved": "https://registry.npmjs.org/@types/d3/-/d3-5.5.0.tgz", - "integrity": "sha512-Bz9EAhWnaO93jLYSAT13blgzwP5Z0grO5THBOXSMeWHIIFHA7ntJSLpHSCr1kDtQunEZKCYT9OfE+4lYY/PwlA==", + "@babel/helper-create-class-features-plugin": { + "version": "7.9.5", + "resolved": "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.9.5.tgz", + "integrity": "sha512-IipaxGaQmW4TfWoXdqjY0TzoXQ1HRS0kPpEgvjosb3u7Uedcq297xFqDQiCcQtRRwzIMif+N1MLVI8C5a4/PAA==", "dev": true, "requires": { - "@types/d3-array": "*", - "@types/d3-axis": "*", - "@types/d3-brush": "*", - "@types/d3-chord": "*", - "@types/d3-collection": "*", - "@types/d3-color": "*", - "@types/d3-contour": "*", - "@types/d3-dispatch": "*", - "@types/d3-drag": "*", - "@types/d3-dsv": "*", - "@types/d3-ease": "*", - "@types/d3-fetch": "*", - "@types/d3-force": "*", - "@types/d3-format": "*", - "@types/d3-geo": "*", - "@types/d3-hierarchy": "*", - "@types/d3-interpolate": "*", - "@types/d3-path": "*", - "@types/d3-polygon": "*", - "@types/d3-quadtree": "*", - "@types/d3-random": "*", - "@types/d3-scale": "*", - "@types/d3-scale-chromatic": "*", - "@types/d3-selection": "*", - "@types/d3-shape": "*", - "@types/d3-time": "*", - "@types/d3-time-format": "*", - "@types/d3-timer": "*", - "@types/d3-transition": "*", - "@types/d3-voronoi": "*", - "@types/d3-zoom": "*" + "@babel/helper-function-name": "^7.9.5", + "@babel/helper-member-expression-to-functions": "^7.8.3", + "@babel/helper-optimise-call-expression": "^7.8.3", + "@babel/helper-plugin-utils": "^7.8.3", + "@babel/helper-replace-supers": "^7.8.6", + "@babel/helper-split-export-declaration": "^7.8.3" } }, - "@types/d3-array": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/@types/d3-array/-/d3-array-1.2.4.tgz", - "integrity": "sha512-3r1fOAAb+SGfcOGXty/LGvoP0ovMec4UtGNUyHOSzYyvSGpmt+eNMxLowol/3HryusevznSfcHZebEShXMwsZA==", - "dev": true - }, - "@types/d3-axis": { - "version": "1.0.11", - "resolved": "https://registry.npmjs.org/@types/d3-axis/-/d3-axis-1.0.11.tgz", - "integrity": "sha512-cuigApCyCwYJxaQPghj+BqaxzbdRdT/lpZBMtF7EuEIJ61NMQ8yvGnqFvHCIgJEmUu2Wb2wiZqy9kiHi3Ddftg==", + "@babel/helper-create-regexp-features-plugin": { + "version": "7.8.8", + "resolved": "https://registry.npmjs.org/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.8.8.tgz", + "integrity": "sha512-LYVPdwkrQEiX9+1R29Ld/wTrmQu1SSKYnuOk3g0CkcZMA1p0gsNxJFj/3gBdaJ7Cg0Fnek5z0DsMULePP7Lrqg==", "dev": true, "requires": { - "@types/d3-selection": "*" + "@babel/helper-annotate-as-pure": "^7.8.3", + "@babel/helper-regex": "^7.8.3", + "regexpu-core": "^4.7.0" } }, - "@types/d3-brush": { - "version": "1.0.9", - "resolved": "https://registry.npmjs.org/@types/d3-brush/-/d3-brush-1.0.9.tgz", - "integrity": "sha512-mAx8IVc0luUHfk51pl0UN1vzybnAzLMUsvIwLt3fbsqqPkSXr+Pu1AxOPPeyNc27LhHJnfH/LCV7Jlv+Yzqu1A==", + "@babel/helper-define-map": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/helper-define-map/-/helper-define-map-7.8.3.tgz", + "integrity": "sha512-PoeBYtxoZGtct3md6xZOCWPcKuMuk3IHhgxsRRNtnNShebf4C8YonTSblsK4tvDbm+eJAw2HAPOfCr+Q/YRG/g==", "dev": true, "requires": { - "@types/d3-selection": "*" - } - }, - "@types/d3-chord": { - "version": "1.0.8", - "resolved": 
"https://registry.npmjs.org/@types/d3-chord/-/d3-chord-1.0.8.tgz", - "integrity": "sha512-F0ftYOo7FenAIxsRjXLt8vbij0NLDuVcL+xaGY7R9jUmF2Mrpj1T5XukBI9Cad+Ei7YSxEWREIO+CYcaKCl2qQ==", - "dev": true - }, - "@types/d3-collection": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/@types/d3-collection/-/d3-collection-1.0.7.tgz", - "integrity": "sha512-vR3BT0GwHc5y93Jv6bxn3zoxP/vGu+GdXu/r1ApjbP9dLk9I2g6NiV7iP/QMQSuFZd0It0n/qWrfXHxCWwHIkg==", - "dev": true - }, - "@types/d3-color": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/@types/d3-color/-/d3-color-1.2.1.tgz", - "integrity": "sha512-xwb1tqvYNWllbHuhMFhiXk63Imf+QNq/dJdmbXmr2wQVnwGenCuj3/0IWJ9hdIFQIqzvhT7T37cvx93jtAsDbQ==", - "dev": true + "@babel/helper-function-name": "^7.8.3", + "@babel/types": "^7.8.3", + "lodash": "^4.17.13" + }, + "dependencies": { + "@babel/types": { + "version": "7.9.5", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.9.5.tgz", + "integrity": "sha512-XjnvNqenk818r5zMaba+sLQjnbda31UfUURv3ei0qPQw4u+j2jMyJ5b11y8ZHYTRSI3NnInQkkkRT4fLqqPdHg==", + "dev": true, + "requires": { + "@babel/helper-validator-identifier": "^7.9.5", + "lodash": "^4.17.13", + "to-fast-properties": "^2.0.0" + } + } + } }, - "@types/d3-contour": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/@types/d3-contour/-/d3-contour-1.3.0.tgz", - "integrity": "sha512-AUCUIjEnC5lCGBM9hS+MryRaFLIrPls4Rbv6ktqbd+TK/RXZPwOy9rtBWmGpbeXcSOYCJTUDwNJuEnmYPJRxHQ==", + "@babel/helper-explode-assignable-expression": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/helper-explode-assignable-expression/-/helper-explode-assignable-expression-7.8.3.tgz", + "integrity": "sha512-N+8eW86/Kj147bO9G2uclsg5pwfs/fqqY5rwgIL7eTBklgXjcOJ3btzS5iM6AitJcftnY7pm2lGsrJVYLGjzIw==", "dev": true, "requires": { - "@types/d3-array": "*", - "@types/geojson": "*" + "@babel/traverse": "^7.8.3", + "@babel/types": "^7.8.3" + }, + "dependencies": { + "@babel/types": { + "version": "7.9.5", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.9.5.tgz", + "integrity": "sha512-XjnvNqenk818r5zMaba+sLQjnbda31UfUURv3ei0qPQw4u+j2jMyJ5b11y8ZHYTRSI3NnInQkkkRT4fLqqPdHg==", + "dev": true, + "requires": { + "@babel/helper-validator-identifier": "^7.9.5", + "lodash": "^4.17.13", + "to-fast-properties": "^2.0.0" + } + } } }, - "@types/d3-dispatch": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/@types/d3-dispatch/-/d3-dispatch-1.0.6.tgz", - "integrity": "sha512-xyWJQMr832vqhu6fD/YqX+MSFBWnkxasNhcStvlhqygXxj0cKqPft0wuGoH5TIq5ADXgP83qeNVa4R7bEYN3uA==", - "dev": true - }, - "@types/d3-drag": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/@types/d3-drag/-/d3-drag-1.2.2.tgz", - "integrity": "sha512-+UKFeaMVTfSQvMO0PTzOyLXSr7OZbF2Rx1iNVwo2XsyiOsd4MSuLyJKUwRmGn67044QpbNzr+VD6/8iBBLExWw==", + "@babel/helper-function-name": { + "version": "7.9.5", + "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.9.5.tgz", + "integrity": "sha512-JVcQZeXM59Cd1qanDUxv9fgJpt3NeKUaqBqUEvfmQ+BCOKq2xUgaWZW2hr0dkbyJgezYuplEoh5knmrnS68efw==", "dev": true, "requires": { - "@types/d3-selection": "*" + "@babel/helper-get-function-arity": "^7.8.3", + "@babel/template": "^7.8.3", + "@babel/types": "^7.9.5" + }, + "dependencies": { + "@babel/types": { + "version": "7.9.5", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.9.5.tgz", + "integrity": "sha512-XjnvNqenk818r5zMaba+sLQjnbda31UfUURv3ei0qPQw4u+j2jMyJ5b11y8ZHYTRSI3NnInQkkkRT4fLqqPdHg==", + "dev": 
true, + "requires": { + "@babel/helper-validator-identifier": "^7.9.5", + "lodash": "^4.17.13", + "to-fast-properties": "^2.0.0" + } + } } }, - "@types/d3-dsv": { - "version": "1.0.35", - "resolved": "https://registry.npmjs.org/@types/d3-dsv/-/d3-dsv-1.0.35.tgz", - "integrity": "sha512-QeH7cN9phcm68TDwpSGmzE71/JtGoKZ2rZJABNUMQ7nYIhHkm2UldqI1Cp2pjEo8ycSeutudjzq+Lfim/ZCadQ==", - "dev": true - }, - "@types/d3-ease": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/@types/d3-ease/-/d3-ease-1.0.7.tgz", - "integrity": "sha1-k6MBhovp4VBh89RDQ7GrP4rLbwk=", - "dev": true - }, - "@types/d3-fetch": { - "version": "1.1.5", - "resolved": "https://registry.npmjs.org/@types/d3-fetch/-/d3-fetch-1.1.5.tgz", - "integrity": "sha512-o9c0ItT5/Gl3wbNuVpzRnYX1t3RghzeWAjHUVLuyZJudiTxC4f/fC0ZPFWLQ2lVY8pAMmxpV8TJ6ETYCgPeI3A==", + "@babel/helper-get-function-arity": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/helper-get-function-arity/-/helper-get-function-arity-7.8.3.tgz", + "integrity": "sha512-FVDR+Gd9iLjUMY1fzE2SR0IuaJToR4RkCDARVfsBBPSP53GEqSFjD8gNyxg246VUyc/ALRxFaAK8rVG7UT7xRA==", "dev": true, "requires": { - "@types/d3-dsv": "*" + "@babel/types": "^7.8.3" + }, + "dependencies": { + "@babel/types": { + "version": "7.9.5", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.9.5.tgz", + "integrity": "sha512-XjnvNqenk818r5zMaba+sLQjnbda31UfUURv3ei0qPQw4u+j2jMyJ5b11y8ZHYTRSI3NnInQkkkRT4fLqqPdHg==", + "dev": true, + "requires": { + "@babel/helper-validator-identifier": "^7.9.5", + "lodash": "^4.17.13", + "to-fast-properties": "^2.0.0" + } + } } }, - "@types/d3-force": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/@types/d3-force/-/d3-force-1.1.1.tgz", - "integrity": "sha512-ePkELuaFWY4yOuf+Bvx5Xd+ihFiYG4bdnW0BlvigovIm8Sob2t76e9RGO6lybQbv6AlW9Icn9HuZ9fmdzEoJyg==", - "dev": true - }, - "@types/d3-format": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/@types/d3-format/-/d3-format-1.3.0.tgz", - "integrity": "sha512-ZiY4j3iJvAdOwzwW24WjlZbUNvqOsnPAMfPBmdXqxj3uKJbrzBlRrdGl5uC89pZpFs9Dc92E81KcwG2uEgkIZA==", - "dev": true - }, - "@types/d3-geo": { - "version": "1.10.3", - "resolved": "https://registry.npmjs.org/@types/d3-geo/-/d3-geo-1.10.3.tgz", - "integrity": "sha512-hfdaxM2L0wA9mDZrrSf2o+DyhEpnJYCiAN+lHFtpfZOVCQrYBA5g33sGRpUbAvjSMyO5jkHbftMWPEhuCMChSg==", + "@babel/helper-hoist-variables": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/helper-hoist-variables/-/helper-hoist-variables-7.8.3.tgz", + "integrity": "sha512-ky1JLOjcDUtSc+xkt0xhYff7Z6ILTAHKmZLHPxAhOP0Nd77O+3nCsd6uSVYur6nJnCI029CrNbYlc0LoPfAPQg==", "dev": true, "requires": { - "@types/geojson": "*" + "@babel/types": "^7.8.3" + }, + "dependencies": { + "@babel/types": { + "version": "7.9.5", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.9.5.tgz", + "integrity": "sha512-XjnvNqenk818r5zMaba+sLQjnbda31UfUURv3ei0qPQw4u+j2jMyJ5b11y8ZHYTRSI3NnInQkkkRT4fLqqPdHg==", + "dev": true, + "requires": { + "@babel/helper-validator-identifier": "^7.9.5", + "lodash": "^4.17.13", + "to-fast-properties": "^2.0.0" + } + } } }, - "@types/d3-hierarchy": { - "version": "1.1.5", - "resolved": "https://registry.npmjs.org/@types/d3-hierarchy/-/d3-hierarchy-1.1.5.tgz", - "integrity": "sha512-DKhqURrURt2c7MsF9sHiF2wrWf2+yZR4Q9oIG026t/ZY4VWoM0Yd7UonaR+rygyReWcFSEjKC/+5A27TgD8R8g==", - "dev": true - }, - "@types/d3-interpolate": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/@types/d3-interpolate/-/d3-interpolate-1.3.0.tgz", - 
"integrity": "sha512-Ng4ds7kPSvP/c3W3J5PPUQlgewif1tGBqCeh5lgY+UG82Y7H9zQ8c2gILsEFDLg7wRGOwnuKZ940Q/LSN14w9w==", + "@babel/helper-member-expression-to-functions": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.8.3.tgz", + "integrity": "sha512-fO4Egq88utkQFjbPrSHGmGLFqmrshs11d46WI+WZDESt7Wu7wN2G2Iu+NMMZJFDOVRHAMIkB5SNh30NtwCA7RA==", "dev": true, "requires": { - "@types/d3-color": "*" + "@babel/types": "^7.8.3" + }, + "dependencies": { + "@babel/types": { + "version": "7.9.5", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.9.5.tgz", + "integrity": "sha512-XjnvNqenk818r5zMaba+sLQjnbda31UfUURv3ei0qPQw4u+j2jMyJ5b11y8ZHYTRSI3NnInQkkkRT4fLqqPdHg==", + "dev": true, + "requires": { + "@babel/helper-validator-identifier": "^7.9.5", + "lodash": "^4.17.13", + "to-fast-properties": "^2.0.0" + } + } } }, - "@types/d3-path": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/@types/d3-path/-/d3-path-1.0.7.tgz", - "integrity": "sha512-U8dFRG+8WhkLJr2sxZ9Cw/5WeRgBnNqMxGdA1+Z0+ZG6tK0s75OQ4OXnxeyfKuh6E4wQPY8OAKr1+iNDx01BEQ==", - "dev": true - }, - "@types/d3-polygon": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/@types/d3-polygon/-/d3-polygon-1.0.6.tgz", - "integrity": "sha512-E6Kyodn9JThgLq20nxSbEce9ow5/ePgm9PX2EO6W1INIL4DayM7cFaiG10DStuamjYAd0X4rntW2q+GRjiIktw==", - "dev": true - }, - "@types/d3-quadtree": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/@types/d3-quadtree/-/d3-quadtree-1.0.6.tgz", - "integrity": "sha512-sphVuDdiSIaxLt9kQgebJW98pTktQ/xuN7Ysd8X68Rnjeg/q8+c36/ShlqU52qoKg9nob/JEHH1uQMdxURZidQ==", - "dev": true - }, - "@types/d3-random": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/@types/d3-random/-/d3-random-1.1.1.tgz", - "integrity": "sha512-jUPeBq1XKK9/5XasTvy5QAUwFeMsjma2yt/nP02yC2Tijovx7i/W5776U/HZugxc5SSmtpx4Z3g9KFVon0QrjQ==", - "dev": true - }, - "@types/d3-scale": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/@types/d3-scale/-/d3-scale-2.1.0.tgz", - "integrity": "sha512-vLzRDF5lRxZdCLUOvmw90pkiuSsZdgroBQaat0Ov7Z7OnO9iJsPSm/TZw3wW6m2z/NhIn1E4N0RLNfEi1k4kAA==", + "@babel/helper-module-imports": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.8.3.tgz", + "integrity": "sha512-R0Bx3jippsbAEtzkpZ/6FIiuzOURPcMjHp+Z6xPe6DtApDJx+w7UYyOLanZqO8+wKR9G10s/FmHXvxaMd9s6Kg==", "dev": true, "requires": { - "@types/d3-time": "*" + "@babel/types": "^7.8.3" + }, + "dependencies": { + "@babel/types": { + "version": "7.9.5", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.9.5.tgz", + "integrity": "sha512-XjnvNqenk818r5zMaba+sLQjnbda31UfUURv3ei0qPQw4u+j2jMyJ5b11y8ZHYTRSI3NnInQkkkRT4fLqqPdHg==", + "dev": true, + "requires": { + "@babel/helper-validator-identifier": "^7.9.5", + "lodash": "^4.17.13", + "to-fast-properties": "^2.0.0" + } + } } }, - "@types/d3-scale-chromatic": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/@types/d3-scale-chromatic/-/d3-scale-chromatic-1.3.0.tgz", - "integrity": "sha512-JqQH5uu1kmdQEa6XSu7NYzQM71lL1YreBPS5o8SnmEDcBRKL6ooykXa8iFPPOEUiTah25ydi+cTrbsogBSMNSQ==", - "dev": true - }, - "@types/d3-selection": { - "version": "1.3.4", - "resolved": "https://registry.npmjs.org/@types/d3-selection/-/d3-selection-1.3.4.tgz", - "integrity": "sha512-WQ6Ivy7VuUlZ/Grqc8493ZxC+y/fpvZLy5+8ELvmCr2hll8eJPUqC05l6fgRRA7kjqlpbH7lbmvY6pRKf6yzxw==", - "dev": true - }, - 
"@types/d3-shape": { - "version": "1.2.7", - "resolved": "https://registry.npmjs.org/@types/d3-shape/-/d3-shape-1.2.7.tgz", - "integrity": "sha512-b2jpGcddOseeNxchaR1SNLqA5xZAbgKix3cXiFeuGeYIEAEUu91UbtelCxOHIUTbNURFnjcbkf4plRbejNzVaQ==", + "@babel/helper-module-transforms": { + "version": "7.9.0", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.9.0.tgz", + "integrity": "sha512-0FvKyu0gpPfIQ8EkxlrAydOWROdHpBmiCiRwLkUiBGhCUPRRbVD2/tm3sFr/c/GWFrQ/ffutGUAnx7V0FzT2wA==", "dev": true, "requires": { - "@types/d3-path": "*" + "@babel/helper-module-imports": "^7.8.3", + "@babel/helper-replace-supers": "^7.8.6", + "@babel/helper-simple-access": "^7.8.3", + "@babel/helper-split-export-declaration": "^7.8.3", + "@babel/template": "^7.8.6", + "@babel/types": "^7.9.0", + "lodash": "^4.17.13" + }, + "dependencies": { + "@babel/types": { + "version": "7.9.5", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.9.5.tgz", + "integrity": "sha512-XjnvNqenk818r5zMaba+sLQjnbda31UfUURv3ei0qPQw4u+j2jMyJ5b11y8ZHYTRSI3NnInQkkkRT4fLqqPdHg==", + "dev": true, + "requires": { + "@babel/helper-validator-identifier": "^7.9.5", + "lodash": "^4.17.13", + "to-fast-properties": "^2.0.0" + } + } } }, - "@types/d3-time": { - "version": "1.0.9", - "resolved": "https://registry.npmjs.org/@types/d3-time/-/d3-time-1.0.9.tgz", - "integrity": "sha512-m+D4NbQdDlTVaO7QgXAnatR3IDxQYDMBtRhgSCi5rs9R1LPq1y7/2aqa1FJ2IWjFm1mOV63swDxonnCDlHgHMA==", - "dev": true - }, - "@types/d3-time-format": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/@types/d3-time-format/-/d3-time-format-2.1.0.tgz", - "integrity": "sha512-/myT3I7EwlukNOX2xVdMzb8FRgNzRMpsZddwst9Ld/VFe6LyJyRp0s32l/V9XoUzk+Gqu56F/oGk6507+8BxrA==", - "dev": true - }, - "@types/d3-timer": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/@types/d3-timer/-/d3-timer-1.0.8.tgz", - "integrity": "sha512-AKUgQ/nljUFcUO2P3gK24weVI5XwUTdJvjoh8gJ0yxT4aJ+d7t2Or3TB+k9dEYl14BAjoj32D0ky+YzQSVszfg==", - "dev": true - }, - "@types/d3-transition": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/@types/d3-transition/-/d3-transition-1.1.3.tgz", - "integrity": "sha512-1EukXNuVu/z2G1GZpZagzFJnie9C5zze17ox/vhTgGXNy46rYAm4UkhLLlUeeZ1ndq88k95SOeC8898RpKMLOQ==", + "@babel/helper-optimise-call-expression": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.8.3.tgz", + "integrity": "sha512-Kag20n86cbO2AvHca6EJsvqAd82gc6VMGule4HwebwMlwkpXuVqrNRj6CkCV2sKxgi9MyAUnZVnZ6lJ1/vKhHQ==", "dev": true, "requires": { - "@types/d3-selection": "*" + "@babel/types": "^7.8.3" + }, + "dependencies": { + "@babel/types": { + "version": "7.9.5", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.9.5.tgz", + "integrity": "sha512-XjnvNqenk818r5zMaba+sLQjnbda31UfUURv3ei0qPQw4u+j2jMyJ5b11y8ZHYTRSI3NnInQkkkRT4fLqqPdHg==", + "dev": true, + "requires": { + "@babel/helper-validator-identifier": "^7.9.5", + "lodash": "^4.17.13", + "to-fast-properties": "^2.0.0" + } + } } }, - "@types/d3-voronoi": { - "version": "1.1.8", - "resolved": "https://registry.npmjs.org/@types/d3-voronoi/-/d3-voronoi-1.1.8.tgz", - "integrity": "sha512-zqNhW7QsYQGlfOdrwPNPG3Wk64zUa4epKRurkJ/dVc6oeXrB+iTDt8sRZ0KZKOOXvvfa1dcdB0e45TZeLBiodQ==", + "@babel/helper-plugin-utils": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.8.3.tgz", + "integrity": 
"sha512-j+fq49Xds2smCUNYmEHF9kGNkhbet6yVIBp4e6oeQpH1RUs/Ir06xUKzDjDkGcaaokPiTNs2JBWHjaE4csUkZQ==", "dev": true }, - "@types/d3-zoom": { - "version": "1.7.3", - "resolved": "https://registry.npmjs.org/@types/d3-zoom/-/d3-zoom-1.7.3.tgz", - "integrity": "sha512-Tz7+z4+Id0MxERw/ozinC5QHJmGLARs9Mpi/7VVfiR+9AHcFGe9q+fjQa30/oPNY8WPuCh5p5uuXmBYAJ3y91Q==", + "@babel/helper-regex": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/helper-regex/-/helper-regex-7.8.3.tgz", + "integrity": "sha512-BWt0QtYv/cg/NecOAZMdcn/waj/5P26DR4mVLXfFtDokSR6fyuG0Pj+e2FqtSME+MqED1khnSMulkmGl8qWiUQ==", "dev": true, "requires": { - "@types/d3-interpolate": "*", - "@types/d3-selection": "*" + "lodash": "^4.17.13" } }, - "@types/dagre": { - "version": "0.7.40", - "resolved": "https://registry.npmjs.org/@types/dagre/-/dagre-0.7.40.tgz", - "integrity": "sha512-XvaIdpHNW4AeyvPpKuvdpN8yn4RZBztPhJtLeHRtspCNUcgrZn/B9lA0KMIUlyVdIH77f82KzErR+H/ayrnoyQ==", - "dev": true - }, - "@types/enzyme": { - "version": "3.1.15", - "resolved": "https://registry.npmjs.org/@types/enzyme/-/enzyme-3.1.15.tgz", - "integrity": "sha512-6b4JWgV+FNec1c4+8HauGbXg5gRc1oQK93t2+4W+bHjG/PzO+iPvagY6d6bXAZ+t+ps51Zb2F9LQ4vl0S0Epog==", + "@babel/helper-remap-async-to-generator": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.8.3.tgz", + "integrity": "sha512-kgwDmw4fCg7AVgS4DukQR/roGp+jP+XluJE5hsRZwxCYGg+Rv9wSGErDWhlI90FODdYfd4xG4AQRiMDjjN0GzA==", "dev": true, "requires": { - "@types/cheerio": "*", - "@types/react": "*" + "@babel/helper-annotate-as-pure": "^7.8.3", + "@babel/helper-wrap-function": "^7.8.3", + "@babel/template": "^7.8.3", + "@babel/traverse": "^7.8.3", + "@babel/types": "^7.8.3" + }, + "dependencies": { + "@babel/types": { + "version": "7.9.5", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.9.5.tgz", + "integrity": "sha512-XjnvNqenk818r5zMaba+sLQjnbda31UfUURv3ei0qPQw4u+j2jMyJ5b11y8ZHYTRSI3NnInQkkkRT4fLqqPdHg==", + "dev": true, + "requires": { + "@babel/helper-validator-identifier": "^7.9.5", + "lodash": "^4.17.13", + "to-fast-properties": "^2.0.0" + } + } } }, - "@types/enzyme-adapter-react-16": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/@types/enzyme-adapter-react-16/-/enzyme-adapter-react-16-1.0.3.tgz", - "integrity": "sha512-9eRLBsC/Djkys05BdTWgav8v6fSCjyzjNuLwG2sfa2b2g/VAN10luP0zB0VwtOWFQ0LGjIboJJvIsVdU5gqRmg==", + "@babel/helper-replace-supers": { + "version": "7.8.6", + "resolved": "https://registry.npmjs.org/@babel/helper-replace-supers/-/helper-replace-supers-7.8.6.tgz", + "integrity": "sha512-PeMArdA4Sv/Wf4zXwBKPqVj7n9UF/xg6slNRtZW84FM7JpE1CbG8B612FyM4cxrf4fMAMGO0kR7voy1ForHHFA==", "dev": true, "requires": { - "@types/enzyme": "*" + "@babel/helper-member-expression-to-functions": "^7.8.3", + "@babel/helper-optimise-call-expression": "^7.8.3", + "@babel/traverse": "^7.8.6", + "@babel/types": "^7.8.6" + }, + "dependencies": { + "@babel/types": { + "version": "7.9.5", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.9.5.tgz", + "integrity": "sha512-XjnvNqenk818r5zMaba+sLQjnbda31UfUURv3ei0qPQw4u+j2jMyJ5b11y8ZHYTRSI3NnInQkkkRT4fLqqPdHg==", + "dev": true, + "requires": { + "@babel/helper-validator-identifier": "^7.9.5", + "lodash": "^4.17.13", + "to-fast-properties": "^2.0.0" + } + } } }, - "@types/events": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/@types/events/-/events-1.2.0.tgz", - "integrity": 
"sha512-KEIlhXnIutzKwRbQkGWb/I4HFqBuUykAdHgDED6xqwXJfONCjF5VoE0cXEiurh3XauygxzeDzgtXUqvLkxFzzA==", - "dev": true - }, - "@types/express": { - "version": "4.16.0", - "resolved": "https://registry.npmjs.org/@types/express/-/express-4.16.0.tgz", - "integrity": "sha512-TtPEYumsmSTtTetAPXlJVf3kEqb6wZK0bZojpJQrnD/djV4q1oB6QQ8aKvKqwNPACoe02GNiy5zDzcYivR5Z2w==", + "@babel/helper-simple-access": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.8.3.tgz", + "integrity": "sha512-VNGUDjx5cCWg4vvCTR8qQ7YJYZ+HBjxOgXEl7ounz+4Sn7+LMD3CFrCTEU6/qXKbA2nKg21CwhhBzO0RpRbdCw==", "dev": true, "requires": { - "@types/body-parser": "*", - "@types/express-serve-static-core": "*", - "@types/serve-static": "*" + "@babel/template": "^7.8.3", + "@babel/types": "^7.8.3" + }, + "dependencies": { + "@babel/types": { + "version": "7.9.5", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.9.5.tgz", + "integrity": "sha512-XjnvNqenk818r5zMaba+sLQjnbda31UfUURv3ei0qPQw4u+j2jMyJ5b11y8ZHYTRSI3NnInQkkkRT4fLqqPdHg==", + "dev": true, + "requires": { + "@babel/helper-validator-identifier": "^7.9.5", + "lodash": "^4.17.13", + "to-fast-properties": "^2.0.0" + } + } } }, - "@types/express-serve-static-core": { - "version": "4.16.0", - "resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-4.16.0.tgz", - "integrity": "sha512-lTeoCu5NxJU4OD9moCgm0ESZzweAx0YqsAcab6OB0EB3+As1OaHtKnaGJvcngQxYsi9UNv0abn4/DRavrRxt4w==", + "@babel/helper-split-export-declaration": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.8.3.tgz", + "integrity": "sha512-3x3yOeyBhW851hroze7ElzdkeRXQYQbFIb7gLK1WQYsw2GWDay5gAJNw1sWJ0VFP6z5J1whqeXH/WCdCjZv6dA==", "dev": true, "requires": { - "@types/events": "*", - "@types/node": "*", - "@types/range-parser": "*" + "@babel/types": "^7.8.3" + }, + "dependencies": { + "@babel/types": { + "version": "7.9.5", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.9.5.tgz", + "integrity": "sha512-XjnvNqenk818r5zMaba+sLQjnbda31UfUURv3ei0qPQw4u+j2jMyJ5b11y8ZHYTRSI3NnInQkkkRT4fLqqPdHg==", + "dev": true, + "requires": { + "@babel/helper-validator-identifier": "^7.9.5", + "lodash": "^4.17.13", + "to-fast-properties": "^2.0.0" + } + } } }, - "@types/geojson": { - "version": "7946.0.4", - "resolved": "https://registry.npmjs.org/@types/geojson/-/geojson-7946.0.4.tgz", - "integrity": "sha512-MHmwBtCb7OCv1DSivz2UNJXPGU/1btAWRKlqJ2saEhVJkpkvqHMMaOpKg0v4sAbDWSQekHGvPVMM8nQ+Jen03Q==", + "@babel/helper-validator-identifier": { + "version": "7.9.5", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.9.5.tgz", + "integrity": "sha512-/8arLKUFq882w4tWGj9JYzRpAlZgiWUJ+dtteNTDqrRBz9Iguck9Rn3ykuBDoUwh2TO4tSAJlrxDUOXWklJe4g==", "dev": true }, - "@types/history": { - "version": "4.7.2", - "resolved": "https://registry.npmjs.org/@types/history/-/history-4.7.2.tgz", - "integrity": "sha512-ui3WwXmjTaY73fOQ3/m3nnajU/Orhi6cEu5rzX+BrAAJxa3eITXZ5ch9suPqtM03OWhAHhPSyBGCN4UKoxO20Q==", - "dev": true + "@babel/helper-wrap-function": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/helper-wrap-function/-/helper-wrap-function-7.8.3.tgz", + "integrity": "sha512-LACJrbUET9cQDzb6kG7EeD7+7doC3JNvUgTEQOx2qaO1fKlzE/Bf05qs9w1oXQMmXlPO65lC3Tq9S6gZpTErEQ==", + "dev": true, + "requires": { + "@babel/helper-function-name": "^7.8.3", + "@babel/template": "^7.8.3", + 
"@babel/traverse": "^7.8.3", + "@babel/types": "^7.8.3" + }, + "dependencies": { + "@babel/types": { + "version": "7.9.5", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.9.5.tgz", + "integrity": "sha512-XjnvNqenk818r5zMaba+sLQjnbda31UfUURv3ei0qPQw4u+j2jMyJ5b11y8ZHYTRSI3NnInQkkkRT4fLqqPdHg==", + "dev": true, + "requires": { + "@babel/helper-validator-identifier": "^7.9.5", + "lodash": "^4.17.13", + "to-fast-properties": "^2.0.0" + } + } + } }, - "@types/http-proxy": { - "version": "1.16.2", - "resolved": "https://registry.npmjs.org/@types/http-proxy/-/http-proxy-1.16.2.tgz", - "integrity": "sha512-GgqePmC3rlsn1nv+kx5OviPuUBU2omhnlXOaJSXFgOdsTcScNFap+OaCb2ip9Bm4m5L8EOehgT5d9M4uNB90zg==", + "@babel/helpers": { + "version": "7.9.2", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.9.2.tgz", + "integrity": "sha512-JwLvzlXVPjO8eU9c/wF9/zOIN7X6h8DYf7mG4CiFRZRvZNKEF5dQ3H3V+ASkHoIB3mWhatgl5ONhyqHRI6MppA==", "dev": true, "requires": { - "@types/events": "*", - "@types/node": "*" + "@babel/template": "^7.8.3", + "@babel/traverse": "^7.9.0", + "@babel/types": "^7.9.0" + }, + "dependencies": { + "@babel/types": { + "version": "7.9.5", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.9.5.tgz", + "integrity": "sha512-XjnvNqenk818r5zMaba+sLQjnbda31UfUURv3ei0qPQw4u+j2jMyJ5b11y8ZHYTRSI3NnInQkkkRT4fLqqPdHg==", + "dev": true, + "requires": { + "@babel/helper-validator-identifier": "^7.9.5", + "lodash": "^4.17.13", + "to-fast-properties": "^2.0.0" + } + } } }, - "@types/http-proxy-middleware": { - "version": "0.17.6", - "resolved": "https://registry.npmjs.org/@types/http-proxy-middleware/-/http-proxy-middleware-0.17.6.tgz", - "integrity": "sha512-NocuMc3omR+yySlkgZlNUDyJa9ENGuwX8Ev7Y9zO//H989drWp18Fn+oAgZZIPu+JWtNinIxENK2TZvo53o3tw==", + "@babel/highlight": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.0.0.tgz", + "integrity": "sha512-UFMC4ZeFC48Tpvj7C8UgLvtkaUuovQX+5xNWrsIoMG8o2z+XFKjKaN9iVmS84dPwVN00W4wPmqvYoZF3EGAsfw==", "dev": true, "requires": { - "@types/connect": "*", - "@types/http-proxy": "*", - "@types/node": "*" + "chalk": "^2.0.0", + "esutils": "^2.0.2", + "js-tokens": "^4.0.0" + }, + "dependencies": { + "chalk": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.1.tgz", + "integrity": "sha512-ObN6h1v2fTJSmUXoS3nMQ92LbDK9be4TV+6G+omQlGJFdcUX5heKi1LZ1YnRMIgwTLEj3E24bT6tYni50rlCfQ==", + "dev": true, + "requires": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + } + } } }, - "@types/jest": { - "version": "23.3.11", - "resolved": "https://registry.npmjs.org/@types/jest/-/jest-23.3.11.tgz", - "integrity": "sha512-eroF85PoG87XjCwzxey7yBsQNkIY/TV5myKKSG/022A0FW25afdu/uub6JDMS5eT68zBBt82S+w/MFOTjeLM3Q==", + "@babel/parser": { + "version": "7.9.4", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.9.4.tgz", + "integrity": "sha512-bC49otXX6N0/VYhgOMh4gnP26E9xnDZK3TmbNpxYzzz9BQLBosQwfyOe9/cXUU3txYhTzLCbcqd5c8y/OmCjHA==", "dev": true }, - "@types/js-yaml": { - "version": "3.11.4", - "resolved": "https://registry.npmjs.org/@types/js-yaml/-/js-yaml-3.11.4.tgz", - "integrity": "sha512-a42nbZzOlxbH3ORDg/brU+zlRQS8mSvZCEY4Xery66/NIA7yX4T9qk6yL9qpw4AhgwzEgc1XIhHRnivQW+zlkw==" - }, - "@types/json5": { - "version": "0.0.29", - "resolved": "https://registry.npmjs.org/@types/json5/-/json5-0.0.29.tgz", - "integrity": "sha1-7ihweulOEdK4J7y+UnC86n8+ce4=" - }, - "@types/jss": { - "version": "9.5.7", - "resolved": 
"https://registry.npmjs.org/@types/jss/-/jss-9.5.7.tgz", - "integrity": "sha512-OZimStu2QdDMtZ0h72JXqvLVbWUjXd5ZLk8vxLmfuC/nM1AabRyyGoxSufnzixrbpEcVcyy/JV5qeQu2JnjVZw==", + "@babel/plugin-proposal-async-generator-functions": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-async-generator-functions/-/plugin-proposal-async-generator-functions-7.8.3.tgz", + "integrity": "sha512-NZ9zLv848JsV3hs8ryEh7Uaz/0KsmPLqv0+PdkDJL1cJy0K4kOCFa8zc1E3mp+RHPQcpdfb/6GovEsW4VDrOMw==", + "dev": true, "requires": { - "csstype": "^2.0.0", - "indefinite-observable": "^1.0.1" + "@babel/helper-plugin-utils": "^7.8.3", + "@babel/helper-remap-async-to-generator": "^7.8.3", + "@babel/plugin-syntax-async-generators": "^7.8.0" } }, - "@types/lodash": { - "version": "4.14.119", - "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.14.119.tgz", - "integrity": "sha512-Z3TNyBL8Vd/M9D9Ms2S3LmFq2sSMzahodD6rCS9V2N44HUMINb75jNkSuwAx7eo2ufqTdfOdtGQpNbieUjPQmw==", - "dev": true + "@babel/plugin-proposal-class-properties": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-class-properties/-/plugin-proposal-class-properties-7.8.3.tgz", + "integrity": "sha512-EqFhbo7IosdgPgZggHaNObkmO1kNUe3slaKu54d5OWvy+p9QIKOzK1GAEpAIsZtWVtPXUHSMcT4smvDrCfY4AA==", + "dev": true, + "requires": { + "@babel/helper-create-class-features-plugin": "^7.8.3", + "@babel/helper-plugin-utils": "^7.8.3" + } }, - "@types/mime": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@types/mime/-/mime-2.0.0.tgz", - "integrity": "sha512-A2TAGbTFdBw9azHbpVd+/FkdW2T6msN1uct1O9bH3vTerEHKZhTXJUQXy+hNq1B0RagfU8U+KBdqiZpxjhOUQA==", - "dev": true + "@babel/plugin-proposal-decorators": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-decorators/-/plugin-proposal-decorators-7.8.3.tgz", + "integrity": "sha512-e3RvdvS4qPJVTe288DlXjwKflpfy1hr0j5dz5WpIYYeP7vQZg2WfAEIp8k5/Lwis/m5REXEteIz6rrcDtXXG7w==", + "dev": true, + "requires": { + "@babel/helper-create-class-features-plugin": "^7.8.3", + "@babel/helper-plugin-utils": "^7.8.3", + "@babel/plugin-syntax-decorators": "^7.8.3" + } }, - "@types/mkdirp": { - "version": "0.3.29", - "resolved": "https://registry.npmjs.org/@types/mkdirp/-/mkdirp-0.3.29.tgz", - "integrity": "sha1-fyrX7FX5FEgvybHsS7GuYCjUYGY=", - "dev": true + "@babel/plugin-proposal-dynamic-import": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-dynamic-import/-/plugin-proposal-dynamic-import-7.8.3.tgz", + "integrity": "sha512-NyaBbyLFXFLT9FP+zk0kYlUlA8XtCUbehs67F0nnEg7KICgMc2mNkIeu9TYhKzyXMkrapZFwAhXLdnt4IYHy1w==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.8.3", + "@babel/plugin-syntax-dynamic-import": "^7.8.0" + } }, - "@types/node": { - "version": "10.12.18", - "resolved": "https://registry.npmjs.org/@types/node/-/node-10.12.18.tgz", - "integrity": "sha512-fh+pAqt4xRzPfqA6eh3Z2y6fyZavRIumvjhaCL753+TVkGKGhpPeyrJG2JftD0T9q4GF00KjefsQ+PQNDdWQaQ==", - "dev": true + "@babel/plugin-proposal-json-strings": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-json-strings/-/plugin-proposal-json-strings-7.8.3.tgz", + "integrity": "sha512-KGhQNZ3TVCQG/MjRbAUwuH+14y9q0tpxs1nWWs3pbSleRdDro9SAMMDyye8HhY1gqZ7/NqIc8SKhya0wRDgP1Q==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.8.3", + "@babel/plugin-syntax-json-strings": "^7.8.0" + } }, - "@types/prop-types": { - "version": "15.5.8", - "resolved": 
"https://registry.npmjs.org/@types/prop-types/-/prop-types-15.5.8.tgz", - "integrity": "sha512-3AQoUxQcQtLHsK25wtTWIoIpgYjH3vSDroZOUr7PpCHw/jLY1RB9z9E8dBT/OSmwStVgkRNvdh+ZHNiomRieaw==" + "@babel/plugin-proposal-nullish-coalescing-operator": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-nullish-coalescing-operator/-/plugin-proposal-nullish-coalescing-operator-7.8.3.tgz", + "integrity": "sha512-TS9MlfzXpXKt6YYomudb/KU7nQI6/xnapG6in1uZxoxDghuSMZsPb6D2fyUwNYSAp4l1iR7QtFOjkqcRYcUsfw==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.8.3", + "@babel/plugin-syntax-nullish-coalescing-operator": "^7.8.0" + } }, - "@types/range-parser": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/@types/range-parser/-/range-parser-1.2.3.tgz", - "integrity": "sha512-ewFXqrQHlFsgc09MK5jP5iR7vumV/BYayNC6PgJO2LPe8vrnNFyjQjSppfEngITi0qvfKtzFvgKymGheFM9UOA==", - "dev": true + "@babel/plugin-proposal-numeric-separator": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-numeric-separator/-/plugin-proposal-numeric-separator-7.8.3.tgz", + "integrity": "sha512-jWioO1s6R/R+wEHizfaScNsAx+xKgwTLNXSh7tTC4Usj3ItsPEhYkEpU4h+lpnBwq7NBVOJXfO6cRFYcX69JUQ==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.8.3", + "@babel/plugin-syntax-numeric-separator": "^7.8.3" + } }, - "@types/react": { - "version": "16.7.18", - "resolved": "https://registry.npmjs.org/@types/react/-/react-16.7.18.tgz", - "integrity": "sha512-Tx4uu3ppK53/iHk6VpamMP3f3ahfDLEVt3ZQc8TFm30a1H3v9lMsCntBREswZIW/SKrvJjkb3Hq8UwO6GREBng==", + "@babel/plugin-proposal-object-rest-spread": { + "version": "7.9.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.9.5.tgz", + "integrity": "sha512-VP2oXvAf7KCYTthbUHwBlewbl1Iq059f6seJGsxMizaCdgHIeczOr7FBqELhSqfkIl04Fi8okzWzl63UKbQmmg==", + "dev": true, "requires": { - "@types/prop-types": "*", - "csstype": "^2.2.0" + "@babel/helper-plugin-utils": "^7.8.3", + "@babel/plugin-syntax-object-rest-spread": "^7.8.0", + "@babel/plugin-transform-parameters": "^7.9.5" } }, - "@types/react-dom": { - "version": "16.0.11", - "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-16.0.11.tgz", - "integrity": "sha512-x6zUx9/42B5Kl2Vl9HlopV8JF64wLpX3c+Pst9kc1HgzrsH+mkehe/zmHMQTplIrR48H2gpU7ZqurQolYu8XBA==", + "@babel/plugin-proposal-optional-catch-binding": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-optional-catch-binding/-/plugin-proposal-optional-catch-binding-7.8.3.tgz", + "integrity": "sha512-0gkX7J7E+AtAw9fcwlVQj8peP61qhdg/89D5swOkjYbkboA2CVckn3kiyum1DE0wskGb7KJJxBdyEBApDLLVdw==", "dev": true, "requires": { - "@types/react": "*" + "@babel/helper-plugin-utils": "^7.8.3", + "@babel/plugin-syntax-optional-catch-binding": "^7.8.0" } }, - "@types/react-router": { - "version": "4.4.3", - "resolved": "https://registry.npmjs.org/@types/react-router/-/react-router-4.4.3.tgz", - "integrity": "sha512-8GmjakEBFNCLJbpg9jtDp1EDvFP0VkIPPKBpVwmB3Q+9whFoHu8rluMUXUE5SoGkEQvVOtgJzWmUsJojNpFMQQ==", + "@babel/plugin-proposal-optional-chaining": { + "version": "7.9.0", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-optional-chaining/-/plugin-proposal-optional-chaining-7.9.0.tgz", + "integrity": "sha512-NDn5tu3tcv4W30jNhmc2hyD5c56G6cXx4TesJubhxrJeCvuuMpttxr0OnNCqbZGhFjLrg+NIhxxC+BK5F6yS3w==", "dev": true, "requires": { - "@types/history": "*", - "@types/react": "*" + 
"@babel/helper-plugin-utils": "^7.8.3", + "@babel/plugin-syntax-optional-chaining": "^7.8.0" } }, - "@types/react-router-dom": { - "version": "4.3.1", - "resolved": "https://registry.npmjs.org/@types/react-router-dom/-/react-router-dom-4.3.1.tgz", - "integrity": "sha512-GbztJAScOmQ/7RsQfO4cd55RuH1W4g6V1gDW3j4riLlt+8yxYLqqsiMzmyuXBLzdFmDtX/uU2Bpcm0cmudv44A==", + "@babel/plugin-proposal-unicode-property-regex": { + "version": "7.8.8", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-unicode-property-regex/-/plugin-proposal-unicode-property-regex-7.8.8.tgz", + "integrity": "sha512-EVhjVsMpbhLw9ZfHWSx2iy13Q8Z/eg8e8ccVWt23sWQK5l1UdkoLJPN5w69UA4uITGBnEZD2JOe4QOHycYKv8A==", "dev": true, "requires": { - "@types/history": "*", - "@types/react": "*", - "@types/react-router": "*" + "@babel/helper-create-regexp-features-plugin": "^7.8.8", + "@babel/helper-plugin-utils": "^7.8.3" } }, - "@types/react-test-renderer": { - "version": "16.0.3", - "resolved": "https://registry.npmjs.org/@types/react-test-renderer/-/react-test-renderer-16.0.3.tgz", - "integrity": "sha512-NWOAxVQeJxpXuNKgw83Hah0nquiw1nUexM9qY/Hk3a+XhZwgMtaa6GLA9E1TKMT75Odb3/KE/jiBO4enTuEJjQ==", + "@babel/plugin-syntax-async-generators": { + "version": "7.8.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz", + "integrity": "sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw==", "dev": true, "requires": { - "@types/react": "*" + "@babel/helper-plugin-utils": "^7.8.0" } }, - "@types/react-transition-group": { - "version": "2.0.15", - "resolved": "https://registry.npmjs.org/@types/react-transition-group/-/react-transition-group-2.0.15.tgz", - "integrity": "sha512-S0QnNzbHoWXDbKBl/xk5dxA4FT+BNlBcI3hku991cl8Cz3ytOkUMcCRtzdX11eb86E131bSsQqy5WrPCdJYblw==", + "@babel/plugin-syntax-decorators": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-decorators/-/plugin-syntax-decorators-7.8.3.tgz", + "integrity": "sha512-8Hg4dNNT9/LcA1zQlfwuKR8BUc/if7Q7NkTam9sGTcJphLwpf2g4S42uhspQrIrR+dpzE0dtTqBVFoHl8GtnnQ==", + "dev": true, "requires": { - "@types/react": "*" + "@babel/helper-plugin-utils": "^7.8.3" } }, - "@types/react-virtualized": { - "version": "9.18.11", - "resolved": "https://registry.npmjs.org/@types/react-virtualized/-/react-virtualized-9.18.11.tgz", - "integrity": "sha512-KT2FzDtV9YT2uN+5g9HPaME4Dtdlh7tEHEqTsOExWYzJGZjGKqHgBHTZC0vnHkzU1rXF9rVDxpi5MZuApoO7rA==", + "@babel/plugin-syntax-dynamic-import": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.8.3.tgz", + "integrity": "sha512-5gdGbFon+PszYzqs83S3E5mpi7/y/8M9eC90MRTZfduQOYW76ig6SOSPNe41IG5LoP3FGBn2N0RjVDSQiS94kQ==", "dev": true, "requires": { - "@types/prop-types": "*", - "@types/react": "*" + "@babel/helper-plugin-utils": "^7.8.0" } }, - "@types/rimraf": { - "version": "0.0.28", - "resolved": "https://registry.npmjs.org/@types/rimraf/-/rimraf-0.0.28.tgz", - "integrity": "sha1-VWJRm8eWPKyoq/fxKMrjtZTUHQY=", - "dev": true + "@babel/plugin-syntax-flow": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-flow/-/plugin-syntax-flow-7.8.3.tgz", + "integrity": "sha512-innAx3bUbA0KSYj2E2MNFSn9hiCeowOFLxlsuhXzw8hMQnzkDomUr9QCD7E9VF60NmnG1sNTuuv6Qf4f8INYsg==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.8.3" + } }, - "@types/serve-static": { - "version": "1.13.2", - "resolved": 
"https://registry.npmjs.org/@types/serve-static/-/serve-static-1.13.2.tgz", - "integrity": "sha512-/BZ4QRLpH/bNYgZgwhKEh+5AsboDBcUdlBYgzoLX0fpj3Y2gp6EApyOlM3bK53wQS/OE1SrdSYBAbux2D1528Q==", + "@babel/plugin-syntax-json-strings": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz", + "integrity": "sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA==", "dev": true, "requires": { - "@types/express-serve-static-core": "*", - "@types/mime": "*" + "@babel/helper-plugin-utils": "^7.8.0" } }, - "@types/strip-bom": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@types/strip-bom/-/strip-bom-3.0.0.tgz", - "integrity": "sha1-FKjsOVbC6B7bdSB5CuzyHCkK69I=", - "dev": true + "@babel/plugin-syntax-jsx": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.8.3.tgz", + "integrity": "sha512-WxdW9xyLgBdefoo0Ynn3MRSkhe5tFVxxKNVdnZSh318WrG2e2jH+E9wd/++JsqcLJZPfz87njQJ8j2Upjm0M0A==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.8.3" + } }, - "@types/strip-json-comments": { - "version": "0.0.30", - "resolved": "https://registry.npmjs.org/@types/strip-json-comments/-/strip-json-comments-0.0.30.tgz", - "integrity": "sha512-7NQmHra/JILCd1QqpSzl8+mJRc8ZHz3uDm8YV1Ks9IhK0epEiTw8aIErbvH9PI+6XbqhyIQy3462nEsn7UVzjQ==", - "dev": true + "@babel/plugin-syntax-nullish-coalescing-operator": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz", + "integrity": "sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.8.0" + } }, - "abab": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/abab/-/abab-2.0.0.tgz", - "integrity": "sha512-sY5AXXVZv4Y1VACTtR11UJCPHHudgY5i26Qj5TypE6DKlIApbwb5uqhXcJ5UUGbvZNRh7EeIoW+LrJumBsKp7w==" + "@babel/plugin-syntax-numeric-separator": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.8.3.tgz", + "integrity": "sha512-H7dCMAdN83PcCmqmkHB5dtp+Xa9a6LKSvA2hiFBC/5alSHxM5VgWZXFqDi0YFe8XNGT6iCa+z4V4zSt/PdZ7Dw==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.8.3" + } }, - "accepts": { - "version": "1.3.5", - "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.5.tgz", - "integrity": "sha1-63d99gEXI6OxTopywIBcjoZ0a9I=", + "@babel/plugin-syntax-object-rest-spread": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz", + "integrity": "sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA==", + "dev": true, "requires": { - "mime-types": "~2.1.18", - "negotiator": "0.6.1" + "@babel/helper-plugin-utils": "^7.8.0" } }, - "acorn": { - "version": "5.7.3", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-5.7.3.tgz", - "integrity": "sha512-T/zvzYRfbVojPWahDsE5evJdHb3oJoQfFbsrKM7w5Zcs++Tr257tia3BmMP8XYVjp1S9RZXQMh7gao96BlqZOw==" + "@babel/plugin-syntax-optional-catch-binding": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz", + "integrity": 
"sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.8.0" + } }, - "acorn-dynamic-import": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/acorn-dynamic-import/-/acorn-dynamic-import-2.0.2.tgz", - "integrity": "sha1-x1K9IQvvZ5UBtsbLf8hPj0cVjMQ=", + "@babel/plugin-syntax-optional-chaining": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz", + "integrity": "sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg==", + "dev": true, "requires": { - "acorn": "^4.0.3" - }, - "dependencies": { - "acorn": { - "version": "4.0.13", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-4.0.13.tgz", - "integrity": "sha1-EFSVrlNh1pe9GVyCUZLhrX8lN4c=" - } + "@babel/helper-plugin-utils": "^7.8.0" } }, - "acorn-globals": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/acorn-globals/-/acorn-globals-4.3.0.tgz", - "integrity": "sha512-hMtHj3s5RnuhvHPowpBYvJVj3rAar82JiDQHvGs1zO0l10ocX/xEdBShNHTJaboucJUsScghp74pH3s7EnHHQw==", + "@babel/plugin-syntax-top-level-await": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.8.3.tgz", + "integrity": "sha512-kwj1j9lL/6Wd0hROD3b/OZZ7MSrZLqqn9RAZ5+cYYsflQ9HZBIKCUkr3+uL1MEJ1NePiUbf98jjiMQSv0NMR9g==", + "dev": true, "requires": { - "acorn": "^6.0.1", - "acorn-walk": "^6.0.1" - }, - "dependencies": { - "acorn": { - "version": "6.0.5", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-6.0.5.tgz", - "integrity": "sha512-i33Zgp3XWtmZBMNvCr4azvOFeWVw1Rk6p3hfi3LUDvIFraOMywb1kAtrbi+med14m4Xfpqm3zRZMT+c0FNE7kg==" - } + "@babel/helper-plugin-utils": "^7.8.3" } }, - "acorn-walk": { - "version": "6.1.1", - "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-6.1.1.tgz", - "integrity": "sha512-OtUw6JUTgxA2QoqqmrmQ7F2NYqiBPi/L2jqHyFtllhOUvXYQXf0Z1CYUinIfyT4bTCGmrA7gX9FvHA81uzCoVw==" + "@babel/plugin-syntax-typescript": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.8.3.tgz", + "integrity": "sha512-GO1MQ/SGGGoiEXY0e0bSpHimJvxqB7lktLLIq2pv8xG7WZ8IMEle74jIe1FhprHBWjwjZtXHkycDLZXIWM5Wfg==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.8.3" + } }, - "address": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/address/-/address-1.0.3.tgz", - "integrity": "sha512-z55ocwKBRLryBs394Sm3ushTtBeg6VAeuku7utSoSnsJKvKcnXFIyC6vh27n3rXyxSgkJBBCAvyOn7gSUcTYjg==" + "@babel/plugin-transform-arrow-functions": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.8.3.tgz", + "integrity": "sha512-0MRF+KC8EqH4dbuITCWwPSzsyO3HIWWlm30v8BbbpOrS1B++isGxPnnuq/IZvOX5J2D/p7DQalQm+/2PnlKGxg==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.8.3" + } }, - "agent-base": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-4.2.1.tgz", - "integrity": "sha512-JVwXMr9nHYTUXsBFKUqhJwvlcYU/blreOEUkhNR2eXZIvwd+c+o5V4MgDPKWnMS/56awN3TRzIP+KoPn+roQtg==", + "@babel/plugin-transform-async-to-generator": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.8.3.tgz", + "integrity": 
"sha512-imt9tFLD9ogt56Dd5CI/6XgpukMwd/fLGSrix2httihVe7LOGVPhyhMh1BU5kDM7iHD08i8uUtmV2sWaBFlHVQ==", "dev": true, "requires": { - "es6-promisify": "^5.0.0" + "@babel/helper-module-imports": "^7.8.3", + "@babel/helper-plugin-utils": "^7.8.3", + "@babel/helper-remap-async-to-generator": "^7.8.3" } }, - "ajv": { - "version": "5.5.2", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-5.5.2.tgz", - "integrity": "sha1-c7Xuyj+rZT49P5Qis0GtQiBdyWU=", + "@babel/plugin-transform-block-scoped-functions": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.8.3.tgz", + "integrity": "sha512-vo4F2OewqjbB1+yaJ7k2EJFHlTP3jR634Z9Cj9itpqNjuLXvhlVxgnjsHsdRgASR8xYDrx6onw4vW5H6We0Jmg==", + "dev": true, "requires": { - "co": "^4.6.0", - "fast-deep-equal": "^1.0.0", - "fast-json-stable-stringify": "^2.0.0", - "json-schema-traverse": "^0.3.0" + "@babel/helper-plugin-utils": "^7.8.3" } }, - "ajv-keywords": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.2.0.tgz", - "integrity": "sha1-6GuBnGAs+IIa1jdBNpjx3sAhhHo=" + "@babel/plugin-transform-block-scoping": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.8.3.tgz", + "integrity": "sha512-pGnYfm7RNRgYRi7bids5bHluENHqJhrV4bCZRwc5GamaWIIs07N4rZECcmJL6ZClwjDz1GbdMZFtPs27hTB06w==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.8.3", + "lodash": "^4.17.13" + } }, - "align-text": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/align-text/-/align-text-0.1.4.tgz", - "integrity": "sha1-DNkKVhCT810KmSVsIrcGlDP60Rc=", + "@babel/plugin-transform-classes": { + "version": "7.9.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-classes/-/plugin-transform-classes-7.9.5.tgz", + "integrity": "sha512-x2kZoIuLC//O5iA7PEvecB105o7TLzZo8ofBVhP79N+DO3jaX+KYfww9TQcfBEZD0nikNyYcGB1IKtRq36rdmg==", + "dev": true, "requires": { - "kind-of": "^3.0.2", - "longest": "^1.0.1", - "repeat-string": "^1.5.2" - }, - "dependencies": { - "kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", - "requires": { - "is-buffer": "^1.1.5" - } - } + "@babel/helper-annotate-as-pure": "^7.8.3", + "@babel/helper-define-map": "^7.8.3", + "@babel/helper-function-name": "^7.9.5", + "@babel/helper-optimise-call-expression": "^7.8.3", + "@babel/helper-plugin-utils": "^7.8.3", + "@babel/helper-replace-supers": "^7.8.6", + "@babel/helper-split-export-declaration": "^7.8.3", + "globals": "^11.1.0" } }, - "alphanum-sort": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/alphanum-sort/-/alphanum-sort-1.0.2.tgz", - "integrity": "sha1-l6ERlkmyEa0zaR2fn0hqjsn74KM=" + "@babel/plugin-transform-computed-properties": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.8.3.tgz", + "integrity": "sha512-O5hiIpSyOGdrQZRQ2ccwtTVkgUDBBiCuK//4RJ6UfePllUTCENOzKxfh6ulckXKc0DixTFLCfb2HVkNA7aDpzA==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.8.3" + } }, - "ansi-align": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ansi-align/-/ansi-align-2.0.0.tgz", - "integrity": "sha1-w2rsy6VjuJzrVW82kPCx2eNUf38=", + "@babel/plugin-transform-destructuring": { + "version": "7.9.5", + "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.9.5.tgz", + "integrity": "sha512-j3OEsGel8nHL/iusv/mRd5fYZ3DrOxWC82x0ogmdN/vHfAP4MYw+AFKYanzWlktNwikKvlzUV//afBW5FTp17Q==", + "dev": true, "requires": { - "string-width": "^2.0.0" + "@babel/helper-plugin-utils": "^7.8.3" } }, - "ansi-escapes": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-3.1.0.tgz", - "integrity": "sha512-UgAb8H9D41AQnu/PbWlCofQVcnV4Gs2bBJi9eZPxfU/hgglFh3SMDMENRIqdr7H6XFnXdoknctFByVsCOotTVw==" - }, - "ansi-html": { - "version": "0.0.7", - "resolved": "https://registry.npmjs.org/ansi-html/-/ansi-html-0.0.7.tgz", - "integrity": "sha1-gTWEAhliqenm/QOflA0S9WynhZ4=" - }, - "ansi-regex": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", - "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=" + "@babel/plugin-transform-dotall-regex": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.8.3.tgz", + "integrity": "sha512-kLs1j9Nn4MQoBYdRXH6AeaXMbEJFaFu/v1nQkvib6QzTj8MZI5OQzqmD83/2jEM1z0DLilra5aWO5YpyC0ALIw==", + "dev": true, + "requires": { + "@babel/helper-create-regexp-features-plugin": "^7.8.3", + "@babel/helper-plugin-utils": "^7.8.3" + } }, - "ansi-styles": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", - "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "@babel/plugin-transform-duplicate-keys": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.8.3.tgz", + "integrity": "sha512-s8dHiBUbcbSgipS4SMFuWGqCvyge5V2ZeAWzR6INTVC3Ltjig/Vw1G2Gztv0vU/hRG9X8IvKvYdoksnUfgXOEQ==", + "dev": true, "requires": { - "color-convert": "^1.9.0" + "@babel/helper-plugin-utils": "^7.8.3" } }, - "ansicolors": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/ansicolors/-/ansicolors-0.3.2.tgz", - "integrity": "sha1-ZlWX3oap/+Oqm/vmyuXG6kJrSXk=", - "dev": true + "@babel/plugin-transform-exponentiation-operator": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.8.3.tgz", + "integrity": "sha512-zwIpuIymb3ACcInbksHaNcR12S++0MDLKkiqXHl3AzpgdKlFNhog+z/K0+TGW+b0w5pgTq4H6IwV/WhxbGYSjQ==", + "dev": true, + "requires": { + "@babel/helper-builder-binary-assignment-operator-visitor": "^7.8.3", + "@babel/helper-plugin-utils": "^7.8.3" + } }, - "anymatch": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-1.3.2.tgz", - "integrity": "sha512-0XNayC8lTHQ2OI8aljNCN3sSx6hsr/1+rlcDAotXJR7C1oZZHCNsfpbKwMjRA3Uqb5tF1Rae2oloTr4xpq+WjA==", + "@babel/plugin-transform-flow-strip-types": { + "version": "7.9.0", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-flow-strip-types/-/plugin-transform-flow-strip-types-7.9.0.tgz", + "integrity": "sha512-7Qfg0lKQhEHs93FChxVLAvhBshOPQDtJUTVHr/ZwQNRccCm4O9D79r9tVSoV8iNwjP1YgfD+e/fgHcPkN1qEQg==", + "dev": true, "requires": { - "micromatch": "^2.1.5", - "normalize-path": "^2.0.0" - }, - "dependencies": { - "arr-diff": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/arr-diff/-/arr-diff-2.0.0.tgz", - "integrity": "sha1-jzuCf5Vai9ZpaX5KQlasPOrjVs8=", - "requires": { - "arr-flatten": "^1.0.1" - } - }, - "array-unique": { - 
"version": "0.2.1", - "resolved": "https://registry.npmjs.org/array-unique/-/array-unique-0.2.1.tgz", - "integrity": "sha1-odl8yvy8JiXMcPrc6zalDFiwGlM=" - }, - "braces": { - "version": "1.8.5", - "resolved": "https://registry.npmjs.org/braces/-/braces-1.8.5.tgz", - "integrity": "sha1-uneWLhLf+WnWt2cR6RS3N4V79qc=", - "requires": { - "expand-range": "^1.8.1", - "preserve": "^0.2.0", - "repeat-element": "^1.1.2" - } - }, - "expand-brackets": { - "version": "0.1.5", - "resolved": "https://registry.npmjs.org/expand-brackets/-/expand-brackets-0.1.5.tgz", - "integrity": "sha1-3wcoTjQqgHzXM6xa9yQR5YHRF3s=", - "requires": { - "is-posix-bracket": "^0.1.0" - } - }, - "extglob": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/extglob/-/extglob-0.3.2.tgz", - "integrity": "sha1-Lhj/PS9JqydlzskCPwEdqo2DSaE=", - "requires": { - "is-extglob": "^1.0.0" - } - }, - "is-extglob": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-1.0.0.tgz", - "integrity": "sha1-rEaBd8SUNAWgkvyPKXYMb/xiBsA=" - }, - "is-glob": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-2.0.1.tgz", - "integrity": "sha1-0Jb5JqPe1WAPP9/ZEZjLCIjC2GM=", - "requires": { - "is-extglob": "^1.0.0" - } - }, - "kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", - "requires": { - "is-buffer": "^1.1.5" - } - }, - "micromatch": { - "version": "2.3.11", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-2.3.11.tgz", - "integrity": "sha1-hmd8l9FyCzY0MdBNDRUpO9OMFWU=", - "requires": { - "arr-diff": "^2.0.0", - "array-unique": "^0.2.1", - "braces": "^1.8.2", - "expand-brackets": "^0.1.4", - "extglob": "^0.3.1", - "filename-regex": "^2.0.0", - "is-extglob": "^1.0.0", - "is-glob": "^2.0.1", - "kind-of": "^3.0.2", - "normalize-path": "^2.0.1", - "object.omit": "^2.0.0", - "parse-glob": "^3.0.4", - "regex-cache": "^0.4.2" - } - } + "@babel/helper-plugin-utils": "^7.8.3", + "@babel/plugin-syntax-flow": "^7.8.3" } }, - "append-transform": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/append-transform/-/append-transform-0.4.0.tgz", - "integrity": "sha1-126/jKlNJ24keja61EpLdKthGZE=", + "@babel/plugin-transform-for-of": { + "version": "7.9.0", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.9.0.tgz", + "integrity": "sha512-lTAnWOpMwOXpyDx06N+ywmF3jNbafZEqZ96CGYabxHrxNX8l5ny7dt4bK/rGwAh9utyP2b2Hv7PlZh1AAS54FQ==", + "dev": true, "requires": { - "default-require-extensions": "^1.0.0" + "@babel/helper-plugin-utils": "^7.8.3" } }, - "aproba": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/aproba/-/aproba-1.2.0.tgz", - "integrity": "sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw==" + "@babel/plugin-transform-function-name": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.8.3.tgz", + "integrity": "sha512-rO/OnDS78Eifbjn5Py9v8y0aR+aSYhDhqAwVfsTl0ERuMZyr05L1aFSCJnbv2mmsLkit/4ReeQ9N2BgLnOcPCQ==", + "dev": true, + "requires": { + "@babel/helper-function-name": "^7.8.3", + "@babel/helper-plugin-utils": "^7.8.3" + } }, - "argparse": { - "version": "1.0.10", - "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", - "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", 
+ "@babel/plugin-transform-literals": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-literals/-/plugin-transform-literals-7.8.3.tgz", + "integrity": "sha512-3Tqf8JJ/qB7TeldGl+TT55+uQei9JfYaregDcEAyBZ7akutriFrt6C/wLYIer6OYhleVQvH/ntEhjE/xMmy10A==", + "dev": true, "requires": { - "sprintf-js": "~1.0.2" + "@babel/helper-plugin-utils": "^7.8.3" } }, - "arr-diff": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/arr-diff/-/arr-diff-4.0.0.tgz", - "integrity": "sha1-1kYQdP6/7HHn4VI1dhoyml3HxSA=" + "@babel/plugin-transform-member-expression-literals": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.8.3.tgz", + "integrity": "sha512-3Wk2EXhnw+rP+IDkK6BdtPKsUE5IeZ6QOGrPYvw52NwBStw9V1ZVzxgK6fSKSxqUvH9eQPR3tm3cOq79HlsKYA==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.8.3" + } }, - "arr-flatten": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/arr-flatten/-/arr-flatten-1.1.0.tgz", - "integrity": "sha512-L3hKV5R/p5o81R7O02IGnwpDmkp6E982XhtbuwSe3O4qOtMMMtodicASA1Cny2U+aCXcNpml+m4dPsvsJ3jatg==" + "@babel/plugin-transform-modules-amd": { + "version": "7.9.0", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.9.0.tgz", + "integrity": "sha512-vZgDDF003B14O8zJy0XXLnPH4sg+9X5hFBBGN1V+B2rgrB+J2xIypSN6Rk9imB2hSTHQi5OHLrFWsZab1GMk+Q==", + "dev": true, + "requires": { + "@babel/helper-module-transforms": "^7.9.0", + "@babel/helper-plugin-utils": "^7.8.3", + "babel-plugin-dynamic-import-node": "^2.3.0" + } }, - "arr-union": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/arr-union/-/arr-union-3.1.0.tgz", - "integrity": "sha1-45sJrqne+Gao8gbiiK9jkZuuOcQ=" + "@babel/plugin-transform-modules-commonjs": { + "version": "7.9.0", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.9.0.tgz", + "integrity": "sha512-qzlCrLnKqio4SlgJ6FMMLBe4bySNis8DFn1VkGmOcxG9gqEyPIOzeQrA//u0HAKrWpJlpZbZMPB1n/OPa4+n8g==", + "dev": true, + "requires": { + "@babel/helper-module-transforms": "^7.9.0", + "@babel/helper-plugin-utils": "^7.8.3", + "@babel/helper-simple-access": "^7.8.3", + "babel-plugin-dynamic-import-node": "^2.3.0" + } }, - "array-equal": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/array-equal/-/array-equal-1.0.0.tgz", - "integrity": "sha1-jCpe8kcv2ep0KwTHenUJO6J1fJM=" + "@babel/plugin-transform-modules-systemjs": { + "version": "7.9.0", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.9.0.tgz", + "integrity": "sha512-FsiAv/nao/ud2ZWy4wFacoLOm5uxl0ExSQ7ErvP7jpoihLR6Cq90ilOFyX9UXct3rbtKsAiZ9kFt5XGfPe/5SQ==", + "dev": true, + "requires": { + "@babel/helper-hoist-variables": "^7.8.3", + "@babel/helper-module-transforms": "^7.9.0", + "@babel/helper-plugin-utils": "^7.8.3", + "babel-plugin-dynamic-import-node": "^2.3.0" + } }, - "array-filter": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/array-filter/-/array-filter-0.0.1.tgz", - "integrity": "sha1-fajPLiZijtcygDWB/SH2fKzS7uw=" + "@babel/plugin-transform-modules-umd": { + "version": "7.9.0", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.9.0.tgz", + "integrity": 
"sha512-uTWkXkIVtg/JGRSIABdBoMsoIeoHQHPTL0Y2E7xf5Oj7sLqwVsNXOkNk0VJc7vF0IMBsPeikHxFjGe+qmwPtTQ==", + "dev": true, + "requires": { + "@babel/helper-module-transforms": "^7.9.0", + "@babel/helper-plugin-utils": "^7.8.3" + } }, - "array-find-index": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/array-find-index/-/array-find-index-1.0.2.tgz", - "integrity": "sha1-3wEKoSh+Fku9pvlyOwqWoexBh6E=" + "@babel/plugin-transform-named-capturing-groups-regex": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.8.3.tgz", + "integrity": "sha512-f+tF/8UVPU86TrCb06JoPWIdDpTNSGGcAtaD9mLP0aYGA0OS0j7j7DHJR0GTFrUZPUU6loZhbsVZgTh0N+Qdnw==", + "dev": true, + "requires": { + "@babel/helper-create-regexp-features-plugin": "^7.8.3" + } }, - "array-flatten": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-2.1.2.tgz", - "integrity": "sha512-hNfzcOV8W4NdualtqBFPyVO+54DSJuZGY9qT4pRroB6S9e3iiido2ISIC5h9R2sPJ8H3FHCIiEnsv1lPXO3KtQ==" + "@babel/plugin-transform-new-target": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.8.3.tgz", + "integrity": "sha512-QuSGysibQpyxexRyui2vca+Cmbljo8bcRckgzYV4kRIsHpVeyeC3JDO63pY+xFZ6bWOBn7pfKZTqV4o/ix9sFw==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.8.3" + } }, - "array-includes": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.0.3.tgz", - "integrity": "sha1-GEtI9i2S10UrsxsyMWXH+L0CJm0=", + "@babel/plugin-transform-object-super": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.8.3.tgz", + "integrity": "sha512-57FXk+gItG/GejofIyLIgBKTas4+pEU47IXKDBWFTxdPd7F80H8zybyAY7UoblVfBhBGs2EKM+bJUu2+iUYPDQ==", + "dev": true, "requires": { - "define-properties": "^1.1.2", - "es-abstract": "^1.7.0" + "@babel/helper-plugin-utils": "^7.8.3", + "@babel/helper-replace-supers": "^7.8.3" } }, - "array-map": { - "version": "0.0.0", - "resolved": "https://registry.npmjs.org/array-map/-/array-map-0.0.0.tgz", - "integrity": "sha1-iKK6tz0c97zVwbEYoAP2b2ZfpmI=" + "@babel/plugin-transform-parameters": { + "version": "7.9.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.9.5.tgz", + "integrity": "sha512-0+1FhHnMfj6lIIhVvS4KGQJeuhe1GI//h5uptK4PvLt+BGBxsoUJbd3/IW002yk//6sZPlFgsG1hY6OHLcy6kA==", + "dev": true, + "requires": { + "@babel/helper-get-function-arity": "^7.8.3", + "@babel/helper-plugin-utils": "^7.8.3" + } }, - "array-reduce": { - "version": "0.0.0", - "resolved": "https://registry.npmjs.org/array-reduce/-/array-reduce-0.0.0.tgz", - "integrity": "sha1-FziZ0//Rx9k4PkR5Ul2+J4yrXys=" + "@babel/plugin-transform-property-literals": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.8.3.tgz", + "integrity": "sha512-uGiiXAZMqEoQhRWMK17VospMZh5sXWg+dlh2soffpkAl96KAm+WZuJfa6lcELotSRmooLqg0MWdH6UUq85nmmg==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.8.3" + } }, - "array-union": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/array-union/-/array-union-1.0.2.tgz", - "integrity": "sha1-mjRBDk9OPaI96jdb5b5w8kd47Dk=", + "@babel/plugin-transform-react-constant-elements": { + "version": "7.9.0", + 
"resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-constant-elements/-/plugin-transform-react-constant-elements-7.9.0.tgz", + "integrity": "sha512-wXMXsToAUOxJuBBEHajqKLFWcCkOSLshTI2ChCFFj1zDd7od4IOxiwLCOObNUvOpkxLpjIuaIdBMmNt6ocCPAw==", + "dev": true, "requires": { - "array-uniq": "^1.0.1" + "@babel/helper-plugin-utils": "^7.8.3" } }, - "array-uniq": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/array-uniq/-/array-uniq-1.0.3.tgz", - "integrity": "sha1-r2rId6Jcx/dOBYiUdThY39sk/bY=" + "@babel/plugin-transform-react-display-name": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-display-name/-/plugin-transform-react-display-name-7.8.3.tgz", + "integrity": "sha512-3Jy/PCw8Fe6uBKtEgz3M82ljt+lTg+xJaM4og+eyu83qLT87ZUSckn0wy7r31jflURWLO83TW6Ylf7lyXj3m5A==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.8.3" + } }, - "array-unique": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/array-unique/-/array-unique-0.3.2.tgz", - "integrity": "sha1-qJS3XUvE9s1nnvMkSp/Y9Gri1Cg=" + "@babel/plugin-transform-react-jsx": { + "version": "7.9.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx/-/plugin-transform-react-jsx-7.9.4.tgz", + "integrity": "sha512-Mjqf3pZBNLt854CK0C/kRuXAnE6H/bo7xYojP+WGtX8glDGSibcwnsWwhwoSuRg0+EBnxPC1ouVnuetUIlPSAw==", + "dev": true, + "requires": { + "@babel/helper-builder-react-jsx": "^7.9.0", + "@babel/helper-builder-react-jsx-experimental": "^7.9.0", + "@babel/helper-plugin-utils": "^7.8.3", + "@babel/plugin-syntax-jsx": "^7.8.3" + } }, - "array.prototype.flat": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.2.1.tgz", - "integrity": "sha512-rVqIs330nLJvfC7JqYvEWwqVr5QjYF1ib02i3YJtR/fICO6527Tjpc/e4Mvmxh3GIePPreRXMdaGyC99YphWEw==", + "@babel/plugin-transform-react-jsx-development": { + "version": "7.9.0", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-development/-/plugin-transform-react-jsx-development-7.9.0.tgz", + "integrity": "sha512-tK8hWKrQncVvrhvtOiPpKrQjfNX3DtkNLSX4ObuGcpS9p0QrGetKmlySIGR07y48Zft8WVgPakqd/bk46JrMSw==", "dev": true, "requires": { - "define-properties": "^1.1.2", - "es-abstract": "^1.10.0", - "function-bind": "^1.1.1" + "@babel/helper-builder-react-jsx-experimental": "^7.9.0", + "@babel/helper-plugin-utils": "^7.8.3", + "@babel/plugin-syntax-jsx": "^7.8.3" } }, - "arrify": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz", - "integrity": "sha1-iYUI2iIm84DfkEcoRWhJwVAaSw0=" + "@babel/plugin-transform-react-jsx-self": { + "version": "7.9.0", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-self/-/plugin-transform-react-jsx-self-7.9.0.tgz", + "integrity": "sha512-K2ObbWPKT7KUTAoyjCsFilOkEgMvFG+y0FqOl6Lezd0/13kMkkjHskVsZvblRPj1PHA44PrToaZANrryppzTvQ==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.8.3", + "@babel/plugin-syntax-jsx": "^7.8.3" + } }, - "asap": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz", - "integrity": "sha1-5QNHYR1+aQlDIIu9r+vLwvuGbUY=" + "@babel/plugin-transform-react-jsx-source": { + "version": "7.9.0", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-source/-/plugin-transform-react-jsx-source-7.9.0.tgz", + "integrity": "sha512-K6m3LlSnTSfRkM6FcRk8saNEeaeyG5k7AVkBU2bZK3+1zdkSED3qNdsWrUgQBeTVD2Tp3VMmerxVO2yM5iITmw==", + "dev": true, + "requires": 
{ + "@babel/helper-plugin-utils": "^7.8.3", + "@babel/plugin-syntax-jsx": "^7.8.3" + } }, - "asn1": { - "version": "0.2.4", - "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.4.tgz", - "integrity": "sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg==", + "@babel/plugin-transform-regenerator": { + "version": "7.8.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.8.7.tgz", + "integrity": "sha512-TIg+gAl4Z0a3WmD3mbYSk+J9ZUH6n/Yc57rtKRnlA/7rcCvpekHXe0CMZHP1gYp7/KLe9GHTuIba0vXmls6drA==", + "dev": true, "requires": { - "safer-buffer": "~2.1.0" + "regenerator-transform": "^0.14.2" } }, - "asn1.js": { - "version": "4.10.1", - "resolved": "https://registry.npmjs.org/asn1.js/-/asn1.js-4.10.1.tgz", - "integrity": "sha512-p32cOF5q0Zqs9uBiONKYLm6BClCoBCM5O9JfeUSlnQLBTxYdTK+pW+nXflm8UkKd2UYlEbYz5qEi0JuZR9ckSw==", + "@babel/plugin-transform-reserved-words": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.8.3.tgz", + "integrity": "sha512-mwMxcycN3omKFDjDQUl+8zyMsBfjRFr0Zn/64I41pmjv4NJuqcYlEtezwYtw9TFd9WR1vN5kiM+O0gMZzO6L0A==", + "dev": true, "requires": { - "bn.js": "^4.0.0", - "inherits": "^2.0.1", - "minimalistic-assert": "^1.0.0" + "@babel/helper-plugin-utils": "^7.8.3" } }, - "assert": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/assert/-/assert-1.4.1.tgz", - "integrity": "sha1-mZEtWRg2tab1s0XA8H7vwI/GXZE=", + "@babel/plugin-transform-runtime": { + "version": "7.9.0", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-runtime/-/plugin-transform-runtime-7.9.0.tgz", + "integrity": "sha512-pUu9VSf3kI1OqbWINQ7MaugnitRss1z533436waNXp+0N3ur3zfut37sXiQMxkuCF4VUjwZucen/quskCh7NHw==", + "dev": true, "requires": { - "util": "0.10.3" + "@babel/helper-module-imports": "^7.8.3", + "@babel/helper-plugin-utils": "^7.8.3", + "resolve": "^1.8.1", + "semver": "^5.5.1" }, "dependencies": { - "inherits": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.1.tgz", - "integrity": "sha1-sX0I0ya0Qj5Wjv9xn5GwscvfafE=" - }, - "util": { - "version": "0.10.3", - "resolved": "https://registry.npmjs.org/util/-/util-0.10.3.tgz", - "integrity": "sha1-evsa/lCAUkZInj23/g7TeTNqwPk=", + "resolve": { + "version": "1.15.1", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.15.1.tgz", + "integrity": "sha512-84oo6ZTtoTUpjgNEr5SJyzQhzL72gaRodsSfyxC/AXRvwu0Yse9H8eF9IpGo7b8YetZhlI6v7ZQ6bKBFV/6S7w==", + "dev": true, "requires": { - "inherits": "2.0.1" + "path-parse": "^1.0.6" } } } }, - "assert-plus": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", - "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=" - }, - "assign-symbols": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/assign-symbols/-/assign-symbols-1.0.0.tgz", - "integrity": "sha1-WWZ/QfrdTyDMvCu5a41Pf3jsA2c=" - }, - "astral-regex": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/astral-regex/-/astral-regex-1.0.0.tgz", - "integrity": "sha512-+Ryf6g3BKoRc7jfp7ad8tM4TtMiaWvbF/1/sQcZPkkS7ag3D5nMBCe2UfOTONtAkaG0tO0ij3C5Lwmf1EiyjHg==" - }, - "async": { - "version": "2.6.1", - "resolved": "https://registry.npmjs.org/async/-/async-2.6.1.tgz", - "integrity": "sha512-fNEiL2+AZt6AlAw/29Cr0UDe4sRAHCpEHh54WMz+Bb7QfNcFw4h3loofyJpLeQs4Yx7yuqu/2dLgM5hKOs6HlQ==", + "@babel/plugin-transform-shorthand-properties": { + "version": 
"7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.8.3.tgz", + "integrity": "sha512-I9DI6Odg0JJwxCHzbzW08ggMdCezoWcuQRz3ptdudgwaHxTjxw5HgdFJmZIkIMlRymL6YiZcped4TTCB0JcC8w==", + "dev": true, "requires": { - "lodash": "^4.17.10" + "@babel/helper-plugin-utils": "^7.8.3" } }, - "async-chain-proxy": { - "version": "0.1.5", - "resolved": "https://registry.npmjs.org/async-chain-proxy/-/async-chain-proxy-0.1.5.tgz", - "integrity": "sha512-JYUzBrCWkKdBQ02H2f1N8xR4JXeWGcw2V8i2AzJjeXGTz40rqqbKkDmQieL8iHNp70W1M44WXqFsvk4Cx49H1Q==", + "@babel/plugin-transform-spread": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-spread/-/plugin-transform-spread-7.8.3.tgz", + "integrity": "sha512-CkuTU9mbmAoFOI1tklFWYYbzX5qCIZVXPVy0jpXgGwkplCndQAa58s2jr66fTeQnA64bDox0HL4U56CFYoyC7g==", "dev": true, "requires": { - "babel-polyfill": "^6.23.0" + "@babel/helper-plugin-utils": "^7.8.3" } }, - "async-each": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/async-each/-/async-each-1.0.1.tgz", - "integrity": "sha1-GdOGodntxufByF04iu28xW0zYC0=" - }, - "async-limiter": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/async-limiter/-/async-limiter-1.0.0.tgz", - "integrity": "sha512-jp/uFnooOiO+L211eZOoSyzpOITMXx1rBITauYykG3BRYPu8h0UcxsPNB04RR5vo4Tyz3+ay17tR6JVf9qzYWg==" - }, - "asynckit": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", - "integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=" - }, - "atob": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/atob/-/atob-2.1.2.tgz", - "integrity": "sha512-Wm6ukoaOGJi/73p/cl2GvLjTI5JM1k/O14isD73YML8StrH/7/lRFgmg8nICZgD3bZZvjwCGxtMOD3wWNAu8cg==" + "@babel/plugin-transform-sticky-regex": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.8.3.tgz", + "integrity": "sha512-9Spq0vGCD5Bb4Z/ZXXSK5wbbLFMG085qd2vhL1JYu1WcQ5bXqZBAYRzU1d+p79GcHs2szYv5pVQCX13QgldaWw==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.8.3", + "@babel/helper-regex": "^7.8.3" + } }, - "attr-accept": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/attr-accept/-/attr-accept-1.1.3.tgz", - "integrity": "sha512-iT40nudw8zmCweivz6j58g+RT33I4KbaIvRUhjNmDwO2WmsQUxFEZZYZ5w3vXe5x5MX9D7mfvA/XaLOZYFR9EQ==", + "@babel/plugin-transform-template-literals": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.8.3.tgz", + "integrity": "sha512-820QBtykIQOLFT8NZOcTRJ1UNuztIELe4p9DCgvj4NK+PwluSJ49we7s9FB1HIGNIYT7wFUJ0ar2QpCDj0escQ==", + "dev": true, "requires": { - "core-js": "^2.5.0" - }, - "dependencies": { - "core-js": { - "version": "2.6.1", - "resolved": "https://registry.npmjs.org/core-js/-/core-js-2.6.1.tgz", - "integrity": "sha512-L72mmmEayPJBejKIWe2pYtGis5r0tQ5NaJekdhyXgeMQTpJoBsH0NL4ElY2LfSoV15xeQWKQ+XTTOZdyero5Xg==" - } + "@babel/helper-annotate-as-pure": "^7.8.3", + "@babel/helper-plugin-utils": "^7.8.3" } }, - "autoprefixer": { - "version": "7.1.6", - "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-7.1.6.tgz", - "integrity": "sha512-C9yv/UF3X+eJTi/zvfxuyfxmLibYrntpF3qoJYrMeQwgUJOZrZvpJiMG2FMQ3qnhWtF/be4pYONBBw95ZGe3vA==", + "@babel/plugin-transform-typeof-symbol": { + "version": "7.8.4", + "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.8.4.tgz", + "integrity": "sha512-2QKyfjGdvuNfHsb7qnBBlKclbD4CfshH2KvDabiijLMGXPHJXGxtDzwIF7bQP+T0ysw8fYTtxPafgfs/c1Lrqg==", + "dev": true, "requires": { - "browserslist": "^2.5.1", - "caniuse-lite": "^1.0.30000748", - "normalize-range": "^0.1.2", - "num2fraction": "^1.2.2", - "postcss": "^6.0.13", - "postcss-value-parser": "^3.2.3" + "@babel/helper-plugin-utils": "^7.8.3" } }, - "aws-sign2": { - "version": "0.7.0", - "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz", - "integrity": "sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg=" + "@babel/plugin-transform-typescript": { + "version": "7.9.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-typescript/-/plugin-transform-typescript-7.9.4.tgz", + "integrity": "sha512-yeWeUkKx2auDbSxRe8MusAG+n4m9BFY/v+lPjmQDgOFX5qnySkUY5oXzkp6FwPdsYqnKay6lorXYdC0n3bZO7w==", + "dev": true, + "requires": { + "@babel/helper-create-class-features-plugin": "^7.8.3", + "@babel/helper-plugin-utils": "^7.8.3", + "@babel/plugin-syntax-typescript": "^7.8.3" + } }, - "aws4": { - "version": "1.8.0", - "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.8.0.tgz", - "integrity": "sha512-ReZxvNHIOv88FlT7rxcXIIC0fPt4KZqZbOlivyWtXLt8ESx84zd3kMC6iK5jVeS2qt+g7ftS7ye4fi06X5rtRQ==" + "@babel/plugin-transform-unicode-regex": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.8.3.tgz", + "integrity": "sha512-+ufgJjYdmWfSQ+6NS9VGUR2ns8cjJjYbrbi11mZBTaWm+Fui/ncTLFF28Ei1okavY+xkojGr1eJxNsWYeA5aZw==", + "dev": true, + "requires": { + "@babel/helper-create-regexp-features-plugin": "^7.8.3", + "@babel/helper-plugin-utils": "^7.8.3" + } }, - "babel-code-frame": { - "version": "6.26.0", - "resolved": "https://registry.npmjs.org/babel-code-frame/-/babel-code-frame-6.26.0.tgz", - "integrity": "sha1-Y/1D99weO7fONZR9uP42mj9Yx0s=", + "@babel/polyfill": { + "version": "7.4.4", + "resolved": "https://registry.npmjs.org/@babel/polyfill/-/polyfill-7.4.4.tgz", + "integrity": "sha512-WlthFLfhQQhh+A2Gn5NSFl0Huxz36x86Jn+E9OW7ibK8edKPq+KLy4apM1yDpQ8kJOVi1OVjpP4vSDLdrI04dg==", "requires": { - "chalk": "^1.1.3", - "esutils": "^2.0.2", - "js-tokens": "^3.0.2" + "core-js": "^2.6.5", + "regenerator-runtime": "^0.13.2" }, "dependencies": { - "js-tokens": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-3.0.2.tgz", - "integrity": "sha1-mGbfOVECEw449/mWvOtlRDIJwls=" + "core-js": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/core-js/-/core-js-2.6.9.tgz", + "integrity": "sha512-HOpZf6eXmnl7la+cUdMnLvUxKNqLUzJvgIziQ0DiF3JwSImNphIqdGqzj6hIKyX04MmV0poclQ7+wjWvxQyR2A==" + }, + "regenerator-runtime": { + "version": "0.13.3", + "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.3.tgz", + "integrity": "sha512-naKIZz2GQ8JWh///G7L3X6LaQUAMp2lvb1rvwwsURe/VXwD6VMfr+/1NuNw3ag8v2kY1aQ/go5SNn79O9JU7yw==" } } }, - "babel-core": { - "version": "6.26.3", - "resolved": "https://registry.npmjs.org/babel-core/-/babel-core-6.26.3.tgz", - "integrity": "sha512-6jyFLuDmeidKmUEb3NM+/yawG0M2bDZ9Z1qbZP59cyHLz8kYGKYwpJP0UwUKKUiTRNvxfLesJnTedqczP7cTDA==", + "@babel/preset-env": { + "version": "7.9.5", + "resolved": "https://registry.npmjs.org/@babel/preset-env/-/preset-env-7.9.5.tgz", + "integrity": "sha512-eWGYeADTlPJH+wq1F0wNfPbVS1w1wtmMJiYk55Td5Yu28AsdR9AsC97sZ0Qq8fHqQuslVSIYSGJMcblr345GfQ==", + "dev": true, "requires": { - 
"babel-code-frame": "^6.26.0", - "babel-generator": "^6.26.0", - "babel-helpers": "^6.24.1", - "babel-messages": "^6.23.0", - "babel-register": "^6.26.0", - "babel-runtime": "^6.26.0", - "babel-template": "^6.26.0", - "babel-traverse": "^6.26.0", - "babel-types": "^6.26.0", - "babylon": "^6.18.0", - "convert-source-map": "^1.5.1", - "debug": "^2.6.9", - "json5": "^0.5.1", - "lodash": "^4.17.4", - "minimatch": "^3.0.4", - "path-is-absolute": "^1.0.1", - "private": "^0.1.8", - "slash": "^1.0.0", - "source-map": "^0.5.7" + "@babel/compat-data": "^7.9.0", + "@babel/helper-compilation-targets": "^7.8.7", + "@babel/helper-module-imports": "^7.8.3", + "@babel/helper-plugin-utils": "^7.8.3", + "@babel/plugin-proposal-async-generator-functions": "^7.8.3", + "@babel/plugin-proposal-dynamic-import": "^7.8.3", + "@babel/plugin-proposal-json-strings": "^7.8.3", + "@babel/plugin-proposal-nullish-coalescing-operator": "^7.8.3", + "@babel/plugin-proposal-numeric-separator": "^7.8.3", + "@babel/plugin-proposal-object-rest-spread": "^7.9.5", + "@babel/plugin-proposal-optional-catch-binding": "^7.8.3", + "@babel/plugin-proposal-optional-chaining": "^7.9.0", + "@babel/plugin-proposal-unicode-property-regex": "^7.8.3", + "@babel/plugin-syntax-async-generators": "^7.8.0", + "@babel/plugin-syntax-dynamic-import": "^7.8.0", + "@babel/plugin-syntax-json-strings": "^7.8.0", + "@babel/plugin-syntax-nullish-coalescing-operator": "^7.8.0", + "@babel/plugin-syntax-numeric-separator": "^7.8.0", + "@babel/plugin-syntax-object-rest-spread": "^7.8.0", + "@babel/plugin-syntax-optional-catch-binding": "^7.8.0", + "@babel/plugin-syntax-optional-chaining": "^7.8.0", + "@babel/plugin-syntax-top-level-await": "^7.8.3", + "@babel/plugin-transform-arrow-functions": "^7.8.3", + "@babel/plugin-transform-async-to-generator": "^7.8.3", + "@babel/plugin-transform-block-scoped-functions": "^7.8.3", + "@babel/plugin-transform-block-scoping": "^7.8.3", + "@babel/plugin-transform-classes": "^7.9.5", + "@babel/plugin-transform-computed-properties": "^7.8.3", + "@babel/plugin-transform-destructuring": "^7.9.5", + "@babel/plugin-transform-dotall-regex": "^7.8.3", + "@babel/plugin-transform-duplicate-keys": "^7.8.3", + "@babel/plugin-transform-exponentiation-operator": "^7.8.3", + "@babel/plugin-transform-for-of": "^7.9.0", + "@babel/plugin-transform-function-name": "^7.8.3", + "@babel/plugin-transform-literals": "^7.8.3", + "@babel/plugin-transform-member-expression-literals": "^7.8.3", + "@babel/plugin-transform-modules-amd": "^7.9.0", + "@babel/plugin-transform-modules-commonjs": "^7.9.0", + "@babel/plugin-transform-modules-systemjs": "^7.9.0", + "@babel/plugin-transform-modules-umd": "^7.9.0", + "@babel/plugin-transform-named-capturing-groups-regex": "^7.8.3", + "@babel/plugin-transform-new-target": "^7.8.3", + "@babel/plugin-transform-object-super": "^7.8.3", + "@babel/plugin-transform-parameters": "^7.9.5", + "@babel/plugin-transform-property-literals": "^7.8.3", + "@babel/plugin-transform-regenerator": "^7.8.7", + "@babel/plugin-transform-reserved-words": "^7.8.3", + "@babel/plugin-transform-shorthand-properties": "^7.8.3", + "@babel/plugin-transform-spread": "^7.8.3", + "@babel/plugin-transform-sticky-regex": "^7.8.3", + "@babel/plugin-transform-template-literals": "^7.8.3", + "@babel/plugin-transform-typeof-symbol": "^7.8.4", + "@babel/plugin-transform-unicode-regex": "^7.8.3", + "@babel/preset-modules": "^0.1.3", + "@babel/types": "^7.9.5", + "browserslist": "^4.9.1", + "core-js-compat": "^3.6.2", + "invariant": "^2.2.2", + 
"levenary": "^1.1.1", + "semver": "^5.5.0" }, "dependencies": { - "debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "@babel/types": { + "version": "7.9.5", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.9.5.tgz", + "integrity": "sha512-XjnvNqenk818r5zMaba+sLQjnbda31UfUURv3ei0qPQw4u+j2jMyJ5b11y8ZHYTRSI3NnInQkkkRT4fLqqPdHg==", + "dev": true, "requires": { - "ms": "2.0.0" + "@babel/helper-validator-identifier": "^7.9.5", + "lodash": "^4.17.13", + "to-fast-properties": "^2.0.0" } }, - "json5": { - "version": "0.5.1", - "resolved": "https://registry.npmjs.org/json5/-/json5-0.5.1.tgz", - "integrity": "sha1-Hq3nrMASA0rYTiOWdn6tn6VJWCE=" + "browserslist": { + "version": "4.11.1", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.11.1.tgz", + "integrity": "sha512-DCTr3kDrKEYNw6Jb9HFxVLQNaue8z+0ZfRBRjmCunKDEXEBajKDj2Y+Uelg+Pi29OnvaSGwjOsnRyNEkXzHg5g==", + "dev": true, + "requires": { + "caniuse-lite": "^1.0.30001038", + "electron-to-chromium": "^1.3.390", + "node-releases": "^1.1.53", + "pkg-up": "^2.0.0" + } + }, + "caniuse-lite": { + "version": "1.0.30001039", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001039.tgz", + "integrity": "sha512-SezbWCTT34eyFoWHgx8UWso7YtvtM7oosmFoXbCkdC6qJzRfBTeTgE9REtKtiuKXuMwWTZEvdnFNGAyVMorv8Q==", + "dev": true + }, + "electron-to-chromium": { + "version": "1.3.399", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.3.399.tgz", + "integrity": "sha512-+NBhK0/v17pls7CSh3Cx5Ir3tsGmtLPMMAO4Nz272bre2wzdykLEsev5wjOd3rYMt2/kSS681ufFT7Dywxq1sw==", + "dev": true + }, + "node-releases": { + "version": "1.1.53", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-1.1.53.tgz", + "integrity": "sha512-wp8zyQVwef2hpZ/dJH7SfSrIPD6YoJz6BDQDpGEkcA0s3LpAQoxBIYmfIq6QAhC1DhwsyCgTaTTcONwX8qzCuQ==", + "dev": true } } }, - "babel-generator": { - "version": "6.26.1", - "resolved": "https://registry.npmjs.org/babel-generator/-/babel-generator-6.26.1.tgz", - "integrity": "sha512-HyfwY6ApZj7BYTcJURpM5tznulaBvyio7/0d4zFOeMPUmfxkCjHocCuoLa2SAGzBI8AREcH3eP3758F672DppA==", + "@babel/preset-modules": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/@babel/preset-modules/-/preset-modules-0.1.3.tgz", + "integrity": "sha512-Ra3JXOHBq2xd56xSF7lMKXdjBn3T772Y1Wet3yWnkDly9zHvJki029tAFzvAAK5cf4YV3yoxuP61crYRol6SVg==", + "dev": true, "requires": { - "babel-messages": "^6.23.0", - "babel-runtime": "^6.26.0", - "babel-types": "^6.26.0", - "detect-indent": "^4.0.0", - "jsesc": "^1.3.0", - "lodash": "^4.17.4", - "source-map": "^0.5.7", - "trim-right": "^1.0.1" + "@babel/helper-plugin-utils": "^7.0.0", + "@babel/plugin-proposal-unicode-property-regex": "^7.4.4", + "@babel/plugin-transform-dotall-regex": "^7.4.4", + "@babel/types": "^7.4.4", + "esutils": "^2.0.2" } }, - "babel-helper-builder-binary-assignment-operator-visitor": { - "version": "6.24.1", - "resolved": "https://registry.npmjs.org/babel-helper-builder-binary-assignment-operator-visitor/-/babel-helper-builder-binary-assignment-operator-visitor-6.24.1.tgz", - "integrity": "sha1-zORReto1b0IgvK6KAsKzRvmlZmQ=", + "@babel/preset-react": { + "version": "7.9.4", + "resolved": "https://registry.npmjs.org/@babel/preset-react/-/preset-react-7.9.4.tgz", + "integrity": 
"sha512-AxylVB3FXeOTQXNXyiuAQJSvss62FEotbX2Pzx3K/7c+MKJMdSg6Ose6QYllkdCFA8EInCJVw7M/o5QbLuA4ZQ==", + "dev": true, "requires": { - "babel-helper-explode-assignable-expression": "^6.24.1", - "babel-runtime": "^6.22.0", - "babel-types": "^6.24.1" + "@babel/helper-plugin-utils": "^7.8.3", + "@babel/plugin-transform-react-display-name": "^7.8.3", + "@babel/plugin-transform-react-jsx": "^7.9.4", + "@babel/plugin-transform-react-jsx-development": "^7.9.0", + "@babel/plugin-transform-react-jsx-self": "^7.9.0", + "@babel/plugin-transform-react-jsx-source": "^7.9.0" } }, - "babel-helper-builder-react-jsx": { - "version": "6.26.0", - "resolved": "https://registry.npmjs.org/babel-helper-builder-react-jsx/-/babel-helper-builder-react-jsx-6.26.0.tgz", - "integrity": "sha1-Of+DE7dci2Xc7/HzHTg+D/KkCKA=", + "@babel/preset-typescript": { + "version": "7.9.0", + "resolved": "https://registry.npmjs.org/@babel/preset-typescript/-/preset-typescript-7.9.0.tgz", + "integrity": "sha512-S4cueFnGrIbvYJgwsVFKdvOmpiL0XGw9MFW9D0vgRys5g36PBhZRL8NX8Gr2akz8XRtzq6HuDXPD/1nniagNUg==", + "dev": true, "requires": { - "babel-runtime": "^6.26.0", - "babel-types": "^6.26.0", - "esutils": "^2.0.2" + "@babel/helper-plugin-utils": "^7.8.3", + "@babel/plugin-transform-typescript": "^7.9.0" } }, - "babel-helper-call-delegate": { - "version": "6.24.1", - "resolved": "https://registry.npmjs.org/babel-helper-call-delegate/-/babel-helper-call-delegate-6.24.1.tgz", - "integrity": "sha1-7Oaqzdx25Bw0YfiL/Fdb0Nqi340=", + "@babel/runtime": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.2.0.tgz", + "integrity": "sha512-oouEibCbHMVdZSDlJBO6bZmID/zA/G/Qx3H1d3rSNPTD+L8UNKvCat7aKWSJ74zYbm5zWGh0GQN0hKj8zYFTCg==", "requires": { - "babel-helper-hoist-variables": "^6.24.1", - "babel-runtime": "^6.22.0", - "babel-traverse": "^6.24.1", - "babel-types": "^6.24.1" + "regenerator-runtime": "^0.12.0" } }, - "babel-helper-define-map": { - "version": "6.26.0", - "resolved": "https://registry.npmjs.org/babel-helper-define-map/-/babel-helper-define-map-6.26.0.tgz", - "integrity": "sha1-pfVtq0GiX5fstJjH66ypgZ+Vvl8=", + "@babel/runtime-corejs3": { + "version": "7.9.2", + "resolved": "https://registry.npmjs.org/@babel/runtime-corejs3/-/runtime-corejs3-7.9.2.tgz", + "integrity": "sha512-HHxmgxbIzOfFlZ+tdeRKtaxWOMUoCG5Mu3wKeUmOxjYrwb3AAHgnmtCUbPPK11/raIWLIBK250t8E2BPO0p7jA==", + "dev": true, "requires": { - "babel-helper-function-name": "^6.24.1", - "babel-runtime": "^6.26.0", - "babel-types": "^6.26.0", - "lodash": "^4.17.4" + "core-js-pure": "^3.0.0", + "regenerator-runtime": "^0.13.4" + }, + "dependencies": { + "regenerator-runtime": { + "version": "0.13.5", + "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.5.tgz", + "integrity": "sha512-ZS5w8CpKFinUzOwW3c83oPeVXoNsrLsaCoLtJvAClH135j/R77RuymhiSErhm2lKcwSCIpmvIWSbDkIfAqKQlA==", + "dev": true + } } }, - "babel-helper-explode-assignable-expression": { - "version": "6.24.1", - "resolved": "https://registry.npmjs.org/babel-helper-explode-assignable-expression/-/babel-helper-explode-assignable-expression-6.24.1.tgz", - "integrity": "sha1-8luCz33BBDPFX3BZLVdGQArCLKo=", + "@babel/template": { + "version": "7.8.6", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.8.6.tgz", + "integrity": "sha512-zbMsPMy/v0PWFZEhQJ66bqjhH+z0JgMoBWuikXybgG3Gkd/3t5oQ1Rw2WQhnSrsOmsKXnZOx15tkC4qON/+JPg==", + "dev": true, "requires": { - "babel-runtime": "^6.22.0", - "babel-traverse": "^6.24.1", - "babel-types": "^6.24.1" + 
"@babel/code-frame": "^7.8.3", + "@babel/parser": "^7.8.6", + "@babel/types": "^7.8.6" + }, + "dependencies": { + "@babel/code-frame": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.8.3.tgz", + "integrity": "sha512-a9gxpmdXtZEInkCSHUJDLHZVBgb1QS0jhss4cPP93EW7s+uC5bikET2twEF3KV+7rDblJcmNvTR7VJejqd2C2g==", + "dev": true, + "requires": { + "@babel/highlight": "^7.8.3" + } + }, + "@babel/highlight": { + "version": "7.9.0", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.9.0.tgz", + "integrity": "sha512-lJZPilxX7Op3Nv/2cvFdnlepPXDxi29wxteT57Q965oc5R9v86ztx0jfxVrTcBk8C2kcPkkDa2Z4T3ZsPPVWsQ==", + "dev": true, + "requires": { + "@babel/helper-validator-identifier": "^7.9.0", + "chalk": "^2.0.0", + "js-tokens": "^4.0.0" + } + }, + "@babel/types": { + "version": "7.9.5", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.9.5.tgz", + "integrity": "sha512-XjnvNqenk818r5zMaba+sLQjnbda31UfUURv3ei0qPQw4u+j2jMyJ5b11y8ZHYTRSI3NnInQkkkRT4fLqqPdHg==", + "dev": true, + "requires": { + "@babel/helper-validator-identifier": "^7.9.5", + "lodash": "^4.17.13", + "to-fast-properties": "^2.0.0" + } + } } }, - "babel-helper-function-name": { - "version": "6.24.1", - "resolved": "https://registry.npmjs.org/babel-helper-function-name/-/babel-helper-function-name-6.24.1.tgz", - "integrity": "sha1-00dbjAPtmCQqJbSDUasYOZ01gKk=", + "@babel/traverse": { + "version": "7.9.5", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.9.5.tgz", + "integrity": "sha512-c4gH3jsvSuGUezlP6rzSJ6jf8fYjLj3hsMZRx/nX0h+fmHN0w+ekubRrHPqnMec0meycA2nwCsJ7dC8IPem2FQ==", + "dev": true, "requires": { - "babel-helper-get-function-arity": "^6.24.1", - "babel-runtime": "^6.22.0", - "babel-template": "^6.24.1", - "babel-traverse": "^6.24.1", - "babel-types": "^6.24.1" + "@babel/code-frame": "^7.8.3", + "@babel/generator": "^7.9.5", + "@babel/helper-function-name": "^7.9.5", + "@babel/helper-split-export-declaration": "^7.8.3", + "@babel/parser": "^7.9.0", + "@babel/types": "^7.9.5", + "debug": "^4.1.0", + "globals": "^11.1.0", + "lodash": "^4.17.13" + }, + "dependencies": { + "@babel/code-frame": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.8.3.tgz", + "integrity": "sha512-a9gxpmdXtZEInkCSHUJDLHZVBgb1QS0jhss4cPP93EW7s+uC5bikET2twEF3KV+7rDblJcmNvTR7VJejqd2C2g==", + "dev": true, + "requires": { + "@babel/highlight": "^7.8.3" + } + }, + "@babel/highlight": { + "version": "7.9.0", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.9.0.tgz", + "integrity": "sha512-lJZPilxX7Op3Nv/2cvFdnlepPXDxi29wxteT57Q965oc5R9v86ztx0jfxVrTcBk8C2kcPkkDa2Z4T3ZsPPVWsQ==", + "dev": true, + "requires": { + "@babel/helper-validator-identifier": "^7.9.0", + "chalk": "^2.0.0", + "js-tokens": "^4.0.0" + } + }, + "@babel/types": { + "version": "7.9.5", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.9.5.tgz", + "integrity": "sha512-XjnvNqenk818r5zMaba+sLQjnbda31UfUURv3ei0qPQw4u+j2jMyJ5b11y8ZHYTRSI3NnInQkkkRT4fLqqPdHg==", + "dev": true, + "requires": { + "@babel/helper-validator-identifier": "^7.9.5", + "lodash": "^4.17.13", + "to-fast-properties": "^2.0.0" + } + }, + "debug": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", + "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", + "dev": true, + "requires": { + "ms": "^2.1.1" + } + }, + "ms": { + "version": "2.1.2", + "resolved": 
"https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + } } }, - "babel-helper-get-function-arity": { - "version": "6.24.1", - "resolved": "https://registry.npmjs.org/babel-helper-get-function-arity/-/babel-helper-get-function-arity-6.24.1.tgz", - "integrity": "sha1-j3eCqpNAfEHTqlCQj4mwMbG2hT0=", + "@babel/types": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.7.4.tgz", + "integrity": "sha512-cz5Ji23KCi4T+YIE/BolWosrJuSmoZeN1EFnRtBwF+KKLi8GG/Z2c2hOJJeCXPk4mwk4QFvTmwIodJowXgttRA==", + "dev": true, "requires": { - "babel-runtime": "^6.22.0", - "babel-types": "^6.24.1" + "esutils": "^2.0.2", + "lodash": "^4.17.13", + "to-fast-properties": "^2.0.0" + }, + "dependencies": { + "to-fast-properties": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz", + "integrity": "sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4=", + "dev": true + } } }, - "babel-helper-hoist-variables": { - "version": "6.24.1", - "resolved": "https://registry.npmjs.org/babel-helper-hoist-variables/-/babel-helper-hoist-variables-6.24.1.tgz", - "integrity": "sha1-HssnaJydJVE+rbyZFKc/VAi+enY=", + "@cnakazawa/watch": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@cnakazawa/watch/-/watch-1.0.4.tgz", + "integrity": "sha512-v9kIhKwjeZThiWrLmj0y17CWoyddASLj9O2yvbZkbvw/N3rWOYy9zkV66ursAoVr0mV15bL8g0c4QZUE6cdDoQ==", + "dev": true, "requires": { - "babel-runtime": "^6.22.0", - "babel-types": "^6.24.1" + "exec-sh": "^0.3.2", + "minimist": "^1.2.0" } }, - "babel-helper-optimise-call-expression": { - "version": "6.24.1", - "resolved": "https://registry.npmjs.org/babel-helper-optimise-call-expression/-/babel-helper-optimise-call-expression-6.24.1.tgz", - "integrity": "sha1-96E0J7qfc/j0+pk8VKl4gtEkQlc=", - "requires": { - "babel-runtime": "^6.22.0", - "babel-types": "^6.24.1" - } + "@csstools/convert-colors": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/@csstools/convert-colors/-/convert-colors-1.4.0.tgz", + "integrity": "sha512-5a6wqoJV/xEdbRNKVo6I4hO3VjyDq//8q2f9I6PBAvMesJHFauXDorcNCsr9RzvsZnaWi5NYCcfyqP1QeFHFbw==", + "dev": true }, - "babel-helper-regex": { - "version": "6.26.0", - "resolved": "https://registry.npmjs.org/babel-helper-regex/-/babel-helper-regex-6.26.0.tgz", - "integrity": "sha1-MlxZ+QL4LyS3T6zu0DY5VPZJXnI=", - "requires": { - "babel-runtime": "^6.26.0", - "babel-types": "^6.26.0", - "lodash": "^4.17.4" - } + "@csstools/normalize.css": { + "version": "10.1.0", + "resolved": "https://registry.npmjs.org/@csstools/normalize.css/-/normalize.css-10.1.0.tgz", + "integrity": "sha512-ij4wRiunFfaJxjB0BdrYHIH8FxBJpOwNPhhAcunlmPdXudL1WQV1qoP9un6JsEBAgQH+7UXyyjh0g7jTxXK6tg==", + "dev": true }, - "babel-helper-remap-async-to-generator": { - "version": "6.24.1", - "resolved": "https://registry.npmjs.org/babel-helper-remap-async-to-generator/-/babel-helper-remap-async-to-generator-6.24.1.tgz", - "integrity": "sha1-XsWBgnrXI/7N04HxySg5BnbkVRs=", + "@google-cloud/common": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-2.2.3.tgz", + "integrity": "sha512-lvw54mGKn8VqVIy2NzAk0l5fntBFX4UwQhHk6HaqkyCQ7WBl5oz4XhzKMtMilozF/3ObPcDogqwuyEWyZ6rnQQ==", + "dev": true, "requires": { - "babel-helper-function-name": "^6.24.1", - "babel-runtime": "^6.22.0", - "babel-template": "^6.24.1", - "babel-traverse": "^6.24.1", - "babel-types": "^6.24.1" + 
"@google-cloud/projectify": "^1.0.0", + "@google-cloud/promisify": "^1.0.0", + "arrify": "^2.0.0", + "duplexify": "^3.6.0", + "ent": "^2.2.0", + "extend": "^3.0.2", + "google-auth-library": "^5.5.0", + "retry-request": "^4.0.0", + "teeny-request": "^5.2.1" + }, + "dependencies": { + "arrify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/arrify/-/arrify-2.0.1.tgz", + "integrity": "sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug==", + "dev": true + } } }, - "babel-helper-replace-supers": { - "version": "6.24.1", - "resolved": "https://registry.npmjs.org/babel-helper-replace-supers/-/babel-helper-replace-supers-6.24.1.tgz", - "integrity": "sha1-v22/5Dk40XNpohPKiov3S2qQqxo=", + "@google-cloud/paginator": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/@google-cloud/paginator/-/paginator-2.0.2.tgz", + "integrity": "sha512-PCddVtZWvw0iZ3BLIsCXMBQvxUcS9O5CgfHBu8Zd8T3DCiML+oQED1odsbl3CQ9d3RrvBaj+eIh7Dv12D15PbA==", + "dev": true, "requires": { - "babel-helper-optimise-call-expression": "^6.24.1", - "babel-messages": "^6.23.0", - "babel-runtime": "^6.22.0", - "babel-template": "^6.24.1", - "babel-traverse": "^6.24.1", - "babel-types": "^6.24.1" + "arrify": "^2.0.0", + "extend": "^3.0.2" + }, + "dependencies": { + "arrify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/arrify/-/arrify-2.0.1.tgz", + "integrity": "sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug==", + "dev": true + } } }, - "babel-helpers": { - "version": "6.24.1", - "resolved": "https://registry.npmjs.org/babel-helpers/-/babel-helpers-6.24.1.tgz", - "integrity": "sha1-NHHenK7DiOXIUOWX5Yom3fN2ArI=", - "requires": { - "babel-runtime": "^6.22.0", - "babel-template": "^6.24.1" - } + "@google-cloud/projectify": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-1.0.2.tgz", + "integrity": "sha512-WnkGxvk4U1kAJpoS/Ehk+3MZXVW+XHHhwc/QyD6G8Za4xml3Fv+NRn/bYffl1TxSg+gE0N0mj9Shgc7e8+fl8A==", + "dev": true }, - "babel-jest": { - "version": "22.4.4", - "resolved": "https://registry.npmjs.org/babel-jest/-/babel-jest-22.4.4.tgz", - "integrity": "sha512-A9NB6/lZhYyypR9ATryOSDcqBaqNdzq4U+CN+/wcMsLcmKkPxQEoTKLajGfd3IkxNyVBT8NewUK2nWyGbSzHEQ==", - "requires": { - "babel-plugin-istanbul": "^4.1.5", - "babel-preset-jest": "^22.4.4" - } + "@google-cloud/promisify": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-1.0.3.tgz", + "integrity": "sha512-Rufgfl3TnkIil3CjsH33Q6093zeoVqyqCdvtvgHuCqRJxCZYfaVPIyr8JViMeLTD4Ja630pRKKZVSjKggoVbNg==", + "dev": true }, - "babel-loader": { - "version": "7.1.5", - "resolved": "https://registry.npmjs.org/babel-loader/-/babel-loader-7.1.5.tgz", - "integrity": "sha512-iCHfbieL5d1LfOQeeVJEUyD9rTwBcP/fcEbRCfempxTDuqrKpu0AZjLAQHEQa3Yqyj9ORKe2iHfoj4rHLf7xpw==", + "@google-cloud/storage": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/@google-cloud/storage/-/storage-4.1.3.tgz", + "integrity": "sha512-79Ag+4eQq+KFJcKB85AimepoqTJOGuDLAmJd7JkLc8NM12a87JTCoGi65oi1eZ4H77AV0uUQxSS2Fo/hZL3+kQ==", + "dev": true, "requires": { - "find-cache-dir": "^1.0.0", - "loader-utils": "^1.0.2", - "mkdirp": "^0.5.1" + "@google-cloud/common": "^2.1.1", + "@google-cloud/paginator": "^2.0.0", + "@google-cloud/promisify": "^1.0.0", + "arrify": "^2.0.0", + "compressible": "^2.0.12", + "concat-stream": "^2.0.0", + "date-and-time": "^0.11.0", + "duplexify": "^3.5.0", + "extend": "^3.0.2", + 
"gaxios": "^2.0.1", + "gcs-resumable-upload": "^2.2.4", + "hash-stream-validation": "^0.2.2", + "mime": "^2.2.0", + "mime-types": "^2.0.8", + "onetime": "^5.1.0", + "p-limit": "^2.2.0", + "pumpify": "^2.0.0", + "readable-stream": "^3.4.0", + "snakeize": "^0.1.0", + "stream-events": "^1.0.1", + "through2": "^3.0.0", + "xdg-basedir": "^4.0.0" + }, + "dependencies": { + "arrify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/arrify/-/arrify-2.0.1.tgz", + "integrity": "sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug==", + "dev": true + }, + "concat-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/concat-stream/-/concat-stream-2.0.0.tgz", + "integrity": "sha512-MWufYdFw53ccGjCA+Ol7XJYpAlW6/prSMzuPOTRnJGcGzuhLn4Scrz7qf6o8bROZ514ltazcIFJZevcfbo0x7A==", + "dev": true, + "requires": { + "buffer-from": "^1.0.0", + "inherits": "^2.0.3", + "readable-stream": "^3.0.2", + "typedarray": "^0.0.6" + } + }, + "mime": { + "version": "2.4.4", + "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.4.tgz", + "integrity": "sha512-LRxmNwziLPT828z+4YkNzloCFC2YM4wrB99k+AV5ZbEyfGNWfG8SO1FUXLmLDBSo89NrJZ4DIWeLjy1CHGhMGA==", + "dev": true + }, + "mimic-fn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", + "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", + "dev": true + }, + "onetime": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.0.tgz", + "integrity": "sha512-5NcSkPHhwTVFIQN+TUqXoS5+dlElHXdpAWu9I0HP20YOtIi+aZ0Ct82jdlILDxjLEAWwvm+qj1m6aEtsDVmm6Q==", + "dev": true, + "requires": { + "mimic-fn": "^2.1.0" + } + }, + "p-limit": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.2.1.tgz", + "integrity": "sha512-85Tk+90UCVWvbDavCLKPOLC9vvY8OwEX/RtKF+/1OADJMVlFfEHOiMTPVyxg7mk/dKa+ipdHm0OUkTvCpMTuwg==", + "dev": true, + "requires": { + "p-try": "^2.0.0" + } + }, + "p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "dev": true + }, + "pump": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", + "integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==", + "dev": true, + "requires": { + "end-of-stream": "^1.1.0", + "once": "^1.3.1" + } + }, + "pumpify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/pumpify/-/pumpify-2.0.1.tgz", + "integrity": "sha512-m7KOje7jZxrmutanlkS1daj1dS6z6BgslzOXmcSEpIlCxM3VJH7lG5QLeck/6hgF6F4crFf01UtQmNsJfweTAw==", + "dev": true, + "requires": { + "duplexify": "^4.1.1", + "inherits": "^2.0.3", + "pump": "^3.0.0" + }, + "dependencies": { + "duplexify": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.1.tgz", + "integrity": "sha512-DY3xVEmVHTv1wSzKNbwoU6nVjzI369Y6sPoqfYr0/xlx3IdX2n94xIszTcjPO8W8ZIv0Wb0PXNcjuZyT4wiICA==", + "dev": true, + "requires": { + "end-of-stream": "^1.4.1", + "inherits": "^2.0.3", + "readable-stream": "^3.1.1", + "stream-shift": "^1.0.0" + } + } + } + }, + "readable-stream": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.4.0.tgz", + "integrity": 
"sha512-jItXPLmrSR8jmTRmRWJXCnGJsfy85mB3Wd/uINMXA65yrnFo0cPClFIUWzo2najVNSl+mx7/4W8ttlLWJe99pQ==", + "dev": true, + "requires": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + } + }, + "through2": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/through2/-/through2-3.0.1.tgz", + "integrity": "sha512-M96dvTalPT3YbYLaKaCuwu+j06D/8Jfib0o/PxbVt6Amhv3dUAtW6rTV1jPgJSBG83I/e04Y6xkVdVhSRhi0ww==", + "dev": true, + "requires": { + "readable-stream": "2 || 3" + } + }, + "xdg-basedir": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/xdg-basedir/-/xdg-basedir-4.0.0.tgz", + "integrity": "sha512-PSNhEJDejZYV7h50BohL09Er9VaIefr2LMAf3OEmpCkjOi34eYyQYAXUTjEQtZJTKcF0E2UKTh+osDLsgNim9Q==", + "dev": true + } } }, - "babel-messages": { - "version": "6.23.0", - "resolved": "https://registry.npmjs.org/babel-messages/-/babel-messages-6.23.0.tgz", - "integrity": "sha1-8830cDhYA1sqKVHG7F7fbGLyYw4=", - "requires": { - "babel-runtime": "^6.22.0" - } + "@hapi/address": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/@hapi/address/-/address-2.1.4.tgz", + "integrity": "sha512-QD1PhQk+s31P1ixsX0H0Suoupp3VMXzIVMSwobR3F3MSUO2YCV0B7xqLcUw/Bh8yuvd3LhpyqLQWTNcRmp6IdQ==", + "dev": true }, - "babel-plugin-check-es2015-constants": { - "version": "6.22.0", - "resolved": "https://registry.npmjs.org/babel-plugin-check-es2015-constants/-/babel-plugin-check-es2015-constants-6.22.0.tgz", - "integrity": "sha1-NRV7EBQm/S/9PaP3XH0ekYNbv4o=", - "requires": { - "babel-runtime": "^6.22.0" - } + "@hapi/bourne": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/@hapi/bourne/-/bourne-1.3.2.tgz", + "integrity": "sha512-1dVNHT76Uu5N3eJNTYcvxee+jzX4Z9lfciqRRHCU27ihbUcYi+iSc2iml5Ke1LXe1SyJCLA0+14Jh4tXJgOppA==", + "dev": true }, - "babel-plugin-dynamic-import-node": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/babel-plugin-dynamic-import-node/-/babel-plugin-dynamic-import-node-1.1.0.tgz", - "integrity": "sha512-tTfZbM9Ecwj3GK50mnPrUpinTwA4xXmDiQGCk/aBYbvl1+X8YqldK86wZ1owVJ4u3mrKbRlXMma80J18qwiaTQ==", + "@hapi/hoek": { + "version": "8.5.1", + "resolved": "https://registry.npmjs.org/@hapi/hoek/-/hoek-8.5.1.tgz", + "integrity": "sha512-yN7kbciD87WzLGc5539Tn0sApjyiGHAJgKvG9W8C7O+6c7qmoQMfVs0W4bX17eqz6C78QJqqFrtgdK5EWf6Qow==", + "dev": true + }, + "@hapi/joi": { + "version": "15.1.1", + "resolved": "https://registry.npmjs.org/@hapi/joi/-/joi-15.1.1.tgz", + "integrity": "sha512-entf8ZMOK8sc+8YfeOlM8pCfg3b5+WZIKBfUaaJT8UsjAAPjartzxIYm3TIbjvA4u+u++KbcXD38k682nVHDAQ==", + "dev": true, "requires": { - "babel-plugin-syntax-dynamic-import": "^6.18.0", - "babel-template": "^6.26.0", - "babel-types": "^6.26.0" + "@hapi/address": "2.x.x", + "@hapi/bourne": "1.x.x", + "@hapi/hoek": "8.x.x", + "@hapi/topo": "3.x.x" } }, - "babel-plugin-istanbul": { - "version": "4.1.6", - "resolved": "https://registry.npmjs.org/babel-plugin-istanbul/-/babel-plugin-istanbul-4.1.6.tgz", - "integrity": "sha512-PWP9FQ1AhZhS01T/4qLSKoHGY/xvkZdVBGlKM/HuxxS3+sC66HhTNR7+MpbO/so/cz/wY94MeSWJuP1hXIPfwQ==", + "@hapi/topo": { + "version": "3.1.6", + "resolved": "https://registry.npmjs.org/@hapi/topo/-/topo-3.1.6.tgz", + "integrity": "sha512-tAag0jEcjwH+P2quUfipd7liWCNX2F8NvYjQp2wtInsZxnMlypdw0FtAOLxtvvkO+GSRRbmNi8m/5y42PQJYCQ==", + "dev": true, "requires": { - "babel-plugin-syntax-object-rest-spread": "^6.13.0", - "find-up": "^2.1.0", - "istanbul-lib-instrument": "^1.10.1", - "test-exclude": "^4.2.1" + "@hapi/hoek": "^8.3.0" } }, - "babel-plugin-jest-hoist": { - 
"version": "22.4.4", - "resolved": "https://registry.npmjs.org/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-22.4.4.tgz", - "integrity": "sha512-DUvGfYaAIlkdnygVIEl0O4Av69NtuQWcrjMOv6DODPuhuGLDnbsARz3AwiiI/EkIMMlxQDUcrZ9yoyJvTNjcVQ==" - }, - "babel-plugin-syntax-async-functions": { - "version": "6.13.0", - "resolved": "http://registry.npmjs.org/babel-plugin-syntax-async-functions/-/babel-plugin-syntax-async-functions-6.13.0.tgz", - "integrity": "sha1-ytnK0RkbWtY0vzCuCHI5HgZHvpU=" - }, - "babel-plugin-syntax-class-properties": { - "version": "6.13.0", - "resolved": "http://registry.npmjs.org/babel-plugin-syntax-class-properties/-/babel-plugin-syntax-class-properties-6.13.0.tgz", - "integrity": "sha1-1+sjt5oxf4VDlixQW4J8fWysJ94=" - }, - "babel-plugin-syntax-dynamic-import": { - "version": "6.18.0", - "resolved": "http://registry.npmjs.org/babel-plugin-syntax-dynamic-import/-/babel-plugin-syntax-dynamic-import-6.18.0.tgz", - "integrity": "sha1-jWomIpyDdFqZgqRBBRVyyqF5sdo=" - }, - "babel-plugin-syntax-exponentiation-operator": { - "version": "6.13.0", - "resolved": "http://registry.npmjs.org/babel-plugin-syntax-exponentiation-operator/-/babel-plugin-syntax-exponentiation-operator-6.13.0.tgz", - "integrity": "sha1-nufoM3KQ2pUoggGmpX9BcDF4MN4=" - }, - "babel-plugin-syntax-flow": { - "version": "6.18.0", - "resolved": "http://registry.npmjs.org/babel-plugin-syntax-flow/-/babel-plugin-syntax-flow-6.18.0.tgz", - "integrity": "sha1-TDqyCiryaqIM0lmVw5jE63AxDI0=" - }, - "babel-plugin-syntax-jsx": { - "version": "6.18.0", - "resolved": "http://registry.npmjs.org/babel-plugin-syntax-jsx/-/babel-plugin-syntax-jsx-6.18.0.tgz", - "integrity": "sha1-CvMqmm4Tyno/1QaeYtew9Y0NiUY=" - }, - "babel-plugin-syntax-object-rest-spread": { - "version": "6.13.0", - "resolved": "https://registry.npmjs.org/babel-plugin-syntax-object-rest-spread/-/babel-plugin-syntax-object-rest-spread-6.13.0.tgz", - "integrity": "sha1-/WU28rzhODb/o6VFjEkDpZe7O/U=" - }, - "babel-plugin-syntax-trailing-function-commas": { - "version": "6.22.0", - "resolved": "https://registry.npmjs.org/babel-plugin-syntax-trailing-function-commas/-/babel-plugin-syntax-trailing-function-commas-6.22.0.tgz", - "integrity": "sha1-ugNgk3+NBuQBgKQ/4NVhb/9TLPM=" - }, - "babel-plugin-transform-async-to-generator": { - "version": "6.24.1", - "resolved": "https://registry.npmjs.org/babel-plugin-transform-async-to-generator/-/babel-plugin-transform-async-to-generator-6.24.1.tgz", - "integrity": "sha1-ZTbjeK/2yx1VF6wOQOs+n8jQh2E=", + "@jest/console": { + "version": "24.9.0", + "resolved": "https://registry.npmjs.org/@jest/console/-/console-24.9.0.tgz", + "integrity": "sha512-Zuj6b8TnKXi3q4ymac8EQfc3ea/uhLeCGThFqXeC8H9/raaH8ARPUTdId+XyGd03Z4In0/VjD2OYFcBF09fNLQ==", + "dev": true, "requires": { - "babel-helper-remap-async-to-generator": "^6.24.1", - "babel-plugin-syntax-async-functions": "^6.8.0", - "babel-runtime": "^6.22.0" + "@jest/source-map": "^24.9.0", + "chalk": "^2.0.1", + "slash": "^2.0.0" + }, + "dependencies": { + "chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dev": true, + "requires": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + } + }, + "slash": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-2.0.0.tgz", + "integrity": 
"sha512-ZYKh3Wh2z1PpEXWr0MpSBZ0V6mZHAQfYevttO11c51CaWjGTaadiKZ+wVt1PbMlDV5qhMFslpZCemhwOK7C89A==", + "dev": true + } } }, - "babel-plugin-transform-class-properties": { - "version": "6.24.1", - "resolved": "https://registry.npmjs.org/babel-plugin-transform-class-properties/-/babel-plugin-transform-class-properties-6.24.1.tgz", - "integrity": "sha1-anl2PqYdM9NvN7YRqp3vgagbRqw=", + "@jest/core": { + "version": "24.9.0", + "resolved": "https://registry.npmjs.org/@jest/core/-/core-24.9.0.tgz", + "integrity": "sha512-Fogg3s4wlAr1VX7q+rhV9RVnUv5tD7VuWfYy1+whMiWUrvl7U3QJSJyWcDio9Lq2prqYsZaeTv2Rz24pWGkJ2A==", + "dev": true, "requires": { - "babel-helper-function-name": "^6.24.1", - "babel-plugin-syntax-class-properties": "^6.8.0", - "babel-runtime": "^6.22.0", - "babel-template": "^6.24.1" + "@jest/console": "^24.7.1", + "@jest/reporters": "^24.9.0", + "@jest/test-result": "^24.9.0", + "@jest/transform": "^24.9.0", + "@jest/types": "^24.9.0", + "ansi-escapes": "^3.0.0", + "chalk": "^2.0.1", + "exit": "^0.1.2", + "graceful-fs": "^4.1.15", + "jest-changed-files": "^24.9.0", + "jest-config": "^24.9.0", + "jest-haste-map": "^24.9.0", + "jest-message-util": "^24.9.0", + "jest-regex-util": "^24.3.0", + "jest-resolve": "^24.9.0", + "jest-resolve-dependencies": "^24.9.0", + "jest-runner": "^24.9.0", + "jest-runtime": "^24.9.0", + "jest-snapshot": "^24.9.0", + "jest-util": "^24.9.0", + "jest-validate": "^24.9.0", + "jest-watcher": "^24.9.0", + "micromatch": "^3.1.10", + "p-each-series": "^1.0.0", + "realpath-native": "^1.1.0", + "rimraf": "^2.5.4", + "slash": "^2.0.0", + "strip-ansi": "^5.0.0" } }, - "babel-plugin-transform-es2015-arrow-functions": { - "version": "6.22.0", - "resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-arrow-functions/-/babel-plugin-transform-es2015-arrow-functions-6.22.0.tgz", - "integrity": "sha1-RSaSy3EdX3ncf4XkQM5BufJE0iE=", + "@jest/environment": { + "version": "24.9.0", + "resolved": "https://registry.npmjs.org/@jest/environment/-/environment-24.9.0.tgz", + "integrity": "sha512-5A1QluTPhvdIPFYnO3sZC3smkNeXPVELz7ikPbhUj0bQjB07EoE9qtLrem14ZUYWdVayYbsjVwIiL4WBIMV4aQ==", + "dev": true, "requires": { - "babel-runtime": "^6.22.0" + "@jest/fake-timers": "^24.9.0", + "@jest/transform": "^24.9.0", + "@jest/types": "^24.9.0", + "jest-mock": "^24.9.0" } }, - "babel-plugin-transform-es2015-block-scoped-functions": { - "version": "6.22.0", - "resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-block-scoped-functions/-/babel-plugin-transform-es2015-block-scoped-functions-6.22.0.tgz", - "integrity": "sha1-u8UbSflk1wy42OC5ToICRs46YUE=", + "@jest/fake-timers": { + "version": "24.9.0", + "resolved": "https://registry.npmjs.org/@jest/fake-timers/-/fake-timers-24.9.0.tgz", + "integrity": "sha512-eWQcNa2YSwzXWIMC5KufBh3oWRIijrQFROsIqt6v/NS9Io/gknw1jsAC9c+ih/RQX4A3O7SeWAhQeN0goKhT9A==", + "dev": true, "requires": { - "babel-runtime": "^6.22.0" + "@jest/types": "^24.9.0", + "jest-message-util": "^24.9.0", + "jest-mock": "^24.9.0" } }, - "babel-plugin-transform-es2015-block-scoping": { - "version": "6.26.0", - "resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-block-scoping/-/babel-plugin-transform-es2015-block-scoping-6.26.0.tgz", - "integrity": "sha1-1w9SmcEwjQXBL0Y4E7CgnnOxiV8=", + "@jest/reporters": { + "version": "24.9.0", + "resolved": "https://registry.npmjs.org/@jest/reporters/-/reporters-24.9.0.tgz", + "integrity": "sha512-mu4X0yjaHrffOsWmVLzitKmmmWSQ3GGuefgNscUSWNiUNcEOSEQk9k3pERKEQVBb0Cnn88+UESIsZEMH3o88Gw==", + "dev": true, "requires": 
{ - "babel-runtime": "^6.26.0", - "babel-template": "^6.26.0", - "babel-traverse": "^6.26.0", - "babel-types": "^6.26.0", - "lodash": "^4.17.4" + "@jest/environment": "^24.9.0", + "@jest/test-result": "^24.9.0", + "@jest/transform": "^24.9.0", + "@jest/types": "^24.9.0", + "chalk": "^2.0.1", + "exit": "^0.1.2", + "glob": "^7.1.2", + "istanbul-lib-coverage": "^2.0.2", + "istanbul-lib-instrument": "^3.0.1", + "istanbul-lib-report": "^2.0.4", + "istanbul-lib-source-maps": "^3.0.1", + "istanbul-reports": "^2.2.6", + "jest-haste-map": "^24.9.0", + "jest-resolve": "^24.9.0", + "jest-runtime": "^24.9.0", + "jest-util": "^24.9.0", + "jest-worker": "^24.6.0", + "node-notifier": "^5.4.2", + "slash": "^2.0.0", + "source-map": "^0.6.0", + "string-length": "^2.0.0" + }, + "dependencies": { + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true + } } }, - "babel-plugin-transform-es2015-classes": { - "version": "6.24.1", - "resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-classes/-/babel-plugin-transform-es2015-classes-6.24.1.tgz", - "integrity": "sha1-WkxYpQyclGHlZLSyo7+ryXolhNs=", + "@jest/source-map": { + "version": "24.9.0", + "resolved": "https://registry.npmjs.org/@jest/source-map/-/source-map-24.9.0.tgz", + "integrity": "sha512-/Xw7xGlsZb4MJzNDgB7PW5crou5JqWiBQaz6xyPd3ArOg2nfn/PunV8+olXbbEZzNl591o5rWKE9BRDaFAuIBg==", + "dev": true, "requires": { - "babel-helper-define-map": "^6.24.1", - "babel-helper-function-name": "^6.24.1", - "babel-helper-optimise-call-expression": "^6.24.1", - "babel-helper-replace-supers": "^6.24.1", - "babel-messages": "^6.23.0", - "babel-runtime": "^6.22.0", - "babel-template": "^6.24.1", - "babel-traverse": "^6.24.1", - "babel-types": "^6.24.1" + "callsites": "^3.0.0", + "graceful-fs": "^4.1.15", + "source-map": "^0.6.0" + }, + "dependencies": { + "callsites": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", + "dev": true + }, + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true + } } }, - "babel-plugin-transform-es2015-computed-properties": { - "version": "6.24.1", - "resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-computed-properties/-/babel-plugin-transform-es2015-computed-properties-6.24.1.tgz", - "integrity": "sha1-b+Ko0WiV1WNPTNmZttNICjCBWbM=", + "@jest/test-result": { + "version": "24.9.0", + "resolved": "https://registry.npmjs.org/@jest/test-result/-/test-result-24.9.0.tgz", + "integrity": "sha512-XEFrHbBonBJ8dGp2JmF8kP/nQI/ImPpygKHwQ/SY+es59Z3L5PI4Qb9TQQMAEeYsThG1xF0k6tmG0tIKATNiiA==", + "dev": true, "requires": { - "babel-runtime": "^6.22.0", - "babel-template": "^6.24.1" + "@jest/console": "^24.9.0", + "@jest/types": "^24.9.0", + "@types/istanbul-lib-coverage": "^2.0.0" } }, - "babel-plugin-transform-es2015-destructuring": { - "version": "6.23.0", - "resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-destructuring/-/babel-plugin-transform-es2015-destructuring-6.23.0.tgz", - "integrity": "sha1-mXux8auWf2gtKwh2/jWNYOdlxW0=", + "@jest/test-sequencer": { + "version": 
"24.9.0", + "resolved": "https://registry.npmjs.org/@jest/test-sequencer/-/test-sequencer-24.9.0.tgz", + "integrity": "sha512-6qqsU4o0kW1dvA95qfNog8v8gkRN9ph6Lz7r96IvZpHdNipP2cBcb07J1Z45mz/VIS01OHJ3pY8T5fUY38tg4A==", + "dev": true, "requires": { - "babel-runtime": "^6.22.0" + "@jest/test-result": "^24.9.0", + "jest-haste-map": "^24.9.0", + "jest-runner": "^24.9.0", + "jest-runtime": "^24.9.0" } }, - "babel-plugin-transform-es2015-duplicate-keys": { - "version": "6.24.1", - "resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-duplicate-keys/-/babel-plugin-transform-es2015-duplicate-keys-6.24.1.tgz", - "integrity": "sha1-c+s9MQypaePvnskcU3QabxV2Qj4=", + "@jest/transform": { + "version": "24.9.0", + "resolved": "https://registry.npmjs.org/@jest/transform/-/transform-24.9.0.tgz", + "integrity": "sha512-TcQUmyNRxV94S0QpMOnZl0++6RMiqpbH/ZMccFB/amku6Uwvyb1cjYX7xkp5nGNkbX4QPH/FcB6q1HBTHynLmQ==", + "dev": true, "requires": { - "babel-runtime": "^6.22.0", - "babel-types": "^6.24.1" + "@babel/core": "^7.1.0", + "@jest/types": "^24.9.0", + "babel-plugin-istanbul": "^5.1.0", + "chalk": "^2.0.1", + "convert-source-map": "^1.4.0", + "fast-json-stable-stringify": "^2.0.0", + "graceful-fs": "^4.1.15", + "jest-haste-map": "^24.9.0", + "jest-regex-util": "^24.9.0", + "jest-util": "^24.9.0", + "micromatch": "^3.1.10", + "pirates": "^4.0.1", + "realpath-native": "^1.1.0", + "slash": "^2.0.0", + "source-map": "^0.6.1", + "write-file-atomic": "2.4.1" + }, + "dependencies": { + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true + } } }, - "babel-plugin-transform-es2015-for-of": { - "version": "6.23.0", - "resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-for-of/-/babel-plugin-transform-es2015-for-of-6.23.0.tgz", - "integrity": "sha1-9HyVsrYT3x0+zC/bdXNiPHUkhpE=", + "@jest/types": { + "version": "24.9.0", + "resolved": "https://registry.npmjs.org/@jest/types/-/types-24.9.0.tgz", + "integrity": "sha512-XKK7ze1apu5JWQ5eZjHITP66AX+QsLlbaJRBGYr8pNzwcAE2JVkwnf0yqjHTsDRcjR0mujy/NmZMXw5kl+kGBw==", + "dev": true, "requires": { - "babel-runtime": "^6.22.0" + "@types/istanbul-lib-coverage": "^2.0.0", + "@types/istanbul-reports": "^1.1.1", + "@types/yargs": "^13.0.0" + } + }, + "@kubeflow/frontend": { + "version": "github:kubeflow/frontend#fffc59d88485e50f5e3c8fc3cc6c2d6188071883", + "from": "github:kubeflow/frontend#fffc59d", + "requires": { + "@material-ui/core": "^3.7.1", + "@material-ui/icons": "^3.0.1", + "@types/lodash.groupby": "^4.6.6", + "google-protobuf": "^3.11.2", + "grpc-web": "^1.0.7", + "lodash.debounce": "^4.0.8", + "lodash.flatten": "^4.4.0", + "lodash.groupby": "^4.6.0", + "lodash.isfunction": "^3.0.9", + "react-svg-line-chart": "^2.0.2" + }, + "dependencies": { + "google-protobuf": { + "version": "3.11.4", + "resolved": "https://registry.npmjs.org/google-protobuf/-/google-protobuf-3.11.4.tgz", + "integrity": "sha512-lL6b04rDirurUBOgsY2+LalI6Evq8eH5TcNzi7TYQ3BsIWelT0KSOQSBsXuavEkNf+odQU6c0lgz3UsZXeNX9Q==" + } } }, - "babel-plugin-transform-es2015-function-name": { - "version": "6.24.1", - "resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-function-name/-/babel-plugin-transform-es2015-function-name-6.24.1.tgz", - "integrity": "sha1-g0yJhTvDaxrw86TF26qU/Y6sqos=", + "@material-ui/core": { + "version": "3.7.1", + "resolved": 
"https://registry.npmjs.org/@material-ui/core/-/core-3.7.1.tgz", + "integrity": "sha512-CjIGwvzn84BgzXWzC9M/Tz2gDI7AfUe3G1JXkZQAVy+ddPikh+iZwn5snnElfcjuC+ahXxaIyK49ARt3NM49vQ==", "requires": { - "babel-helper-function-name": "^6.24.1", - "babel-runtime": "^6.22.0", - "babel-types": "^6.24.1" + "@babel/runtime": "7.2.0", + "@material-ui/utils": "^3.0.0-alpha.1", + "@types/jss": "^9.5.6", + "@types/react-transition-group": "^2.0.8", + "brcast": "^3.0.1", + "classnames": "^2.2.5", + "csstype": "^2.5.2", + "debounce": "^1.1.0", + "deepmerge": "^3.0.0", + "dom-helpers": "^3.2.1", + "hoist-non-react-statics": "^3.2.1", + "is-plain-object": "^2.0.4", + "jss": "^9.8.7", + "jss-camel-case": "^6.0.0", + "jss-default-unit": "^8.0.2", + "jss-global": "^3.0.0", + "jss-nested": "^6.0.1", + "jss-props-sort": "^6.0.0", + "jss-vendor-prefixer": "^7.0.0", + "keycode": "^2.1.9", + "normalize-scroll-left": "^0.1.2", + "popper.js": "^1.14.1", + "prop-types": "^15.6.0", + "react-event-listener": "^0.6.2", + "react-transition-group": "^2.2.1", + "recompose": "0.28.0 - 0.30.0", + "warning": "^4.0.1" } }, - "babel-plugin-transform-es2015-literals": { - "version": "6.22.0", - "resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-literals/-/babel-plugin-transform-es2015-literals-6.22.0.tgz", - "integrity": "sha1-T1SgLWzWbPkVKAAZox0xklN3yi4=", + "@material-ui/icons": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@material-ui/icons/-/icons-3.0.1.tgz", + "integrity": "sha512-1kNcxYiIT1x8iDPEAlgmKrfRTIV8UyK6fLVcZ9kMHIKGWft9I451V5mvSrbCjbf7MX1TbLWzZjph0aVCRf9MqQ==", "requires": { - "babel-runtime": "^6.22.0" + "@babel/runtime": "7.0.0", + "recompose": "^0.29.0" + }, + "dependencies": { + "@babel/runtime": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.0.0.tgz", + "integrity": "sha512-7hGhzlcmg01CvH1EHdSPVXYX1aJ8KCEyz6I9xYIi/asDtzBPMyMhVibhM/K6g/5qnKBwjZtp10bNZIEFTRW1MA==", + "requires": { + "regenerator-runtime": "^0.12.0" + } + }, + "hoist-non-react-statics": { + "version": "2.5.5", + "resolved": "https://registry.npmjs.org/hoist-non-react-statics/-/hoist-non-react-statics-2.5.5.tgz", + "integrity": "sha512-rqcy4pJo55FTTLWt+bU8ukscqHeE/e9KWvsOW2b/a3afxQZhwkQdT1rPPCJ0rYXdj4vNcasY8zHTH+jF/qStxw==" + }, + "recompose": { + "version": "0.29.0", + "resolved": "https://registry.npmjs.org/recompose/-/recompose-0.29.0.tgz", + "integrity": "sha512-J/qLXNU4W+AeHCDR70ajW8eMd1uroqZaECTj6qqDLPMILz3y0EzpYlvrnxKB9DnqcngWrtGwjXY9JeXaW9kS1A==", + "requires": { + "@babel/runtime": "^7.0.0", + "change-emitter": "^0.1.2", + "fbjs": "^0.8.1", + "hoist-non-react-statics": "^2.3.1", + "react-lifecycles-compat": "^3.0.2", + "symbol-observable": "^1.0.4" + } + } } }, - "babel-plugin-transform-es2015-modules-amd": { - "version": "6.24.1", - "resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-modules-amd/-/babel-plugin-transform-es2015-modules-amd-6.24.1.tgz", - "integrity": "sha1-Oz5UAXI5hC1tGcMBHEvS8AoA0VQ=", + "@material-ui/utils": { + "version": "3.0.0-alpha.2", + "resolved": "https://registry.npmjs.org/@material-ui/utils/-/utils-3.0.0-alpha.2.tgz", + "integrity": "sha512-dnxwCXMSLFFXiQm3EK/Ikxm4oYvr3WxxVoPZ0Uh4CBqZNr0J8nzDNlDcGP/0UeC134UUz2ZwgbTL/smxrGZ8sg==", "requires": { - "babel-plugin-transform-es2015-modules-commonjs": "^6.24.1", - "babel-runtime": "^6.22.0", - "babel-template": "^6.24.1" + "@babel/runtime": "7.2.0", + "prop-types": "^15.6.0", + "react-is": "^16.6.3" } }, - "babel-plugin-transform-es2015-modules-commonjs": { - "version": 
"6.26.2", - "resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-modules-commonjs/-/babel-plugin-transform-es2015-modules-commonjs-6.26.2.tgz", - "integrity": "sha512-CV9ROOHEdrjcwhIaJNBGMBCodN+1cfkwtM1SbUHmvyy35KGT7fohbpOxkE2uLz1o6odKK2Ck/tz47z+VqQfi9Q==", + "@mrmlnc/readdir-enhanced": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/@mrmlnc/readdir-enhanced/-/readdir-enhanced-2.2.1.tgz", + "integrity": "sha512-bPHp6Ji8b41szTOcaP63VlnbbO5Ny6dwAATtY6JTjh5N2OLrb5Qk/Th5cRkRQhkWCt+EJsYrNB0MiL+Gpn6e3g==", + "dev": true, "requires": { - "babel-plugin-transform-strict-mode": "^6.24.1", - "babel-runtime": "^6.26.0", - "babel-template": "^6.26.0", - "babel-types": "^6.26.0" + "call-me-maybe": "^1.0.1", + "glob-to-regexp": "^0.3.0" } }, - "babel-plugin-transform-es2015-modules-systemjs": { - "version": "6.24.1", - "resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-modules-systemjs/-/babel-plugin-transform-es2015-modules-systemjs-6.24.1.tgz", - "integrity": "sha1-/4mhQrkRmpBhlfXxBuzzBdlAfSM=", - "requires": { - "babel-helper-hoist-variables": "^6.24.1", - "babel-runtime": "^6.22.0", - "babel-template": "^6.24.1" - } + "@nodelib/fs.stat": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-1.1.3.tgz", + "integrity": "sha512-shAmDyaQC4H92APFoIaVDHCx5bStIocgvbwQyxPRrbUY20V1EYTbSDchWbuwlMG3V17cprZhA6+78JfB+3DTPw==", + "dev": true }, - "babel-plugin-transform-es2015-modules-umd": { - "version": "6.24.1", - "resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-modules-umd/-/babel-plugin-transform-es2015-modules-umd-6.24.1.tgz", - "integrity": "sha1-rJl+YoXNGO1hdq22B9YCNErThGg=", - "requires": { - "babel-plugin-transform-es2015-modules-amd": "^6.24.1", - "babel-runtime": "^6.22.0", - "babel-template": "^6.24.1" - } + "@sheerun/mutationobserver-shim": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/@sheerun/mutationobserver-shim/-/mutationobserver-shim-0.3.2.tgz", + "integrity": "sha512-vTCdPp/T/Q3oSqwHmZ5Kpa9oI7iLtGl3RQaA/NyLHikvcrPxACkkKVr/XzkSPJWXHRhKGzVvb0urJsbMlRxi1Q==", + "dev": true }, - "babel-plugin-transform-es2015-object-super": { - "version": "6.24.1", - "resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-object-super/-/babel-plugin-transform-es2015-object-super-6.24.1.tgz", - "integrity": "sha1-JM72muIcuDp/hgPa0CH1cusnj40=", - "requires": { - "babel-helper-replace-supers": "^6.24.1", - "babel-runtime": "^6.22.0" - } + "@svgr/babel-plugin-add-jsx-attribute": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-add-jsx-attribute/-/babel-plugin-add-jsx-attribute-4.2.0.tgz", + "integrity": "sha512-j7KnilGyZzYr/jhcrSYS3FGWMZVaqyCG0vzMCwzvei0coIkczuYMcniK07nI0aHJINciujjH11T72ICW5eL5Ig==", + "dev": true }, - "babel-plugin-transform-es2015-parameters": { - "version": "6.24.1", - "resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-parameters/-/babel-plugin-transform-es2015-parameters-6.24.1.tgz", - "integrity": "sha1-V6w1GrScrxSpfNE7CfZv3wpiXys=", - "requires": { - "babel-helper-call-delegate": "^6.24.1", - "babel-helper-get-function-arity": "^6.24.1", - "babel-runtime": "^6.22.0", - "babel-template": "^6.24.1", - "babel-traverse": "^6.24.1", - "babel-types": "^6.24.1" - } + "@svgr/babel-plugin-remove-jsx-attribute": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-remove-jsx-attribute/-/babel-plugin-remove-jsx-attribute-4.2.0.tgz", + "integrity": 
"sha512-3XHLtJ+HbRCH4n28S7y/yZoEQnRpl0tvTZQsHqvaeNXPra+6vE5tbRliH3ox1yZYPCxrlqaJT/Mg+75GpDKlvQ==", + "dev": true }, - "babel-plugin-transform-es2015-shorthand-properties": { - "version": "6.24.1", - "resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-shorthand-properties/-/babel-plugin-transform-es2015-shorthand-properties-6.24.1.tgz", - "integrity": "sha1-JPh11nIch2YbvZmkYi5R8U3jiqA=", - "requires": { - "babel-runtime": "^6.22.0", - "babel-types": "^6.24.1" - } + "@svgr/babel-plugin-remove-jsx-empty-expression": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-remove-jsx-empty-expression/-/babel-plugin-remove-jsx-empty-expression-4.2.0.tgz", + "integrity": "sha512-yTr2iLdf6oEuUE9MsRdvt0NmdpMBAkgK8Bjhl6epb+eQWk6abBaX3d65UZ3E3FWaOwePyUgNyNCMVG61gGCQ7w==", + "dev": true }, - "babel-plugin-transform-es2015-spread": { - "version": "6.22.0", - "resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-spread/-/babel-plugin-transform-es2015-spread-6.22.0.tgz", - "integrity": "sha1-1taKmfia7cRTbIGlQujdnxdG+NE=", - "requires": { - "babel-runtime": "^6.22.0" - } + "@svgr/babel-plugin-replace-jsx-attribute-value": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-replace-jsx-attribute-value/-/babel-plugin-replace-jsx-attribute-value-4.2.0.tgz", + "integrity": "sha512-U9m870Kqm0ko8beHawRXLGLvSi/ZMrl89gJ5BNcT452fAjtF2p4uRzXkdzvGJJJYBgx7BmqlDjBN/eCp5AAX2w==", + "dev": true }, - "babel-plugin-transform-es2015-sticky-regex": { - "version": "6.24.1", - "resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-sticky-regex/-/babel-plugin-transform-es2015-sticky-regex-6.24.1.tgz", - "integrity": "sha1-AMHNsaynERLN8M9hJsLta0V8zbw=", - "requires": { - "babel-helper-regex": "^6.24.1", - "babel-runtime": "^6.22.0", - "babel-types": "^6.24.1" - } + "@svgr/babel-plugin-svg-dynamic-title": { + "version": "4.3.3", + "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-svg-dynamic-title/-/babel-plugin-svg-dynamic-title-4.3.3.tgz", + "integrity": "sha512-w3Be6xUNdwgParsvxkkeZb545VhXEwjGMwExMVBIdPQJeyMQHqm9Msnb2a1teHBqUYL66qtwfhNkbj1iarCG7w==", + "dev": true }, - "babel-plugin-transform-es2015-template-literals": { - "version": "6.22.0", - "resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-template-literals/-/babel-plugin-transform-es2015-template-literals-6.22.0.tgz", - "integrity": "sha1-qEs0UPfp+PH2g51taH2oS7EjbY0=", - "requires": { - "babel-runtime": "^6.22.0" - } + "@svgr/babel-plugin-svg-em-dimensions": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-svg-em-dimensions/-/babel-plugin-svg-em-dimensions-4.2.0.tgz", + "integrity": "sha512-C0Uy+BHolCHGOZ8Dnr1zXy/KgpBOkEUYY9kI/HseHVPeMbluaX3CijJr7D4C5uR8zrc1T64nnq/k63ydQuGt4w==", + "dev": true }, - "babel-plugin-transform-es2015-typeof-symbol": { - "version": "6.23.0", - "resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-typeof-symbol/-/babel-plugin-transform-es2015-typeof-symbol-6.23.0.tgz", - "integrity": "sha1-3sCfHN3/lLUqxz1QXITfWdzOs3I=", - "requires": { - "babel-runtime": "^6.22.0" - } + "@svgr/babel-plugin-transform-react-native-svg": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-transform-react-native-svg/-/babel-plugin-transform-react-native-svg-4.2.0.tgz", + "integrity": "sha512-7YvynOpZDpCOUoIVlaaOUU87J4Z6RdD6spYN4eUb5tfPoKGSF9OG2NuhgYnq4jSkAxcpMaXWPf1cePkzmqTPNw==", + "dev": true }, - 
"babel-plugin-transform-es2015-unicode-regex": { - "version": "6.24.1", - "resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-unicode-regex/-/babel-plugin-transform-es2015-unicode-regex-6.24.1.tgz", - "integrity": "sha1-04sS9C6nMj9yk4fxinxa4frrNek=", - "requires": { - "babel-helper-regex": "^6.24.1", - "babel-runtime": "^6.22.0", - "regexpu-core": "^2.0.0" - } + "@svgr/babel-plugin-transform-svg-component": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-transform-svg-component/-/babel-plugin-transform-svg-component-4.2.0.tgz", + "integrity": "sha512-hYfYuZhQPCBVotABsXKSCfel2slf/yvJY8heTVX1PCTaq/IgASq1IyxPPKJ0chWREEKewIU/JMSsIGBtK1KKxw==", + "dev": true }, - "babel-plugin-transform-exponentiation-operator": { - "version": "6.24.1", - "resolved": "https://registry.npmjs.org/babel-plugin-transform-exponentiation-operator/-/babel-plugin-transform-exponentiation-operator-6.24.1.tgz", - "integrity": "sha1-KrDJx/MJj6SJB3cruBP+QejeOg4=", + "@svgr/babel-preset": { + "version": "4.3.3", + "resolved": "https://registry.npmjs.org/@svgr/babel-preset/-/babel-preset-4.3.3.tgz", + "integrity": "sha512-6PG80tdz4eAlYUN3g5GZiUjg2FMcp+Wn6rtnz5WJG9ITGEF1pmFdzq02597Hn0OmnQuCVaBYQE1OVFAnwOl+0A==", + "dev": true, "requires": { - "babel-helper-builder-binary-assignment-operator-visitor": "^6.24.1", - "babel-plugin-syntax-exponentiation-operator": "^6.8.0", - "babel-runtime": "^6.22.0" + "@svgr/babel-plugin-add-jsx-attribute": "^4.2.0", + "@svgr/babel-plugin-remove-jsx-attribute": "^4.2.0", + "@svgr/babel-plugin-remove-jsx-empty-expression": "^4.2.0", + "@svgr/babel-plugin-replace-jsx-attribute-value": "^4.2.0", + "@svgr/babel-plugin-svg-dynamic-title": "^4.3.3", + "@svgr/babel-plugin-svg-em-dimensions": "^4.2.0", + "@svgr/babel-plugin-transform-react-native-svg": "^4.2.0", + "@svgr/babel-plugin-transform-svg-component": "^4.2.0" } }, - "babel-plugin-transform-flow-strip-types": { - "version": "6.22.0", - "resolved": "https://registry.npmjs.org/babel-plugin-transform-flow-strip-types/-/babel-plugin-transform-flow-strip-types-6.22.0.tgz", - "integrity": "sha1-hMtnKTXUNxT9wyvOhFaNh0Qc988=", + "@svgr/core": { + "version": "4.3.3", + "resolved": "https://registry.npmjs.org/@svgr/core/-/core-4.3.3.tgz", + "integrity": "sha512-qNuGF1QON1626UCaZamWt5yedpgOytvLj5BQZe2j1k1B8DUG4OyugZyfEwBeXozCUwhLEpsrgPrE+eCu4fY17w==", + "dev": true, "requires": { - "babel-plugin-syntax-flow": "^6.18.0", - "babel-runtime": "^6.22.0" + "@svgr/plugin-jsx": "^4.3.3", + "camelcase": "^5.3.1", + "cosmiconfig": "^5.2.1" } }, - "babel-plugin-transform-object-rest-spread": { - "version": "6.26.0", - "resolved": "https://registry.npmjs.org/babel-plugin-transform-object-rest-spread/-/babel-plugin-transform-object-rest-spread-6.26.0.tgz", - "integrity": "sha1-DzZpLVD+9rfi1LOsFHgTepY7ewY=", + "@svgr/hast-util-to-babel-ast": { + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/@svgr/hast-util-to-babel-ast/-/hast-util-to-babel-ast-4.3.2.tgz", + "integrity": "sha512-JioXclZGhFIDL3ddn4Kiq8qEqYM2PyDKV0aYno8+IXTLuYt6TOgHUbUAAFvqtb0Xn37NwP0BTHglejFoYr8RZg==", + "dev": true, "requires": { - "babel-plugin-syntax-object-rest-spread": "^6.8.0", - "babel-runtime": "^6.26.0" + "@babel/types": "^7.4.4" } }, - "babel-plugin-transform-react-constant-elements": { - "version": "6.23.0", - "resolved": "https://registry.npmjs.org/babel-plugin-transform-react-constant-elements/-/babel-plugin-transform-react-constant-elements-6.23.0.tgz", - "integrity": "sha1-LxGb9NLN1F65uqrldAU8YE9hR90=", + 
"@svgr/plugin-jsx": { + "version": "4.3.3", + "resolved": "https://registry.npmjs.org/@svgr/plugin-jsx/-/plugin-jsx-4.3.3.tgz", + "integrity": "sha512-cLOCSpNWQnDB1/v+SUENHH7a0XY09bfuMKdq9+gYvtuwzC2rU4I0wKGFEp1i24holdQdwodCtDQdFtJiTCWc+w==", + "dev": true, "requires": { - "babel-runtime": "^6.22.0" + "@babel/core": "^7.4.5", + "@svgr/babel-preset": "^4.3.3", + "@svgr/hast-util-to-babel-ast": "^4.3.2", + "svg-parser": "^2.0.0" } }, - "babel-plugin-transform-react-display-name": { - "version": "6.25.0", - "resolved": "https://registry.npmjs.org/babel-plugin-transform-react-display-name/-/babel-plugin-transform-react-display-name-6.25.0.tgz", - "integrity": "sha1-Z+K/Hx6ck6sI25Z5LgU5K/LMKNE=", + "@svgr/plugin-svgo": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/@svgr/plugin-svgo/-/plugin-svgo-4.3.1.tgz", + "integrity": "sha512-PrMtEDUWjX3Ea65JsVCwTIXuSqa3CG9px+DluF1/eo9mlDrgrtFE7NE/DjdhjJgSM9wenlVBzkzneSIUgfUI/w==", + "dev": true, "requires": { - "babel-runtime": "^6.22.0" + "cosmiconfig": "^5.2.1", + "merge-deep": "^3.0.2", + "svgo": "^1.2.2" } }, - "babel-plugin-transform-react-jsx": { - "version": "6.24.1", - "resolved": "https://registry.npmjs.org/babel-plugin-transform-react-jsx/-/babel-plugin-transform-react-jsx-6.24.1.tgz", - "integrity": "sha1-hAoCjn30YN/DotKfDA2R9jduZqM=", + "@svgr/webpack": { + "version": "4.3.3", + "resolved": "https://registry.npmjs.org/@svgr/webpack/-/webpack-4.3.3.tgz", + "integrity": "sha512-bjnWolZ6KVsHhgyCoYRFmbd26p8XVbulCzSG53BDQqAr+JOAderYK7CuYrB3bDjHJuF6LJ7Wrr42+goLRV9qIg==", + "dev": true, "requires": { - "babel-helper-builder-react-jsx": "^6.24.1", - "babel-plugin-syntax-jsx": "^6.8.0", - "babel-runtime": "^6.22.0" + "@babel/core": "^7.4.5", + "@babel/plugin-transform-react-constant-elements": "^7.0.0", + "@babel/preset-env": "^7.4.5", + "@babel/preset-react": "^7.0.0", + "@svgr/core": "^4.3.3", + "@svgr/plugin-jsx": "^4.3.3", + "@svgr/plugin-svgo": "^4.3.1", + "loader-utils": "^1.2.3" } }, - "babel-plugin-transform-react-jsx-self": { - "version": "6.22.0", - "resolved": "https://registry.npmjs.org/babel-plugin-transform-react-jsx-self/-/babel-plugin-transform-react-jsx-self-6.22.0.tgz", - "integrity": "sha1-322AqdomEqEh5t3XVYvL7PBuY24=", + "@testing-library/dom": { + "version": "6.12.2", + "resolved": "https://registry.npmjs.org/@testing-library/dom/-/dom-6.12.2.tgz", + "integrity": "sha512-KCnvHra5fV+wDxg3wJObGvZFxq7v1DJt829GNFLuRDjKxVNc/B5AdsylNF5PMHFbWMXDsHwM26d2NZcZO9KjbQ==", + "dev": true, "requires": { - "babel-plugin-syntax-jsx": "^6.8.0", - "babel-runtime": "^6.22.0" + "@babel/runtime": "^7.6.2", + "@sheerun/mutationobserver-shim": "^0.3.2", + "@types/testing-library__dom": "^6.0.0", + "aria-query": "3.0.0", + "pretty-format": "^24.9.0", + "wait-for-expect": "^3.0.0" + }, + "dependencies": { + "@babel/runtime": { + "version": "7.8.4", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.8.4.tgz", + "integrity": "sha512-neAp3zt80trRVBI1x0azq6c57aNBqYZH8KhMm3TaB7wEI5Q4A2SHfBHE8w9gOhI/lrqxtEbXZgQIrHP+wvSGwQ==", + "dev": true, + "requires": { + "regenerator-runtime": "^0.13.2" + } + }, + "ansi-regex": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", + "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", + "dev": true + }, + "pretty-format": { + "version": "24.9.0", + "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-24.9.0.tgz", + "integrity": 
"sha512-00ZMZUiHaJrNfk33guavqgvfJS30sLYf0f8+Srklv0AMPodGGHcoHgksZ3OThYnIvOd+8yMCn0YiEOogjlgsnA==", + "dev": true, + "requires": { + "@jest/types": "^24.9.0", + "ansi-regex": "^4.0.0", + "ansi-styles": "^3.2.0", + "react-is": "^16.8.4" + } + }, + "react-is": { + "version": "16.12.0", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.12.0.tgz", + "integrity": "sha512-rPCkf/mWBtKc97aLL9/txD8DZdemK0vkA3JMLShjlJB3Pj3s+lpf1KaBzMfQrAmhMQB0n1cU/SUGgKKBCe837Q==", + "dev": true + }, + "regenerator-runtime": { + "version": "0.13.3", + "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.3.tgz", + "integrity": "sha512-naKIZz2GQ8JWh///G7L3X6LaQUAMp2lvb1rvwwsURe/VXwD6VMfr+/1NuNw3ag8v2kY1aQ/go5SNn79O9JU7yw==", + "dev": true + } } }, - "babel-plugin-transform-react-jsx-source": { - "version": "6.22.0", - "resolved": "https://registry.npmjs.org/babel-plugin-transform-react-jsx-source/-/babel-plugin-transform-react-jsx-source-6.22.0.tgz", - "integrity": "sha1-ZqwSFT9c0tF7PBkmj0vwGX9E7NY=", + "@testing-library/react": { + "version": "9.4.0", + "resolved": "https://registry.npmjs.org/@testing-library/react/-/react-9.4.0.tgz", + "integrity": "sha512-XdhDWkI4GktUPsz0AYyeQ8M9qS/JFie06kcSnUVcpgOwFjAu9vhwR83qBl+lw9yZWkbECjL8Hd+n5hH6C0oWqg==", + "dev": true, "requires": { - "babel-plugin-syntax-jsx": "^6.8.0", - "babel-runtime": "^6.22.0" + "@babel/runtime": "^7.7.6", + "@testing-library/dom": "^6.11.0", + "@types/testing-library__react": "^9.1.2" + }, + "dependencies": { + "@babel/runtime": { + "version": "7.8.4", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.8.4.tgz", + "integrity": "sha512-neAp3zt80trRVBI1x0azq6c57aNBqYZH8KhMm3TaB7wEI5Q4A2SHfBHE8w9gOhI/lrqxtEbXZgQIrHP+wvSGwQ==", + "dev": true, + "requires": { + "regenerator-runtime": "^0.13.2" + } + }, + "regenerator-runtime": { + "version": "0.13.3", + "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.3.tgz", + "integrity": "sha512-naKIZz2GQ8JWh///G7L3X6LaQUAMp2lvb1rvwwsURe/VXwD6VMfr+/1NuNw3ag8v2kY1aQ/go5SNn79O9JU7yw==", + "dev": true + } } }, - "babel-plugin-transform-regenerator": { - "version": "6.26.0", - "resolved": "https://registry.npmjs.org/babel-plugin-transform-regenerator/-/babel-plugin-transform-regenerator-6.26.0.tgz", - "integrity": "sha1-4HA2lvveJ/Cj78rPi03KL3s6jy8=", + "@types/babel__core": { + "version": "7.1.7", + "resolved": "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.1.7.tgz", + "integrity": "sha512-RL62NqSFPCDK2FM1pSDH0scHpJvsXtZNiYlMB73DgPBaG1E38ZYVL+ei5EkWRbr+KC4YNiAUNBnRj+bgwpgjMw==", + "dev": true, "requires": { - "regenerator-transform": "^0.10.0" + "@babel/parser": "^7.1.0", + "@babel/types": "^7.0.0", + "@types/babel__generator": "*", + "@types/babel__template": "*", + "@types/babel__traverse": "*" } }, - "babel-plugin-transform-runtime": { - "version": "6.23.0", - "resolved": "https://registry.npmjs.org/babel-plugin-transform-runtime/-/babel-plugin-transform-runtime-6.23.0.tgz", - "integrity": "sha1-iEkNRGUC6puOfvsP4J7E2ZR5se4=", + "@types/babel__generator": { + "version": "7.6.1", + "resolved": "https://registry.npmjs.org/@types/babel__generator/-/babel__generator-7.6.1.tgz", + "integrity": "sha512-bBKm+2VPJcMRVwNhxKu8W+5/zT7pwNEqeokFOmbvVSqGzFneNxYcEBro9Ac7/N9tlsaPYnZLK8J1LWKkMsLAew==", + "dev": true, "requires": { - "babel-runtime": "^6.22.0" + "@babel/types": "^7.0.0" } }, - "babel-plugin-transform-strict-mode": { - "version": "6.24.1", - "resolved": 
"https://registry.npmjs.org/babel-plugin-transform-strict-mode/-/babel-plugin-transform-strict-mode-6.24.1.tgz", - "integrity": "sha1-1fr3qleKZbvlkc9e2uBKDGcCB1g=", + "@types/babel__template": { + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/@types/babel__template/-/babel__template-7.0.2.tgz", + "integrity": "sha512-/K6zCpeW7Imzgab2bLkLEbz0+1JlFSrUMdw7KoIIu+IUdu51GWaBZpd3y1VXGVXzynvGa4DaIaxNZHiON3GXUg==", + "dev": true, "requires": { - "babel-runtime": "^6.22.0", - "babel-types": "^6.24.1" + "@babel/parser": "^7.1.0", + "@babel/types": "^7.0.0" } }, - "babel-polyfill": { - "version": "6.26.0", - "resolved": "https://registry.npmjs.org/babel-polyfill/-/babel-polyfill-6.26.0.tgz", - "integrity": "sha1-N5k3q8Z9eJWXCtxiHyhM2WbPIVM=", + "@types/babel__traverse": { + "version": "7.0.10", + "resolved": "https://registry.npmjs.org/@types/babel__traverse/-/babel__traverse-7.0.10.tgz", + "integrity": "sha512-74fNdUGrWsgIB/V9kTO5FGHPWYY6Eqn+3Z7L6Hc4e/BxjYV7puvBqp5HwsVYYfLm6iURYBNCx4Ut37OF9yitCw==", "dev": true, "requires": { - "babel-runtime": "^6.26.0", - "core-js": "^2.5.0", - "regenerator-runtime": "^0.10.5" - }, - "dependencies": { - "core-js": { - "version": "2.6.1", - "resolved": "https://registry.npmjs.org/core-js/-/core-js-2.6.1.tgz", - "integrity": "sha512-L72mmmEayPJBejKIWe2pYtGis5r0tQ5NaJekdhyXgeMQTpJoBsH0NL4ElY2LfSoV15xeQWKQ+XTTOZdyero5Xg==", - "dev": true - }, - "regenerator-runtime": { - "version": "0.10.5", - "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.10.5.tgz", - "integrity": "sha1-M2w+/BIgrc7dosn6tntaeVWjNlg=", - "dev": true - } + "@babel/types": "^7.3.0" } }, - "babel-preset-env": { - "version": "1.6.1", - "resolved": "https://registry.npmjs.org/babel-preset-env/-/babel-preset-env-1.6.1.tgz", - "integrity": "sha512-W6VIyA6Ch9ePMI7VptNn2wBM6dbG0eSz25HEiL40nQXCsXGTGZSTZu1Iap+cj3Q0S5a7T9+529l/5Bkvd+afNA==", - "requires": { - "babel-plugin-check-es2015-constants": "^6.22.0", - "babel-plugin-syntax-trailing-function-commas": "^6.22.0", - "babel-plugin-transform-async-to-generator": "^6.22.0", - "babel-plugin-transform-es2015-arrow-functions": "^6.22.0", - "babel-plugin-transform-es2015-block-scoped-functions": "^6.22.0", - "babel-plugin-transform-es2015-block-scoping": "^6.23.0", - "babel-plugin-transform-es2015-classes": "^6.23.0", - "babel-plugin-transform-es2015-computed-properties": "^6.22.0", - "babel-plugin-transform-es2015-destructuring": "^6.23.0", - "babel-plugin-transform-es2015-duplicate-keys": "^6.22.0", - "babel-plugin-transform-es2015-for-of": "^6.23.0", - "babel-plugin-transform-es2015-function-name": "^6.22.0", - "babel-plugin-transform-es2015-literals": "^6.22.0", - "babel-plugin-transform-es2015-modules-amd": "^6.22.0", - "babel-plugin-transform-es2015-modules-commonjs": "^6.23.0", - "babel-plugin-transform-es2015-modules-systemjs": "^6.23.0", - "babel-plugin-transform-es2015-modules-umd": "^6.23.0", - "babel-plugin-transform-es2015-object-super": "^6.22.0", - "babel-plugin-transform-es2015-parameters": "^6.23.0", - "babel-plugin-transform-es2015-shorthand-properties": "^6.22.0", - "babel-plugin-transform-es2015-spread": "^6.22.0", - "babel-plugin-transform-es2015-sticky-regex": "^6.22.0", - "babel-plugin-transform-es2015-template-literals": "^6.22.0", - "babel-plugin-transform-es2015-typeof-symbol": "^6.23.0", - "babel-plugin-transform-es2015-unicode-regex": "^6.22.0", - "babel-plugin-transform-exponentiation-operator": "^6.22.0", - "babel-plugin-transform-regenerator": "^6.22.0", - "browserslist": "^2.1.2", - 
"invariant": "^2.2.2", - "semver": "^5.3.0" + "@types/body-parser": { + "version": "1.17.0", + "resolved": "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.17.0.tgz", + "integrity": "sha512-a2+YeUjPkztKJu5aIF2yArYFQQp8d51wZ7DavSHjFuY1mqVgidGyzEQ41JIVNy82fXj8yPgy2vJmfIywgESW6w==", + "dev": true, + "requires": { + "@types/connect": "*", + "@types/node": "*" } }, - "babel-preset-flow": { - "version": "6.23.0", - "resolved": "https://registry.npmjs.org/babel-preset-flow/-/babel-preset-flow-6.23.0.tgz", - "integrity": "sha1-5xIYiHCFrpoktb5Baa/7WZgWxJ0=", + "@types/cheerio": { + "version": "0.22.13", + "resolved": "https://registry.npmjs.org/@types/cheerio/-/cheerio-0.22.13.tgz", + "integrity": "sha512-OZd7dCUOUkiTorf97vJKwZnSja/DmHfuBAroe1kREZZTCf/tlFecwHhsOos3uVHxeKGZDwzolIrCUApClkdLuA==", + "dev": true, "requires": { - "babel-plugin-transform-flow-strip-types": "^6.22.0" + "@types/node": "*" } }, - "babel-preset-jest": { - "version": "22.4.4", - "resolved": "https://registry.npmjs.org/babel-preset-jest/-/babel-preset-jest-22.4.4.tgz", - "integrity": "sha512-+dxMtOFwnSYWfum0NaEc0O03oSdwBsjx4tMSChRDPGwu/4wSY6Q6ANW3wkjKpJzzguaovRs/DODcT4hbSN8yiA==", + "@types/color-name": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@types/color-name/-/color-name-1.1.1.tgz", + "integrity": "sha512-rr+OQyAjxze7GgWrSaJwydHStIhHq2lvY3BOC2Mj7KnzI7XK0Uw1TOOdI9lDoajEbSWLiYgoo4f1R51erQfhPQ==", + "dev": true + }, + "@types/connect": { + "version": "3.4.32", + "resolved": "https://registry.npmjs.org/@types/connect/-/connect-3.4.32.tgz", + "integrity": "sha512-4r8qa0quOvh7lGD0pre62CAb1oni1OO6ecJLGCezTmhQ8Fz50Arx9RUszryR8KlgK6avuSXvviL6yWyViQABOg==", + "dev": true, "requires": { - "babel-plugin-jest-hoist": "^22.4.4", - "babel-plugin-syntax-object-rest-spread": "^6.13.0" + "@types/node": "*" } }, - "babel-preset-react": { - "version": "6.24.1", - "resolved": "https://registry.npmjs.org/babel-preset-react/-/babel-preset-react-6.24.1.tgz", - "integrity": "sha1-umnfrqRfw+xjm2pOzqbhdwLJE4A=", + "@types/d3": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/@types/d3/-/d3-5.5.0.tgz", + "integrity": "sha512-Bz9EAhWnaO93jLYSAT13blgzwP5Z0grO5THBOXSMeWHIIFHA7ntJSLpHSCr1kDtQunEZKCYT9OfE+4lYY/PwlA==", + "dev": true, "requires": { - "babel-plugin-syntax-jsx": "^6.3.13", - "babel-plugin-transform-react-display-name": "^6.23.0", - "babel-plugin-transform-react-jsx": "^6.24.1", - "babel-plugin-transform-react-jsx-self": "^6.22.0", - "babel-plugin-transform-react-jsx-source": "^6.22.0", - "babel-preset-flow": "^6.23.0" + "@types/d3-array": "*", + "@types/d3-axis": "*", + "@types/d3-brush": "*", + "@types/d3-chord": "*", + "@types/d3-collection": "*", + "@types/d3-color": "*", + "@types/d3-contour": "*", + "@types/d3-dispatch": "*", + "@types/d3-drag": "*", + "@types/d3-dsv": "*", + "@types/d3-ease": "*", + "@types/d3-fetch": "*", + "@types/d3-force": "*", + "@types/d3-format": "*", + "@types/d3-geo": "*", + "@types/d3-hierarchy": "*", + "@types/d3-interpolate": "*", + "@types/d3-path": "*", + "@types/d3-polygon": "*", + "@types/d3-quadtree": "*", + "@types/d3-random": "*", + "@types/d3-scale": "*", + "@types/d3-scale-chromatic": "*", + "@types/d3-selection": "*", + "@types/d3-shape": "*", + "@types/d3-time": "*", + "@types/d3-time-format": "*", + "@types/d3-timer": "*", + "@types/d3-transition": "*", + "@types/d3-voronoi": "*", + "@types/d3-zoom": "*" } }, - "babel-preset-react-app": { - "version": "3.1.2", - "resolved": 
"https://registry.npmjs.org/babel-preset-react-app/-/babel-preset-react-app-3.1.2.tgz", - "integrity": "sha512-/sh5Qd5T08PYa6t4kuCdKh9tXp6/m/Jwyx7PJTqugsYMfsDUJMlBXOs5EwFODHprzjWrmQ0SydnMZu9FY4MZYg==", - "requires": { - "babel-plugin-dynamic-import-node": "1.1.0", - "babel-plugin-syntax-dynamic-import": "6.18.0", - "babel-plugin-transform-class-properties": "6.24.1", - "babel-plugin-transform-es2015-destructuring": "6.23.0", - "babel-plugin-transform-object-rest-spread": "6.26.0", - "babel-plugin-transform-react-constant-elements": "6.23.0", - "babel-plugin-transform-react-jsx": "6.24.1", - "babel-plugin-transform-react-jsx-self": "6.22.0", - "babel-plugin-transform-react-jsx-source": "6.22.0", - "babel-plugin-transform-regenerator": "6.26.0", - "babel-plugin-transform-runtime": "6.23.0", - "babel-preset-env": "1.6.1", - "babel-preset-react": "6.24.1" - } - }, - "babel-register": { - "version": "6.26.0", - "resolved": "https://registry.npmjs.org/babel-register/-/babel-register-6.26.0.tgz", - "integrity": "sha1-btAhFz4vy0htestFxgCahW9kcHE=", + "@types/d3-array": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@types/d3-array/-/d3-array-1.2.4.tgz", + "integrity": "sha512-3r1fOAAb+SGfcOGXty/LGvoP0ovMec4UtGNUyHOSzYyvSGpmt+eNMxLowol/3HryusevznSfcHZebEShXMwsZA==", + "dev": true + }, + "@types/d3-axis": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/@types/d3-axis/-/d3-axis-1.0.11.tgz", + "integrity": "sha512-cuigApCyCwYJxaQPghj+BqaxzbdRdT/lpZBMtF7EuEIJ61NMQ8yvGnqFvHCIgJEmUu2Wb2wiZqy9kiHi3Ddftg==", + "dev": true, "requires": { - "babel-core": "^6.26.0", - "babel-runtime": "^6.26.0", - "core-js": "^2.5.0", - "home-or-tmp": "^2.0.0", - "lodash": "^4.17.4", - "mkdirp": "^0.5.1", - "source-map-support": "^0.4.15" - }, - "dependencies": { - "core-js": { - "version": "2.6.1", - "resolved": "https://registry.npmjs.org/core-js/-/core-js-2.6.1.tgz", - "integrity": "sha512-L72mmmEayPJBejKIWe2pYtGis5r0tQ5NaJekdhyXgeMQTpJoBsH0NL4ElY2LfSoV15xeQWKQ+XTTOZdyero5Xg==" - }, - "source-map-support": { - "version": "0.4.18", - "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.4.18.tgz", - "integrity": "sha512-try0/JqxPLF9nOjvSta7tVondkP5dwgyLDjVoyMDlmjugT2lRZ1OfsrYTkCd2hkDnJTKRbO/Rl3orm8vlsUzbA==", - "requires": { - "source-map": "^0.5.6" - } - } + "@types/d3-selection": "*" } }, - "babel-runtime": { - "version": "6.26.0", - "resolved": "https://registry.npmjs.org/babel-runtime/-/babel-runtime-6.26.0.tgz", - "integrity": "sha1-llxwWGaOgrVde/4E/yM3vItWR/4=", + "@types/d3-brush": { + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/@types/d3-brush/-/d3-brush-1.0.9.tgz", + "integrity": "sha512-mAx8IVc0luUHfk51pl0UN1vzybnAzLMUsvIwLt3fbsqqPkSXr+Pu1AxOPPeyNc27LhHJnfH/LCV7Jlv+Yzqu1A==", + "dev": true, "requires": { - "core-js": "^2.4.0", - "regenerator-runtime": "^0.11.0" - }, - "dependencies": { - "core-js": { - "version": "2.6.1", - "resolved": "https://registry.npmjs.org/core-js/-/core-js-2.6.1.tgz", - "integrity": "sha512-L72mmmEayPJBejKIWe2pYtGis5r0tQ5NaJekdhyXgeMQTpJoBsH0NL4ElY2LfSoV15xeQWKQ+XTTOZdyero5Xg==" - }, - "regenerator-runtime": { - "version": "0.11.1", - "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.11.1.tgz", - "integrity": "sha512-MguG95oij0fC3QV3URf4V2SDYGJhJnJGqvIIgdECeODCT98wSWDAJ94SSuVpYQUoTcGUIL6L4yNB7j1DFFHSBg==" - } + "@types/d3-selection": "*" } }, - "babel-template": { - "version": "6.26.0", - "resolved": "https://registry.npmjs.org/babel-template/-/babel-template-6.26.0.tgz", - 
"integrity": "sha1-3gPi0WOWsGn0bdn/+FIfsaDjXgI=", + "@types/d3-chord": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/@types/d3-chord/-/d3-chord-1.0.8.tgz", + "integrity": "sha512-F0ftYOo7FenAIxsRjXLt8vbij0NLDuVcL+xaGY7R9jUmF2Mrpj1T5XukBI9Cad+Ei7YSxEWREIO+CYcaKCl2qQ==", + "dev": true + }, + "@types/d3-collection": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/@types/d3-collection/-/d3-collection-1.0.7.tgz", + "integrity": "sha512-vR3BT0GwHc5y93Jv6bxn3zoxP/vGu+GdXu/r1ApjbP9dLk9I2g6NiV7iP/QMQSuFZd0It0n/qWrfXHxCWwHIkg==", + "dev": true + }, + "@types/d3-color": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@types/d3-color/-/d3-color-1.2.1.tgz", + "integrity": "sha512-xwb1tqvYNWllbHuhMFhiXk63Imf+QNq/dJdmbXmr2wQVnwGenCuj3/0IWJ9hdIFQIqzvhT7T37cvx93jtAsDbQ==", + "dev": true + }, + "@types/d3-contour": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/@types/d3-contour/-/d3-contour-1.3.0.tgz", + "integrity": "sha512-AUCUIjEnC5lCGBM9hS+MryRaFLIrPls4Rbv6ktqbd+TK/RXZPwOy9rtBWmGpbeXcSOYCJTUDwNJuEnmYPJRxHQ==", + "dev": true, "requires": { - "babel-runtime": "^6.26.0", - "babel-traverse": "^6.26.0", - "babel-types": "^6.26.0", - "babylon": "^6.18.0", - "lodash": "^4.17.4" + "@types/d3-array": "*", + "@types/geojson": "*" } }, - "babel-traverse": { - "version": "6.26.0", - "resolved": "https://registry.npmjs.org/babel-traverse/-/babel-traverse-6.26.0.tgz", - "integrity": "sha1-RqnL1+3MYsjlwGTi0tjQ9ANXZu4=", + "@types/d3-dispatch": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/@types/d3-dispatch/-/d3-dispatch-1.0.6.tgz", + "integrity": "sha512-xyWJQMr832vqhu6fD/YqX+MSFBWnkxasNhcStvlhqygXxj0cKqPft0wuGoH5TIq5ADXgP83qeNVa4R7bEYN3uA==", + "dev": true + }, + "@types/d3-drag": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/@types/d3-drag/-/d3-drag-1.2.2.tgz", + "integrity": "sha512-+UKFeaMVTfSQvMO0PTzOyLXSr7OZbF2Rx1iNVwo2XsyiOsd4MSuLyJKUwRmGn67044QpbNzr+VD6/8iBBLExWw==", + "dev": true, "requires": { - "babel-code-frame": "^6.26.0", - "babel-messages": "^6.23.0", - "babel-runtime": "^6.26.0", - "babel-types": "^6.26.0", - "babylon": "^6.18.0", - "debug": "^2.6.8", - "globals": "^9.18.0", - "invariant": "^2.2.2", - "lodash": "^4.17.4" - }, - "dependencies": { - "debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "requires": { - "ms": "2.0.0" - } - } + "@types/d3-selection": "*" } }, - "babel-types": { - "version": "6.26.0", - "resolved": "https://registry.npmjs.org/babel-types/-/babel-types-6.26.0.tgz", - "integrity": "sha1-o7Bz+Uq0nrb6Vc1lInozQ4BjJJc=", + "@types/d3-dsv": { + "version": "1.0.35", + "resolved": "https://registry.npmjs.org/@types/d3-dsv/-/d3-dsv-1.0.35.tgz", + "integrity": "sha512-QeH7cN9phcm68TDwpSGmzE71/JtGoKZ2rZJABNUMQ7nYIhHkm2UldqI1Cp2pjEo8ycSeutudjzq+Lfim/ZCadQ==", + "dev": true + }, + "@types/d3-ease": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/@types/d3-ease/-/d3-ease-1.0.7.tgz", + "integrity": "sha1-k6MBhovp4VBh89RDQ7GrP4rLbwk=", + "dev": true + }, + "@types/d3-fetch": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/@types/d3-fetch/-/d3-fetch-1.1.5.tgz", + "integrity": "sha512-o9c0ItT5/Gl3wbNuVpzRnYX1t3RghzeWAjHUVLuyZJudiTxC4f/fC0ZPFWLQ2lVY8pAMmxpV8TJ6ETYCgPeI3A==", + "dev": true, "requires": { - "babel-runtime": "^6.26.0", - "esutils": "^2.0.2", - "lodash": "^4.17.4", - 
"to-fast-properties": "^1.0.3" + "@types/d3-dsv": "*" } }, - "babylon": { - "version": "6.18.0", - "resolved": "https://registry.npmjs.org/babylon/-/babylon-6.18.0.tgz", - "integrity": "sha512-q/UEjfGJ2Cm3oKV71DJz9d25TPnq5rhBVL2Q4fA5wcC3jcrdn7+SssEybFIxwAvvP+YCsCYNKughoF33GxgycQ==" + "@types/d3-force": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@types/d3-force/-/d3-force-1.1.1.tgz", + "integrity": "sha512-ePkELuaFWY4yOuf+Bvx5Xd+ihFiYG4bdnW0BlvigovIm8Sob2t76e9RGO6lybQbv6AlW9Icn9HuZ9fmdzEoJyg==", + "dev": true + }, + "@types/d3-format": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/@types/d3-format/-/d3-format-1.3.0.tgz", + "integrity": "sha512-ZiY4j3iJvAdOwzwW24WjlZbUNvqOsnPAMfPBmdXqxj3uKJbrzBlRrdGl5uC89pZpFs9Dc92E81KcwG2uEgkIZA==", + "dev": true }, - "backstopjs": { - "version": "3.8.5", - "resolved": "https://registry.npmjs.org/backstopjs/-/backstopjs-3.8.5.tgz", - "integrity": "sha512-APiVAjy/ubQLKvQX3J7JH2esHHtKFIMTmMeGOnD5HDsAz1EQjfDU5TNWvCC+5sxN64YfHtSdd5r5eq48HMGhXw==", + "@types/d3-geo": { + "version": "1.10.3", + "resolved": "https://registry.npmjs.org/@types/d3-geo/-/d3-geo-1.10.3.tgz", + "integrity": "sha512-hfdaxM2L0wA9mDZrrSf2o+DyhEpnJYCiAN+lHFtpfZOVCQrYBA5g33sGRpUbAvjSMyO5jkHbftMWPEhuCMChSg==", "dev": true, "requires": { - "casperjs": "^1.1.0-beta5", - "chalk": "^1.1.3", - "chromy": "0.5.11", - "diverged": "^0.1.2", - "fs-extra": "^0.30.0", - "jump.js": "^1.0.2", - "junit-report-builder": "^1.3.1", - "lodash": "^4.17.11", - "minimist": "^1.2.0", - "node-resemble-js": "^0.2.0", - "object-hash": "1.1.5", - "opn": "^5.3.0", - "os": "^0.1.1", - "p-map": "^1.1.1", - "path": "^0.12.7", - "phantomjs-prebuilt": "^2.1.16", - "portfinder": "^1.0.17", - "puppeteer": "^1.2.0-next.1523485686787", - "super-simple-web-server": "^1.1.0", - "temp": "^0.8.3" - }, - "dependencies": { - "fs-extra": { - "version": "0.30.0", - "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-0.30.0.tgz", - "integrity": "sha1-8jP/zAjU2n1DLapEl3aYnbHfk/A=", - "dev": true, - "requires": { - "graceful-fs": "^4.1.2", - "jsonfile": "^2.1.0", - "klaw": "^1.0.0", - "path-is-absolute": "^1.0.0", - "rimraf": "^2.2.8" - } - }, - "jsonfile": { - "version": "2.4.0", - "resolved": "http://registry.npmjs.org/jsonfile/-/jsonfile-2.4.0.tgz", - "integrity": "sha1-NzaitCi4e72gzIO1P6PWM6NcKug=", - "dev": true, - "requires": { - "graceful-fs": "^4.1.6" - } - }, - "opn": { - "version": "5.4.0", - "resolved": "https://registry.npmjs.org/opn/-/opn-5.4.0.tgz", - "integrity": "sha512-YF9MNdVy/0qvJvDtunAOzFw9iasOQHpVthTCvGzxt61Il64AYSGdK+rYwld7NAfk9qJ7dt+hymBNSc9LNYS+Sw==", - "dev": true, - "requires": { - "is-wsl": "^1.1.0" - } - } + "@types/geojson": "*" } }, - "balanced-match": { - "version": "0.4.2", - "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-0.4.2.tgz", - "integrity": "sha1-yz8+PHMtwPAe5wtAPzAuYddwmDg=" + "@types/d3-hierarchy": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/@types/d3-hierarchy/-/d3-hierarchy-1.1.5.tgz", + "integrity": "sha512-DKhqURrURt2c7MsF9sHiF2wrWf2+yZR4Q9oIG026t/ZY4VWoM0Yd7UonaR+rygyReWcFSEjKC/+5A27TgD8R8g==", + "dev": true }, - "base": { - "version": "0.11.2", - "resolved": "https://registry.npmjs.org/base/-/base-0.11.2.tgz", - "integrity": "sha512-5T6P4xPgpp0YDFvSWwEZ4NoE3aM4QBQXDzmVbraCkFj8zHM+mba8SyqB5DbZWyR7mYHo6Y7BdQo3MoA4m0TeQg==", + "@types/d3-interpolate": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/@types/d3-interpolate/-/d3-interpolate-1.3.0.tgz", + "integrity": 
"sha512-Ng4ds7kPSvP/c3W3J5PPUQlgewif1tGBqCeh5lgY+UG82Y7H9zQ8c2gILsEFDLg7wRGOwnuKZ940Q/LSN14w9w==", + "dev": true, "requires": { - "cache-base": "^1.0.1", - "class-utils": "^0.3.5", - "component-emitter": "^1.2.1", - "define-property": "^1.0.0", - "isobject": "^3.0.1", - "mixin-deep": "^1.2.0", - "pascalcase": "^0.1.1" - }, - "dependencies": { - "define-property": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-1.0.0.tgz", - "integrity": "sha1-dp66rz9KY6rTr56NMEybvnm/sOY=", - "requires": { - "is-descriptor": "^1.0.0" - } - }, - "is-accessor-descriptor": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz", - "integrity": "sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ==", - "requires": { - "kind-of": "^6.0.0" - } - }, - "is-data-descriptor": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz", - "integrity": "sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ==", - "requires": { - "kind-of": "^6.0.0" - } - }, - "is-descriptor": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-1.0.2.tgz", - "integrity": "sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg==", - "requires": { - "is-accessor-descriptor": "^1.0.0", - "is-data-descriptor": "^1.0.0", - "kind-of": "^6.0.2" - } - } + "@types/d3-color": "*" } }, - "base64-js": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.3.0.tgz", - "integrity": "sha512-ccav/yGvoa80BQDljCxsmmQ3Xvx60/UpBIij5QN21W3wBi/hhIC9OoO+KLpu9IJTS9j4DRVJ3aDDF9cMSoa2lw==" + "@types/d3-path": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/@types/d3-path/-/d3-path-1.0.7.tgz", + "integrity": "sha512-U8dFRG+8WhkLJr2sxZ9Cw/5WeRgBnNqMxGdA1+Z0+ZG6tK0s75OQ4OXnxeyfKuh6E4wQPY8OAKr1+iNDx01BEQ==", + "dev": true }, - "batch": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/batch/-/batch-0.6.1.tgz", - "integrity": "sha1-3DQxT05nkxgJP8dgJyUl+UvyXBY=" + "@types/d3-polygon": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/@types/d3-polygon/-/d3-polygon-1.0.6.tgz", + "integrity": "sha512-E6Kyodn9JThgLq20nxSbEce9ow5/ePgm9PX2EO6W1INIL4DayM7cFaiG10DStuamjYAd0X4rntW2q+GRjiIktw==", + "dev": true }, - "bcrypt-pbkdf": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz", - "integrity": "sha1-pDAdOJtqQ/m2f/PKEaP2Y342Dp4=", + "@types/d3-quadtree": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/@types/d3-quadtree/-/d3-quadtree-1.0.6.tgz", + "integrity": "sha512-sphVuDdiSIaxLt9kQgebJW98pTktQ/xuN7Ysd8X68Rnjeg/q8+c36/ShlqU52qoKg9nob/JEHH1uQMdxURZidQ==", + "dev": true + }, + "@types/d3-random": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@types/d3-random/-/d3-random-1.1.1.tgz", + "integrity": "sha512-jUPeBq1XKK9/5XasTvy5QAUwFeMsjma2yt/nP02yC2Tijovx7i/W5776U/HZugxc5SSmtpx4Z3g9KFVon0QrjQ==", + "dev": true + }, + "@types/d3-scale": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@types/d3-scale/-/d3-scale-2.1.0.tgz", + "integrity": "sha512-vLzRDF5lRxZdCLUOvmw90pkiuSsZdgroBQaat0Ov7Z7OnO9iJsPSm/TZw3wW6m2z/NhIn1E4N0RLNfEi1k4kAA==", + "dev": true, "requires": { - "tweetnacl": "^0.14.3" + "@types/d3-time": "*" } }, - "bfj": { - "version": "6.1.1", - 
"resolved": "https://registry.npmjs.org/bfj/-/bfj-6.1.1.tgz", - "integrity": "sha512-+GUNvzHR4nRyGybQc2WpNJL4MJazMuvf92ueIyA0bIkPRwhhQu3IfZQ2PSoVPpCBJfmoSdOxu5rnotfFLlvYRQ==", + "@types/d3-scale-chromatic": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/@types/d3-scale-chromatic/-/d3-scale-chromatic-1.3.0.tgz", + "integrity": "sha512-JqQH5uu1kmdQEa6XSu7NYzQM71lL1YreBPS5o8SnmEDcBRKL6ooykXa8iFPPOEUiTah25ydi+cTrbsogBSMNSQ==", + "dev": true + }, + "@types/d3-selection": { + "version": "1.3.4", + "resolved": "https://registry.npmjs.org/@types/d3-selection/-/d3-selection-1.3.4.tgz", + "integrity": "sha512-WQ6Ivy7VuUlZ/Grqc8493ZxC+y/fpvZLy5+8ELvmCr2hll8eJPUqC05l6fgRRA7kjqlpbH7lbmvY6pRKf6yzxw==", + "dev": true + }, + "@types/d3-shape": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/@types/d3-shape/-/d3-shape-1.2.7.tgz", + "integrity": "sha512-b2jpGcddOseeNxchaR1SNLqA5xZAbgKix3cXiFeuGeYIEAEUu91UbtelCxOHIUTbNURFnjcbkf4plRbejNzVaQ==", "dev": true, "requires": { - "bluebird": "^3.5.1", - "check-types": "^7.3.0", - "hoopy": "^0.1.2", - "tryer": "^1.0.0" + "@types/d3-path": "*" } }, - "big.js": { - "version": "5.2.2", - "resolved": "https://registry.npmjs.org/big.js/-/big.js-5.2.2.tgz", - "integrity": "sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ==" + "@types/d3-time": { + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/@types/d3-time/-/d3-time-1.0.9.tgz", + "integrity": "sha512-m+D4NbQdDlTVaO7QgXAnatR3IDxQYDMBtRhgSCi5rs9R1LPq1y7/2aqa1FJ2IWjFm1mOV63swDxonnCDlHgHMA==", + "dev": true }, - "bignumber.js": { - "version": "2.4.0", - "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-2.4.0.tgz", - "integrity": "sha1-g4qZLan51zfg9LLbC+YrsJ3Qxeg=", + "@types/d3-time-format": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@types/d3-time-format/-/d3-time-format-2.1.0.tgz", + "integrity": "sha512-/myT3I7EwlukNOX2xVdMzb8FRgNzRMpsZddwst9Ld/VFe6LyJyRp0s32l/V9XoUzk+Gqu56F/oGk6507+8BxrA==", "dev": true }, - "binary-extensions": { - "version": "1.12.0", - "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-1.12.0.tgz", - "integrity": "sha512-DYWGk01lDcxeS/K9IHPGWfT8PsJmbXRtRd2Sx72Tnb8pcYZQFF1oSDb8hJtS1vhp212q1Rzi5dUf9+nq0o9UIg==" + "@types/d3-timer": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/@types/d3-timer/-/d3-timer-1.0.8.tgz", + "integrity": "sha512-AKUgQ/nljUFcUO2P3gK24weVI5XwUTdJvjoh8gJ0yxT4aJ+d7t2Or3TB+k9dEYl14BAjoj32D0ky+YzQSVszfg==", + "dev": true }, - "bluebird": { - "version": "3.5.3", - "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.5.3.tgz", - "integrity": "sha512-/qKPUQlaW1OyR51WeCPBvRnAlnZFUJkCSG5HzGnuIqhgyJtF+T94lFnn33eiazjRm2LAHVy2guNnaq48X9SJuw==" + "@types/d3-transition": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/@types/d3-transition/-/d3-transition-1.1.3.tgz", + "integrity": "sha512-1EukXNuVu/z2G1GZpZagzFJnie9C5zze17ox/vhTgGXNy46rYAm4UkhLLlUeeZ1ndq88k95SOeC8898RpKMLOQ==", + "dev": true, + "requires": { + "@types/d3-selection": "*" + } }, - "bmp-js": { - "version": "0.0.3", - "resolved": "https://registry.npmjs.org/bmp-js/-/bmp-js-0.0.3.tgz", - "integrity": "sha1-ZBE+nHzxICs3btYHvzBibr5XsYo=", + "@types/d3-voronoi": { + "version": "1.1.8", + "resolved": "https://registry.npmjs.org/@types/d3-voronoi/-/d3-voronoi-1.1.8.tgz", + "integrity": "sha512-zqNhW7QsYQGlfOdrwPNPG3Wk64zUa4epKRurkJ/dVc6oeXrB+iTDt8sRZ0KZKOOXvvfa1dcdB0e45TZeLBiodQ==", "dev": true }, - "bn.js": { - "version": 
"4.11.8", - "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.11.8.tgz", - "integrity": "sha512-ItfYfPLkWHUjckQCk8xC+LwxgK8NYcXywGigJgSwOP8Y2iyWT4f2vsZnoOXTTbo+o5yXmIUJ4gn5538SO5S3gA==" + "@types/d3-zoom": { + "version": "1.7.3", + "resolved": "https://registry.npmjs.org/@types/d3-zoom/-/d3-zoom-1.7.3.tgz", + "integrity": "sha512-Tz7+z4+Id0MxERw/ozinC5QHJmGLARs9Mpi/7VVfiR+9AHcFGe9q+fjQa30/oPNY8WPuCh5p5uuXmBYAJ3y91Q==", + "dev": true, + "requires": { + "@types/d3-interpolate": "*", + "@types/d3-selection": "*" + } }, - "body-parser": { - "version": "1.18.3", - "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.18.3.tgz", - "integrity": "sha1-WykhmP/dVTs6DyDe0FkrlWlVyLQ=", + "@types/dagre": { + "version": "0.7.40", + "resolved": "https://registry.npmjs.org/@types/dagre/-/dagre-0.7.40.tgz", + "integrity": "sha512-XvaIdpHNW4AeyvPpKuvdpN8yn4RZBztPhJtLeHRtspCNUcgrZn/B9lA0KMIUlyVdIH77f82KzErR+H/ayrnoyQ==", + "dev": true + }, + "@types/enzyme": { + "version": "3.10.3", + "resolved": "https://registry.npmjs.org/@types/enzyme/-/enzyme-3.10.3.tgz", + "integrity": "sha512-f/Kcb84sZOSZiBPCkr4He9/cpuSLcKRyQaEE20Q30Prx0Dn6wcyMAWI0yofL6yvd9Ht9G7EVkQeRqK0n5w8ILw==", + "dev": true, "requires": { - "bytes": "3.0.0", - "content-type": "~1.0.4", - "debug": "2.6.9", - "depd": "~1.1.2", - "http-errors": "~1.6.3", - "iconv-lite": "0.4.23", - "on-finished": "~2.3.0", - "qs": "6.5.2", - "raw-body": "2.3.3", - "type-is": "~1.6.16" - }, - "dependencies": { - "debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "requires": { - "ms": "2.0.0" - } - }, - "iconv-lite": { - "version": "0.4.23", - "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.23.tgz", - "integrity": "sha512-neyTUVFtahjf0mB3dZT77u+8O0QB89jFdnBkd5P1JgYPbPaia3gXXOVL2fq8VyU2gMMD7SaN7QukTB/pmXYvDA==", - "requires": { - "safer-buffer": ">= 2.1.2 < 3" - } - } + "@types/cheerio": "*", + "@types/react": "*" } }, - "bonjour": { - "version": "3.5.0", - "resolved": "https://registry.npmjs.org/bonjour/-/bonjour-3.5.0.tgz", - "integrity": "sha1-jokKGD2O6aI5OzhExpGkK897yfU=", + "@types/enzyme-adapter-react-16": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/@types/enzyme-adapter-react-16/-/enzyme-adapter-react-16-1.0.5.tgz", + "integrity": "sha512-K7HLFTkBDN5RyRmU90JuYt8OWEY2iKUn43SDWEoBOXd/PowUWjLZ3Q6qMBiQuZeFYK/TOstaZxsnI0fXoAfLpg==", + "dev": true, "requires": { - "array-flatten": "^2.1.0", - "deep-equal": "^1.0.1", - "dns-equal": "^1.0.0", - "dns-txt": "^2.0.2", - "multicast-dns": "^6.0.1", - "multicast-dns-service-types": "^1.1.0" + "@types/enzyme": "*" } }, - "boolbase": { + "@types/eslint-visitor-keys": { "version": "1.0.0", - "resolved": "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz", - "integrity": "sha1-aN/1++YMUes3cl6p4+0xDcwed24=" + "resolved": "https://registry.npmjs.org/@types/eslint-visitor-keys/-/eslint-visitor-keys-1.0.0.tgz", + "integrity": "sha512-OCutwjDZ4aFS6PB1UZ988C4YgwlBHJd6wCeQqaLdmadZ/7e+w79+hbMUFC1QXDNCmdyoRfAFdm0RypzwR+Qpag==", + "dev": true }, - "boxen": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/boxen/-/boxen-1.3.0.tgz", - "integrity": "sha512-TNPjfTr432qx7yOjQyaXm3dSR0MH9vXp7eT1BFSl/C51g+EFnOR9hTg1IreahGBmDNCehscshe45f+C1TBZbLw==", + "@types/events": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@types/events/-/events-1.2.0.tgz", + "integrity": 
"sha512-KEIlhXnIutzKwRbQkGWb/I4HFqBuUykAdHgDED6xqwXJfONCjF5VoE0cXEiurh3XauygxzeDzgtXUqvLkxFzzA==", + "dev": true + }, + "@types/express": { + "version": "4.16.0", + "resolved": "https://registry.npmjs.org/@types/express/-/express-4.16.0.tgz", + "integrity": "sha512-TtPEYumsmSTtTetAPXlJVf3kEqb6wZK0bZojpJQrnD/djV4q1oB6QQ8aKvKqwNPACoe02GNiy5zDzcYivR5Z2w==", + "dev": true, "requires": { - "ansi-align": "^2.0.0", - "camelcase": "^4.0.0", - "chalk": "^2.0.1", - "cli-boxes": "^1.0.0", - "string-width": "^2.0.0", - "term-size": "^1.2.0", - "widest-line": "^2.0.0" - }, - "dependencies": { - "chalk": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.1.tgz", - "integrity": "sha512-ObN6h1v2fTJSmUXoS3nMQ92LbDK9be4TV+6G+omQlGJFdcUX5heKi1LZ1YnRMIgwTLEj3E24bT6tYni50rlCfQ==", - "requires": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" - } - } + "@types/body-parser": "*", + "@types/express-serve-static-core": "*", + "@types/serve-static": "*" } }, - "brace": { - "version": "0.11.1", - "resolved": "https://registry.npmjs.org/brace/-/brace-0.11.1.tgz", - "integrity": "sha1-SJb8ydVE7vRfS7dmDbMg07N5/lg=" - }, - "brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "@types/express-serve-static-core": { + "version": "4.16.0", + "resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-4.16.0.tgz", + "integrity": "sha512-lTeoCu5NxJU4OD9moCgm0ESZzweAx0YqsAcab6OB0EB3+As1OaHtKnaGJvcngQxYsi9UNv0abn4/DRavrRxt4w==", + "dev": true, "requires": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - }, - "dependencies": { - "balanced-match": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", - "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=" - } + "@types/events": "*", + "@types/node": "*", + "@types/range-parser": "*" } }, - "braces": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-2.3.2.tgz", - "integrity": "sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w==", + "@types/geojson": { + "version": "7946.0.4", + "resolved": "https://registry.npmjs.org/@types/geojson/-/geojson-7946.0.4.tgz", + "integrity": "sha512-MHmwBtCb7OCv1DSivz2UNJXPGU/1btAWRKlqJ2saEhVJkpkvqHMMaOpKg0v4sAbDWSQekHGvPVMM8nQ+Jen03Q==", + "dev": true + }, + "@types/glob": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/@types/glob/-/glob-7.1.1.tgz", + "integrity": "sha512-1Bh06cbWJUHMC97acuD6UMG29nMt0Aqz1vF3guLfG+kHHJhy3AyohZFFxYk2f7Q1SQIrNwvncxAE0N/9s70F2w==", + "dev": true, "requires": { - "arr-flatten": "^1.1.0", - "array-unique": "^0.3.2", - "extend-shallow": "^2.0.1", - "fill-range": "^4.0.0", - "isobject": "^3.0.1", - "repeat-element": "^1.1.2", - "snapdragon": "^0.8.1", - "snapdragon-node": "^2.0.1", - "split-string": "^3.0.2", - "to-regex": "^3.0.1" - }, - "dependencies": { - "extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", - "requires": { - "is-extendable": "^0.1.0" - } - } + "@types/events": "*", + "@types/minimatch": "*", + "@types/node": "*" } }, - "brcast": { - "version": "3.0.1", - "resolved": 
"https://registry.npmjs.org/brcast/-/brcast-3.0.1.tgz", - "integrity": "sha512-eI3yqf9YEqyGl9PCNTR46MGvDylGtaHjalcz6Q3fAPnP/PhpKkkve52vFdfGpwp4VUvK6LUr4TQN+2stCrEwTg==" - }, - "brorand": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/brorand/-/brorand-1.1.0.tgz", - "integrity": "sha1-EsJe/kCkXjwyPrhnWgoM5XsiNx8=" + "@types/google-protobuf": { + "version": "3.7.2", + "resolved": "https://registry.npmjs.org/@types/google-protobuf/-/google-protobuf-3.7.2.tgz", + "integrity": "sha512-ifFemzjNchFBCtHS6bZNhSZCBu7tbtOe0e8qY0z2J4HtFXmPJjm6fXSaQsTG7yhShBEZtt2oP/bkwu5k+emlkQ==", + "dev": true }, - "browser-process-hrtime": { - "version": "0.1.3", - "resolved": "https://registry.npmjs.org/browser-process-hrtime/-/browser-process-hrtime-0.1.3.tgz", - "integrity": "sha512-bRFnI4NnjO6cnyLmOV/7PVoDEMJChlcfN0z4s1YMBY989/SvlfMI1lgCnkFUs53e9gQF+w7qu7XdllSTiSl8Aw==" + "@types/history": { + "version": "4.7.2", + "resolved": "https://registry.npmjs.org/@types/history/-/history-4.7.2.tgz", + "integrity": "sha512-ui3WwXmjTaY73fOQ3/m3nnajU/Orhi6cEu5rzX+BrAAJxa3eITXZ5ch9suPqtM03OWhAHhPSyBGCN4UKoxO20Q==", + "dev": true }, - "browser-resolve": { - "version": "1.11.3", - "resolved": "https://registry.npmjs.org/browser-resolve/-/browser-resolve-1.11.3.tgz", - "integrity": "sha512-exDi1BYWB/6raKHmDTCicQfTkqwN5fioMFV4j8BsfMU4R2DK/QfZfK7kOVkmWCNANf0snkBzqGqAJBao9gZMdQ==", + "@types/http-proxy": { + "version": "1.16.2", + "resolved": "https://registry.npmjs.org/@types/http-proxy/-/http-proxy-1.16.2.tgz", + "integrity": "sha512-GgqePmC3rlsn1nv+kx5OviPuUBU2omhnlXOaJSXFgOdsTcScNFap+OaCb2ip9Bm4m5L8EOehgT5d9M4uNB90zg==", + "dev": true, "requires": { - "resolve": "1.1.7" - }, - "dependencies": { - "resolve": { - "version": "1.1.7", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.1.7.tgz", - "integrity": "sha1-IDEU2CrSxe2ejgQRs5ModeiJ6Xs=" - } + "@types/events": "*", + "@types/node": "*" } }, - "browserify-aes": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/browserify-aes/-/browserify-aes-1.2.0.tgz", - "integrity": "sha512-+7CHXqGuspUn/Sl5aO7Ea0xWGAtETPXNSAjHo48JfLdPWcMng33Xe4znFvQweqc/uzk5zSOI3H52CYnjCfb5hA==", + "@types/http-proxy-middleware": { + "version": "0.17.6", + "resolved": "https://registry.npmjs.org/@types/http-proxy-middleware/-/http-proxy-middleware-0.17.6.tgz", + "integrity": "sha512-NocuMc3omR+yySlkgZlNUDyJa9ENGuwX8Ev7Y9zO//H989drWp18Fn+oAgZZIPu+JWtNinIxENK2TZvo53o3tw==", + "dev": true, "requires": { - "buffer-xor": "^1.0.3", - "cipher-base": "^1.0.0", - "create-hash": "^1.1.0", - "evp_bytestokey": "^1.0.3", - "inherits": "^2.0.1", - "safe-buffer": "^5.0.1" + "@types/connect": "*", + "@types/http-proxy": "*", + "@types/node": "*" } }, - "browserify-cipher": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/browserify-cipher/-/browserify-cipher-1.0.1.tgz", - "integrity": "sha512-sPhkz0ARKbf4rRQt2hTpAHqn47X3llLkUGn+xEJzLjwY8LRs2p0v7ljvI5EyoRO/mexrNunNECisZs+gw2zz1w==", - "requires": { - "browserify-aes": "^1.0.4", - "browserify-des": "^1.0.0", - "evp_bytestokey": "^1.0.0" - } + "@types/istanbul-lib-coverage": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.1.tgz", + "integrity": "sha512-hRJD2ahnnpLgsj6KWMYSrmXkM3rm2Dl1qkx6IOFD5FnuNPXJIG5L0dhgKXCYTRMGzU4n0wImQ/xfmRc4POUFlg==", + "dev": true }, - "browserify-des": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/browserify-des/-/browserify-des-1.0.2.tgz", - "integrity": 
"sha512-BioO1xf3hFwz4kc6iBhI3ieDFompMhrMlnDFC4/0/vd5MokpuAc3R+LYbwTA9A5Yc9pq9UYPqffKpW2ObuwX5A==", + "@types/istanbul-lib-report": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@types/istanbul-lib-report/-/istanbul-lib-report-1.1.1.tgz", + "integrity": "sha512-3BUTyMzbZa2DtDI2BkERNC6jJw2Mr2Y0oGI7mRxYNBPxppbtEK1F66u3bKwU2g+wxwWI7PAoRpJnOY1grJqzHg==", + "dev": true, "requires": { - "cipher-base": "^1.0.1", - "des.js": "^1.0.0", - "inherits": "^2.0.1", - "safe-buffer": "^5.1.2" + "@types/istanbul-lib-coverage": "*" } }, - "browserify-rsa": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/browserify-rsa/-/browserify-rsa-4.0.1.tgz", - "integrity": "sha1-IeCr+vbyApzy+vsTNWenAdQTVSQ=", + "@types/istanbul-reports": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@types/istanbul-reports/-/istanbul-reports-1.1.1.tgz", + "integrity": "sha512-UpYjBi8xefVChsCoBpKShdxTllC9pwISirfoZsUa2AAdQg/Jd2KQGtSbw+ya7GPo7x/wAPlH6JBhKhAsXUEZNA==", + "dev": true, "requires": { - "bn.js": "^4.1.0", - "randombytes": "^2.0.1" + "@types/istanbul-lib-coverage": "*", + "@types/istanbul-lib-report": "*" } }, - "browserify-sign": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/browserify-sign/-/browserify-sign-4.0.4.tgz", - "integrity": "sha1-qk62jl17ZYuqa/alfmMMvXqT0pg=", - "requires": { - "bn.js": "^4.1.1", - "browserify-rsa": "^4.0.0", - "create-hash": "^1.1.0", - "create-hmac": "^1.1.2", - "elliptic": "^6.0.0", - "inherits": "^2.0.1", - "parse-asn1": "^5.0.0" - } + "@types/jest": { + "version": "23.3.11", + "resolved": "https://registry.npmjs.org/@types/jest/-/jest-23.3.11.tgz", + "integrity": "sha512-eroF85PoG87XjCwzxey7yBsQNkIY/TV5myKKSG/022A0FW25afdu/uub6JDMS5eT68zBBt82S+w/MFOTjeLM3Q==", + "dev": true }, - "browserify-zlib": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/browserify-zlib/-/browserify-zlib-0.2.0.tgz", - "integrity": "sha512-Z942RysHXmJrhqk88FmKBVq/v5tqmSkDz7p54G/MGyjMnCFFnC79XWNbg+Vta8W6Wb2qtSZTSxIGkJrRpCFEiA==", - "requires": { - "pako": "~1.0.5" - } + "@types/js-yaml": { + "version": "3.12.3", + "resolved": "https://registry.npmjs.org/@types/js-yaml/-/js-yaml-3.12.3.tgz", + "integrity": "sha512-otRe77JNNWzoVGLKw8TCspKswRoQToys4tuL6XYVBFxjgeM0RUrx7m3jkaTdxILxeGry3zM8mGYkGXMeQ02guA==", + "dev": true }, - "browserslist": { - "version": "2.11.3", - "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-2.11.3.tgz", - "integrity": "sha512-yWu5cXT7Av6mVwzWc8lMsJMHWn4xyjSuGYi4IozbVTLUOEYPSagUB8kiMDUHA1fS3zjr8nkxkn9jdvug4BBRmA==", + "@types/json-schema": { + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.4.tgz", + "integrity": "sha512-8+KAKzEvSUdeo+kmqnKrqgeE+LcA0tjYWFY7RPProVYwnqDjukzO+3b6dLD56rYX5TdWejnEOLJYOIeh4CXKuA==", + "dev": true + }, + "@types/jss": { + "version": "9.5.7", + "resolved": "https://registry.npmjs.org/@types/jss/-/jss-9.5.7.tgz", + "integrity": "sha512-OZimStu2QdDMtZ0h72JXqvLVbWUjXd5ZLk8vxLmfuC/nM1AabRyyGoxSufnzixrbpEcVcyy/JV5qeQu2JnjVZw==", "requires": { - "caniuse-lite": "^1.0.30000792", - "electron-to-chromium": "^1.3.30" + "csstype": "^2.0.0", + "indefinite-observable": "^1.0.1" } }, - "bser": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/bser/-/bser-2.0.0.tgz", - "integrity": "sha1-mseNPtXZFYBP2HrLFYvHlxR6Fxk=", + "@types/lodash": { + "version": "4.14.119", + "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.14.119.tgz", + "integrity": 
"sha512-Z3TNyBL8Vd/M9D9Ms2S3LmFq2sSMzahodD6rCS9V2N44HUMINb75jNkSuwAx7eo2ufqTdfOdtGQpNbieUjPQmw==" + }, + "@types/lodash.groupby": { + "version": "4.6.6", + "resolved": "https://registry.npmjs.org/@types/lodash.groupby/-/lodash.groupby-4.6.6.tgz", + "integrity": "sha512-kwg3T7Ia63KtDNoQQR8hKrLHCAgrH4I44l5uEMuA6JCbj7DiSccaV4tNV1vbjtAOpX990SolVthJCmBVtRVRgw==", "requires": { - "node-int64": "^0.4.0" + "@types/lodash": "*" } }, - "buffer": { - "version": "4.9.1", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-4.9.1.tgz", - "integrity": "sha1-bRu2AbB6TvztlwlBMgkwJ8lbwpg=", + "@types/markdown-to-jsx": { + "version": "6.9.0", + "resolved": "https://registry.npmjs.org/@types/markdown-to-jsx/-/markdown-to-jsx-6.9.0.tgz", + "integrity": "sha512-LO/oxz+ZfwBDciiVGqLhhdyeWt196kgICe0QS88K1a2u/FgUF1QkeMAm4zdnAo1kNgo2KgFP1Uqy2IiPJLWppA==", + "dev": true, "requires": { - "base64-js": "^1.0.2", - "ieee754": "^1.1.4", - "isarray": "^1.0.0" + "@types/react": "*" } }, - "buffer-equal": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/buffer-equal/-/buffer-equal-0.0.1.tgz", - "integrity": "sha1-kbx0sR6kBbyRa8aqkI+q+ltKrEs=", + "@types/mime": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@types/mime/-/mime-2.0.0.tgz", + "integrity": "sha512-A2TAGbTFdBw9azHbpVd+/FkdW2T6msN1uct1O9bH3vTerEHKZhTXJUQXy+hNq1B0RagfU8U+KBdqiZpxjhOUQA==", "dev": true }, - "buffer-from": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.1.tgz", - "integrity": "sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A==" + "@types/minimatch": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@types/minimatch/-/minimatch-3.0.3.tgz", + "integrity": "sha512-tHq6qdbT9U1IRSGf14CL0pUlULksvY9OZ+5eEgl1N7t+OA3tGvNpxJCzuKQlsNgCVwbAs670L1vcVQi8j9HjnA==", + "dev": true }, - "buffer-indexof": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/buffer-indexof/-/buffer-indexof-1.1.1.tgz", - "integrity": "sha512-4/rOEg86jivtPTeOUUT61jJO1Ya1TrR/OkqCSZDyq84WJh3LuuiphBYJN+fm5xufIk4XAFcEwte/8WzC8If/1g==" + "@types/node": { + "version": "10.17.11", + "resolved": "https://registry.npmjs.org/@types/node/-/node-10.17.11.tgz", + "integrity": "sha512-dNd2pp8qTzzNLAs3O8nH3iU9DG9866KHq9L3ISPB7DOGERZN81nW/5/g/KzMJpCU8jrbCiMRBzV9/sCEdRosig==", + "dev": true }, - "buffer-xor": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/buffer-xor/-/buffer-xor-1.0.3.tgz", - "integrity": "sha1-JuYe0UIvtw3ULm42cp7VHYVf6Nk=" + "@types/parse-json": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@types/parse-json/-/parse-json-4.0.0.tgz", + "integrity": "sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA==", + "dev": true }, - "builtin-modules": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-1.1.1.tgz", - "integrity": "sha1-Jw8HbFpywC9bZaR9+Uxf46J4iS8=" + "@types/prettier": { + "version": "1.19.0", + "resolved": "https://registry.npmjs.org/@types/prettier/-/prettier-1.19.0.tgz", + "integrity": "sha512-gDE8JJEygpay7IjA/u3JiIURvwZW08f0cZSZLAzFoX/ZmeqvS0Sqv+97aKuHpNsalAMMhwPe+iAS6fQbfmbt7A==", + "dev": true }, - "builtin-status-codes": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/builtin-status-codes/-/builtin-status-codes-3.0.0.tgz", - "integrity": "sha1-hZgoeOIbmOHGZCXgPQF0eI9Wnug=" + "@types/prop-types": { + "version": "15.5.8", + "resolved": 
"https://registry.npmjs.org/@types/prop-types/-/prop-types-15.5.8.tgz", + "integrity": "sha512-3AQoUxQcQtLHsK25wtTWIoIpgYjH3vSDroZOUr7PpCHw/jLY1RB9z9E8dBT/OSmwStVgkRNvdh+ZHNiomRieaw==" }, - "bytes": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.0.0.tgz", - "integrity": "sha1-0ygVQE1olpn4Wk6k+odV3ROpYEg=" + "@types/q": { + "version": "1.5.2", + "resolved": "https://registry.npmjs.org/@types/q/-/q-1.5.2.tgz", + "integrity": "sha512-ce5d3q03Ex0sy4R14722Rmt6MT07Ua+k4FwDfdcToYJcMKNtRVQvJ6JCAPdAmAnbRb6CsX6aYb9m96NGod9uTw==", + "dev": true }, - "cacache": { - "version": "10.0.4", - "resolved": "https://registry.npmjs.org/cacache/-/cacache-10.0.4.tgz", - "integrity": "sha512-Dph0MzuH+rTQzGPNT9fAnrPmMmjKfST6trxJeK7NQuHRaVw24VzPRWTmg9MpcwOVQZO0E1FBICUlFeNaKPIfHA==", + "@types/range-parser": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/@types/range-parser/-/range-parser-1.2.3.tgz", + "integrity": "sha512-ewFXqrQHlFsgc09MK5jP5iR7vumV/BYayNC6PgJO2LPe8vrnNFyjQjSppfEngITi0qvfKtzFvgKymGheFM9UOA==", + "dev": true + }, + "@types/react": { + "version": "16.9.22", + "resolved": "https://registry.npmjs.org/@types/react/-/react-16.9.22.tgz", + "integrity": "sha512-7OSt4EGiLvy0h5R7X+r0c7S739TCU/LvWbkNOrm10lUwNHe7XPz5OLhLOSZeCkqO9JSCly1NkYJ7ODTUqVnHJQ==", "requires": { - "bluebird": "^3.5.1", - "chownr": "^1.0.1", - "glob": "^7.1.2", - "graceful-fs": "^4.1.11", - "lru-cache": "^4.1.1", - "mississippi": "^2.0.0", - "mkdirp": "^0.5.1", - "move-concurrently": "^1.0.1", - "promise-inflight": "^1.0.1", - "rimraf": "^2.6.2", - "ssri": "^5.2.4", - "unique-filename": "^1.1.0", - "y18n": "^4.0.0" - }, - "dependencies": { - "y18n": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.0.tgz", - "integrity": "sha512-r9S/ZyXu/Xu9q1tYlpsLIsa3EeLXXk0VwlxqTcFRfg9EhMW+17kbt9G0NrgCmhGb5vT2hyhJZLfDGx+7+5Uj/w==" - } + "@types/prop-types": "*", + "csstype": "^2.2.0" } }, - "cache-base": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/cache-base/-/cache-base-1.0.1.tgz", - "integrity": "sha512-AKcdTnFSWATd5/GCPRxr2ChwIJ85CeyrEyjRHlKxQ56d4XJMGym0uAiKn0xbLOGOl3+yRpOTi484dVCEc5AUzQ==", + "@types/react-dom": { + "version": "16.9.5", + "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-16.9.5.tgz", + "integrity": "sha512-BX6RQ8s9D+2/gDhxrj8OW+YD4R+8hj7FEM/OJHGNR0KipE1h1mSsf39YeyC81qafkq+N3rU3h3RFbLSwE5VqUg==", + "dev": true, "requires": { - "collection-visit": "^1.0.0", - "component-emitter": "^1.2.1", - "get-value": "^2.0.6", - "has-value": "^1.0.0", - "isobject": "^3.0.1", - "set-value": "^2.0.0", - "to-object-path": "^0.3.0", - "union-value": "^1.0.0", - "unset-value": "^1.0.0" + "@types/react": "*" } }, - "callsites": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/callsites/-/callsites-2.0.0.tgz", - "integrity": "sha1-BuuE8A7qQT2oav/vrL/7Ngk7PFA=" - }, - "camel-case": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/camel-case/-/camel-case-3.0.0.tgz", - "integrity": "sha1-yjw2iKTpzzpM2nd9xNy8cTJJz3M=", + "@types/react-router": { + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/@types/react-router/-/react-router-4.4.3.tgz", + "integrity": "sha512-8GmjakEBFNCLJbpg9jtDp1EDvFP0VkIPPKBpVwmB3Q+9whFoHu8rluMUXUE5SoGkEQvVOtgJzWmUsJojNpFMQQ==", + "dev": true, "requires": { - "no-case": "^2.2.0", - "upper-case": "^1.1.1" + "@types/history": "*", + "@types/react": "*" } }, - "camelcase": { - "version": "4.1.0", - "resolved": 
"https://registry.npmjs.org/camelcase/-/camelcase-4.1.0.tgz", - "integrity": "sha1-1UVjW+HjPFQmScaRc+Xeas+uNN0=" - }, - "camelcase-keys": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/camelcase-keys/-/camelcase-keys-2.1.0.tgz", - "integrity": "sha1-MIvur/3ygRkFHvodkyITyRuPkuc=", + "@types/react-router-dom": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/@types/react-router-dom/-/react-router-dom-4.3.1.tgz", + "integrity": "sha512-GbztJAScOmQ/7RsQfO4cd55RuH1W4g6V1gDW3j4riLlt+8yxYLqqsiMzmyuXBLzdFmDtX/uU2Bpcm0cmudv44A==", + "dev": true, "requires": { - "camelcase": "^2.0.0", - "map-obj": "^1.0.0" - }, - "dependencies": { - "camelcase": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-2.1.1.tgz", - "integrity": "sha1-fB0W1nmhu+WcoCys7PsBHiAfWh8=" - } + "@types/history": "*", + "@types/react": "*", + "@types/react-router": "*" } }, - "caniuse-api": { - "version": "1.6.1", - "resolved": "https://registry.npmjs.org/caniuse-api/-/caniuse-api-1.6.1.tgz", - "integrity": "sha1-tTTnxzTE+B7F++isoq0kNUuWLGw=", + "@types/react-test-renderer": { + "version": "16.0.3", + "resolved": "https://registry.npmjs.org/@types/react-test-renderer/-/react-test-renderer-16.0.3.tgz", + "integrity": "sha512-NWOAxVQeJxpXuNKgw83Hah0nquiw1nUexM9qY/Hk3a+XhZwgMtaa6GLA9E1TKMT75Odb3/KE/jiBO4enTuEJjQ==", + "dev": true, "requires": { - "browserslist": "^1.3.6", - "caniuse-db": "^1.0.30000529", - "lodash.memoize": "^4.1.2", - "lodash.uniq": "^4.5.0" - }, - "dependencies": { - "browserslist": { - "version": "1.7.7", - "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-1.7.7.tgz", - "integrity": "sha1-C9dnBCWL6CmyOYu1Dkti0aFmsLk=", - "requires": { - "caniuse-db": "^1.0.30000639", - "electron-to-chromium": "^1.2.7" - } - } + "@types/react": "*" } }, - "caniuse-db": { - "version": "1.0.30000927", - "resolved": "https://registry.npmjs.org/caniuse-db/-/caniuse-db-1.0.30000927.tgz", - "integrity": "sha512-CX/QvLA8oh7kQ9cHCCzFm0UZW4KwSyQSRJ5A1XtH42HaMJQ0yh+9fEVWagMqv9I1vSCtaqA5Mb8k0uKfv7jhDw==" - }, - "caniuse-lite": { - "version": "1.0.30000927", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30000927.tgz", - "integrity": "sha512-ogq4NbUWf1uG/j66k0AmiO3GjqJAlQyF8n4w8a954cbCyFKmYGvRtgz6qkq2fWuduTXHibX7GyYL5Pg58Aks2g==" - }, - "capture-exit": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/capture-exit/-/capture-exit-1.2.0.tgz", - "integrity": "sha1-HF/MSJ/QqwDU8ax64QcuMXP7q28=", + "@types/react-transition-group": { + "version": "2.0.15", + "resolved": "https://registry.npmjs.org/@types/react-transition-group/-/react-transition-group-2.0.15.tgz", + "integrity": "sha512-S0QnNzbHoWXDbKBl/xk5dxA4FT+BNlBcI3hku991cl8Cz3ytOkUMcCRtzdX11eb86E131bSsQqy5WrPCdJYblw==", "requires": { - "rsvp": "^3.3.3" + "@types/react": "*" } }, - "capture-stack-trace": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/capture-stack-trace/-/capture-stack-trace-1.0.1.tgz", - "integrity": "sha512-mYQLZnx5Qt1JgB1WEiMCf2647plpGeQ2NMR/5L0HNZzGQo4fuSPnK+wjfPnKZV0aiJDgzmWqqkV/g7JD+DW0qw==" - }, - "cardinal": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/cardinal/-/cardinal-2.1.1.tgz", - "integrity": "sha1-fMEFXYItISlU0HsIXeolHMe8VQU=", + "@types/react-virtualized": { + "version": "9.18.11", + "resolved": "https://registry.npmjs.org/@types/react-virtualized/-/react-virtualized-9.18.11.tgz", + "integrity": "sha512-KT2FzDtV9YT2uN+5g9HPaME4Dtdlh7tEHEqTsOExWYzJGZjGKqHgBHTZC0vnHkzU1rXF9rVDxpi5MZuApoO7rA==", 
"dev": true, "requires": { - "ansicolors": "~0.3.2", - "redeyed": "~2.1.0" + "@types/prop-types": "*", + "@types/react": "*" } }, - "case-sensitive-paths-webpack-plugin": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/case-sensitive-paths-webpack-plugin/-/case-sensitive-paths-webpack-plugin-2.1.1.tgz", - "integrity": "sha1-PSnO2MHxJL9vU4Rvs/WJRzH9yQk=" + "@types/serve-static": { + "version": "1.13.2", + "resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.13.2.tgz", + "integrity": "sha512-/BZ4QRLpH/bNYgZgwhKEh+5AsboDBcUdlBYgzoLX0fpj3Y2gp6EApyOlM3bK53wQS/OE1SrdSYBAbux2D1528Q==", + "dev": true, + "requires": { + "@types/express-serve-static-core": "*", + "@types/mime": "*" + } }, - "caseless": { - "version": "0.12.0", - "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", - "integrity": "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw=" + "@types/stack-utils": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@types/stack-utils/-/stack-utils-1.0.1.tgz", + "integrity": "sha512-l42BggppR6zLmpfU6fq9HEa2oGPEI8yrSPL3GITjfRInppYFahObbIQOQK3UGxEnyQpltZLaPe75046NOZQikw==", + "dev": true }, - "casperjs": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/casperjs/-/casperjs-1.1.4.tgz", - "integrity": "sha1-6wH07YWsUgqPTZMrTap00+d7x0Y=", + "@types/strip-bom": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@types/strip-bom/-/strip-bom-3.0.0.tgz", + "integrity": "sha1-FKjsOVbC6B7bdSB5CuzyHCkK69I=", "dev": true }, - "center-align": { - "version": "0.1.3", - "resolved": "https://registry.npmjs.org/center-align/-/center-align-0.1.3.tgz", - "integrity": "sha1-qg0yYptu6XIgBBHL1EYckHvCt60=", - "requires": { - "align-text": "^0.1.3", - "lazy-cache": "^1.0.3" - } + "@types/strip-json-comments": { + "version": "0.0.30", + "resolved": "https://registry.npmjs.org/@types/strip-json-comments/-/strip-json-comments-0.0.30.tgz", + "integrity": "sha512-7NQmHra/JILCd1QqpSzl8+mJRc8ZHz3uDm8YV1Ks9IhK0epEiTw8aIErbvH9PI+6XbqhyIQy3462nEsn7UVzjQ==", + "dev": true }, - "chalk": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-1.1.3.tgz", - "integrity": "sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg=", + "@types/testing-library__dom": { + "version": "6.12.1", + "resolved": "https://registry.npmjs.org/@types/testing-library__dom/-/testing-library__dom-6.12.1.tgz", + "integrity": "sha512-cgqnEjxKk31tQt29j4baSWaZPNjQf3bHalj2gcHQTpW5SuHRal76gOpF0vypeEo6o+sS5inOvvNdzLY0B3FB2A==", + "dev": true, "requires": { - "ansi-styles": "^2.2.1", - "escape-string-regexp": "^1.0.2", - "has-ansi": "^2.0.0", - "strip-ansi": "^3.0.0", - "supports-color": "^2.0.0" + "pretty-format": "^24.3.0" }, "dependencies": { - "ansi-styles": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.2.1.tgz", - "integrity": "sha1-tDLdM1i2NM914eRmQ2gkBTPB3b4=" + "ansi-regex": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", + "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", + "dev": true }, - "supports-color": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-2.0.0.tgz", - "integrity": "sha1-U10EXOa2Nj+kARcIRimZXp3zJMc=" + "pretty-format": { + "version": "24.9.0", + "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-24.9.0.tgz", + "integrity": 
"sha512-00ZMZUiHaJrNfk33guavqgvfJS30sLYf0f8+Srklv0AMPodGGHcoHgksZ3OThYnIvOd+8yMCn0YiEOogjlgsnA==", + "dev": true, + "requires": { + "@jest/types": "^24.9.0", + "ansi-regex": "^4.0.0", + "ansi-styles": "^3.2.0", + "react-is": "^16.8.4" + } + }, + "react-is": { + "version": "16.12.0", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.12.0.tgz", + "integrity": "sha512-rPCkf/mWBtKc97aLL9/txD8DZdemK0vkA3JMLShjlJB3Pj3s+lpf1KaBzMfQrAmhMQB0n1cU/SUGgKKBCe837Q==", + "dev": true } } }, - "change-case": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/change-case/-/change-case-3.0.2.tgz", - "integrity": "sha512-Mww+SLF6MZ0U6kdg11algyKd5BARbyM4TbFBepwowYSR5ClfQGCGtxNXgykpN0uF/bstWeaGDT4JWaDh8zWAHA==", + "@types/testing-library__react": { + "version": "9.1.2", + "resolved": "https://registry.npmjs.org/@types/testing-library__react/-/testing-library__react-9.1.2.tgz", + "integrity": "sha512-CYaMqrswQ+cJACy268jsLAw355DZtPZGt3Jwmmotlcu8O/tkoXBI6AeZ84oZBJsIsesozPKzWzmv/0TIU+1E9Q==", "dev": true, "requires": { - "camel-case": "^3.0.0", - "constant-case": "^2.0.0", - "dot-case": "^2.1.0", - "header-case": "^1.0.0", - "is-lower-case": "^1.1.0", - "is-upper-case": "^1.1.0", - "lower-case": "^1.1.1", - "lower-case-first": "^1.0.0", - "no-case": "^2.3.2", - "param-case": "^2.1.0", - "pascal-case": "^2.0.0", - "path-case": "^2.1.0", - "sentence-case": "^2.1.0", - "snake-case": "^2.1.0", - "swap-case": "^1.1.0", - "title-case": "^2.1.0", - "upper-case": "^1.1.1", - "upper-case-first": "^1.1.0" + "@types/react-dom": "*", + "@types/testing-library__dom": "*" } }, - "change-emitter": { - "version": "0.1.6", - "resolved": "https://registry.npmjs.org/change-emitter/-/change-emitter-0.1.6.tgz", - "integrity": "sha1-6LL+PX8at9aaMhma/5HqaTFAlRU=" - }, - "chardet": { - "version": "0.4.2", - "resolved": "https://registry.npmjs.org/chardet/-/chardet-0.4.2.tgz", - "integrity": "sha1-tUc7M9yXxCTl2Y3IfVXU2KKci/I=" + "@types/yargs": { + "version": "13.0.3", + "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-13.0.3.tgz", + "integrity": "sha512-K8/LfZq2duW33XW/tFwEAfnZlqIfVsoyRB3kfXdPXYhl0nfM8mmh7GS0jg7WrX2Dgq/0Ha/pR1PaR+BvmWwjiQ==", + "dev": true, + "requires": { + "@types/yargs-parser": "*" + } }, - "check-types": { - "version": "7.4.0", - "resolved": "https://registry.npmjs.org/check-types/-/check-types-7.4.0.tgz", - "integrity": "sha512-YbulWHdfP99UfZ73NcUDlNJhEIDgm9Doq9GhpyXbF+7Aegi3CVV7qqMCKTTqJxlvEvnQBp9IA+dxsGN6xK/nSg==", + "@types/yargs-parser": { + "version": "13.1.0", + "resolved": "https://registry.npmjs.org/@types/yargs-parser/-/yargs-parser-13.1.0.tgz", + "integrity": "sha512-gCubfBUZ6KxzoibJ+SCUc/57Ms1jz5NjHe4+dI2krNmU5zCPAphyLJYyTOg06ueIyfj+SaCUqmzun7ImlxDcKg==", "dev": true }, - "cheerio": { - "version": "1.0.0-rc.2", - "resolved": "https://registry.npmjs.org/cheerio/-/cheerio-1.0.0-rc.2.tgz", - "integrity": "sha1-S59TqBsn5NXawxwP/Qz6A8xoMNs=", + "@typescript-eslint/eslint-plugin": { + "version": "2.27.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-2.27.0.tgz", + "integrity": "sha512-/my+vVHRN7zYgcp0n4z5A6HAK7bvKGBiswaM5zIlOQczsxj/aiD7RcgD+dvVFuwFaGh5+kM7XA6Q6PN0bvb1tw==", "dev": true, "requires": { - "css-select": "~1.2.0", - "dom-serializer": "~0.1.0", - "entities": "~1.1.1", - "htmlparser2": "^3.9.1", - "lodash": "^4.15.0", - "parse5": "^3.0.1" + "@typescript-eslint/experimental-utils": "2.27.0", + "functional-red-black-tree": "^1.0.1", + "regexpp": "^3.0.0", + "tsutils": "^3.17.1" + } + }, + 
"@typescript-eslint/experimental-utils": { + "version": "2.27.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/experimental-utils/-/experimental-utils-2.27.0.tgz", + "integrity": "sha512-vOsYzjwJlY6E0NJRXPTeCGqjv5OHgRU1kzxHKWJVPjDYGbPgLudBXjIlc+OD1hDBZ4l1DLbOc5VjofKahsu9Jw==", + "dev": true, + "requires": { + "@types/json-schema": "^7.0.3", + "@typescript-eslint/typescript-estree": "2.27.0", + "eslint-scope": "^5.0.0", + "eslint-utils": "^2.0.0" + } + }, + "@typescript-eslint/parser": { + "version": "2.27.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-2.27.0.tgz", + "integrity": "sha512-HFUXZY+EdwrJXZo31DW4IS1ujQW3krzlRjBrFRrJcMDh0zCu107/nRfhk/uBasO8m0NVDbBF5WZKcIUMRO7vPg==", + "dev": true, + "requires": { + "@types/eslint-visitor-keys": "^1.0.0", + "@typescript-eslint/experimental-utils": "2.27.0", + "@typescript-eslint/typescript-estree": "2.27.0", + "eslint-visitor-keys": "^1.1.0" + } + }, + "@typescript-eslint/typescript-estree": { + "version": "2.27.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-2.27.0.tgz", + "integrity": "sha512-t2miCCJIb/FU8yArjAvxllxbTiyNqaXJag7UOpB5DVoM3+xnjeOngtqlJkLRnMtzaRcJhe3CIR9RmL40omubhg==", + "dev": true, + "requires": { + "debug": "^4.1.1", + "eslint-visitor-keys": "^1.1.0", + "glob": "^7.1.6", + "is-glob": "^4.0.1", + "lodash": "^4.17.15", + "semver": "^6.3.0", + "tsutils": "^3.17.1" }, "dependencies": { - "domhandler": { - "version": "2.4.2", - "resolved": "https://registry.npmjs.org/domhandler/-/domhandler-2.4.2.tgz", - "integrity": "sha512-JiK04h0Ht5u/80fdLMCEmV4zkNh2BcoMFBmZ/91WtYZ8qVXSKjiw7fXMgFPnHcSZgOo3XdinHvmnDUeMf5R4wA==", + "debug": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", + "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", "dev": true, "requires": { - "domelementtype": "1" + "ms": "^2.1.1" } }, - "htmlparser2": { - "version": "3.10.0", - "resolved": "https://registry.npmjs.org/htmlparser2/-/htmlparser2-3.10.0.tgz", - "integrity": "sha512-J1nEUGv+MkXS0weHNWVKJJ+UrLfePxRWpN3C9bEi9fLxL2+ggW94DQvgYVXsaT30PGwYRIZKNZXuyMhp3Di4bQ==", + "glob": { + "version": "7.1.6", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", + "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", "dev": true, "requires": { - "domelementtype": "^1.3.0", - "domhandler": "^2.3.0", - "domutils": "^1.5.1", - "entities": "^1.1.1", - "inherits": "^2.0.1", - "readable-stream": "^3.0.6" + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" } }, - "parse5": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/parse5/-/parse5-3.0.3.tgz", - "integrity": "sha512-rgO9Zg5LLLkfJF9E6CCmXlSE4UVceloys8JrFqCcHloC3usd/kJCyPDwH2SOlzix2j3xaP9sUX3e8+kvkuleAA==", + "is-glob": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.1.tgz", + "integrity": "sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg==", "dev": true, "requires": { - "@types/node": "*" + "is-extglob": "^2.1.1" } }, - "readable-stream": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.1.1.tgz", - "integrity": "sha512-DkN66hPyqDhnIQ6Jcsvx9bFjhw214O4poMBcIMgPVpQvNy9a0e0Uhg5SqySyDKAmUlwt8LonTBz1ezOnM8pUdA==", - "dev": 
true, - "requires": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - } + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, + "semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "dev": true } } }, - "chokidar": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-1.7.0.tgz", - "integrity": "sha1-eY5ol3gVHIB2tLNg5e3SjNortGg=", + "@webassemblyjs/ast": { + "version": "1.8.5", + "resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.8.5.tgz", + "integrity": "sha512-aJMfngIZ65+t71C3y2nBBg5FFG0Okt9m0XEgWZ7Ywgn1oMAT8cNwx00Uv1cQyHtidq0Xn94R4TAywO+LCQ+ZAQ==", + "dev": true, "requires": { - "anymatch": "^1.3.0", - "async-each": "^1.0.0", - "fsevents": "^1.0.0", - "glob-parent": "^2.0.0", - "inherits": "^2.0.1", - "is-binary-path": "^1.0.0", - "is-glob": "^2.0.0", - "path-is-absolute": "^1.0.0", - "readdirp": "^2.0.0" - }, - "dependencies": { - "is-extglob": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-1.0.0.tgz", - "integrity": "sha1-rEaBd8SUNAWgkvyPKXYMb/xiBsA=" - }, - "is-glob": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-2.0.1.tgz", - "integrity": "sha1-0Jb5JqPe1WAPP9/ZEZjLCIjC2GM=", - "requires": { - "is-extglob": "^1.0.0" - } - } + "@webassemblyjs/helper-module-context": "1.8.5", + "@webassemblyjs/helper-wasm-bytecode": "1.8.5", + "@webassemblyjs/wast-parser": "1.8.5" } }, - "chownr": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/chownr/-/chownr-1.1.1.tgz", - "integrity": "sha512-j38EvO5+LHX84jlo6h4UzmOwi0UgW61WRyPtJz4qaadK5eY3BTS5TY/S1Stc3Uk2lIM6TPevAlULiEJwie860g==" + "@webassemblyjs/floating-point-hex-parser": { + "version": "1.8.5", + "resolved": "https://registry.npmjs.org/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.8.5.tgz", + "integrity": "sha512-9p+79WHru1oqBh9ewP9zW95E3XAo+90oth7S5Re3eQnECGq59ly1Ri5tsIipKGpiStHsUYmY3zMLqtk3gTcOtQ==", + "dev": true + }, + "@webassemblyjs/helper-api-error": { + "version": "1.8.5", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-api-error/-/helper-api-error-1.8.5.tgz", + "integrity": "sha512-Za/tnzsvnqdaSPOUXHyKJ2XI7PDX64kWtURyGiJJZKVEdFOsdKUCPTNEVFZq3zJ2R0G5wc2PZ5gvdTRFgm81zA==", + "dev": true + }, + "@webassemblyjs/helper-buffer": { + "version": "1.8.5", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-buffer/-/helper-buffer-1.8.5.tgz", + "integrity": "sha512-Ri2R8nOS0U6G49Q86goFIPNgjyl6+oE1abW1pS84BuhP1Qcr5JqMwRFT3Ah3ADDDYGEgGs1iyb1DGX+kAi/c/Q==", + "dev": true }, - "chrome-launcher": { - "version": "0.10.5", - "resolved": "https://registry.npmjs.org/chrome-launcher/-/chrome-launcher-0.10.5.tgz", - "integrity": "sha512-Gbzg8HlWhyuoVqflhiXwfFXhzNfNWvAkSWv2QR1Yl6mwsMo1oCLAVjp2tIySuS4lrZLEjzVx1fOy584yE76P4g==", + "@webassemblyjs/helper-code-frame": { + "version": "1.8.5", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-code-frame/-/helper-code-frame-1.8.5.tgz", + "integrity": "sha512-VQAadSubZIhNpH46IR3yWO4kZZjMxN1opDrzePLdVKAZ+DFjkGD/rf4v1jap744uPVU6yjL/smZbRIIJTOUnKQ==", "dev": true, "requires": { - "@types/core-js": "^0.9.41", - "@types/mkdirp": "^0.3.29", - 
"@types/node": "^9.3.0", - "@types/rimraf": "^0.0.28", - "is-wsl": "^1.1.0", - "lighthouse-logger": "^1.0.0", - "mkdirp": "0.5.1", - "rimraf": "^2.6.1" - }, - "dependencies": { - "@types/node": { - "version": "9.6.41", - "resolved": "https://registry.npmjs.org/@types/node/-/node-9.6.41.tgz", - "integrity": "sha512-sPZWEbFMz6qAy9SLY7jh5cgepmsiwqUUHjvEm8lpU6kug2hmmcyuTnwhoGw/GWpI5Npue4EqvsiQQI0eWjW/ZA==", - "dev": true - } + "@webassemblyjs/wast-printer": "1.8.5" } }, - "chrome-remote-interface": { - "version": "0.25.7", - "resolved": "https://registry.npmjs.org/chrome-remote-interface/-/chrome-remote-interface-0.25.7.tgz", - "integrity": "sha512-6zI6LbR2IiGmduFZededaerEr9hHXabxT/L+fRrdq65a0CfyLMzpq0BKuZiqN0Upqcacsb6q2POj7fmobwBsEA==", + "@webassemblyjs/helper-fsm": { + "version": "1.8.5", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-fsm/-/helper-fsm-1.8.5.tgz", + "integrity": "sha512-kRuX/saORcg8se/ft6Q2UbRpZwP4y7YrWsLXPbbmtepKr22i8Z4O3V5QE9DbZK908dh5Xya4Un57SDIKwB9eow==", + "dev": true + }, + "@webassemblyjs/helper-module-context": { + "version": "1.8.5", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-module-context/-/helper-module-context-1.8.5.tgz", + "integrity": "sha512-/O1B236mN7UNEU4t9X7Pj38i4VoU8CcMHyy3l2cV/kIF4U5KoHXDVqcDuOs1ltkac90IM4vZdHc52t1x8Yfs3g==", "dev": true, "requires": { - "commander": "2.11.x", - "ws": "3.3.x" - }, - "dependencies": { - "commander": { - "version": "2.11.0", - "resolved": "https://registry.npmjs.org/commander/-/commander-2.11.0.tgz", - "integrity": "sha512-b0553uYA5YAEGgyYIGYROzKQ7X5RAqedkfjiZxwi0kL1g3bOaBNNZfYkzt/CL0umgD5wc9Jec2FbB98CjkMRvQ==", - "dev": true - }, - "ws": { - "version": "3.3.3", - "resolved": "https://registry.npmjs.org/ws/-/ws-3.3.3.tgz", - "integrity": "sha512-nnWLa/NwZSt4KQJu51MYlCcSQ5g7INpOrOMt4XV8j4dqTXdmlUmSHQ8/oLC069ckre0fRsgfvsKwbTdtKLCDkA==", - "dev": true, - "requires": { - "async-limiter": "~1.0.0", - "safe-buffer": "~5.1.0", - "ultron": "~1.1.0" - } - } + "@webassemblyjs/ast": "1.8.5", + "mamacro": "^0.0.3" } }, - "chromy": { - "version": "0.5.11", - "resolved": "https://registry.npmjs.org/chromy/-/chromy-0.5.11.tgz", - "integrity": "sha512-SBz5/xPYgeQiwFGMixPNQcbgnzsRHGCJnZKNJ5S0SmauSUY77/mqA8NjotQa12SZ3tZy0NX2gyGP41BAUWI6Rg==", + "@webassemblyjs/helper-wasm-bytecode": { + "version": "1.8.5", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.8.5.tgz", + "integrity": "sha512-Cu4YMYG3Ddl72CbmpjU/wbP6SACcOPVbHN1dI4VJNJVgFwaKf1ppeFJrwydOG3NDHxVGuCfPlLZNyEdIYlQ6QQ==", + "dev": true + }, + "@webassemblyjs/helper-wasm-section": { + "version": "1.8.5", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.8.5.tgz", + "integrity": "sha512-VV083zwR+VTrIWWtgIUpqfvVdK4ff38loRmrdDBgBT8ADXYsEZ5mPQ4Nde90N3UYatHdYoDIFb7oHzMncI02tA==", "dev": true, "requires": { - "async-chain-proxy": "^0.1.5", - "babel-runtime": "^6.26.0", - "chrome-launcher": "^0.10.2", - "chrome-remote-interface": "^0.25.5", - "jimp": "^0.2.28", - "uuid": "^3.2.1" + "@webassemblyjs/ast": "1.8.5", + "@webassemblyjs/helper-buffer": "1.8.5", + "@webassemblyjs/helper-wasm-bytecode": "1.8.5", + "@webassemblyjs/wasm-gen": "1.8.5" } }, - "ci-info": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-1.6.0.tgz", - "integrity": "sha512-vsGdkwSCDpWmP80ncATX7iea5DWQemg1UgCW5J8tqjU3lYw4FBYuj89J0CTVomA7BEfvSZd84GmHko+MxFQU2A==" + "@webassemblyjs/ieee754": { + "version": "1.8.5", + "resolved": 
"https://registry.npmjs.org/@webassemblyjs/ieee754/-/ieee754-1.8.5.tgz", + "integrity": "sha512-aaCvQYrvKbY/n6wKHb/ylAJr27GglahUO89CcGXMItrOBqRarUMxWLJgxm9PJNuKULwN5n1csT9bYoMeZOGF3g==", + "dev": true, + "requires": { + "@xtuc/ieee754": "^1.2.0" + } }, - "cipher-base": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/cipher-base/-/cipher-base-1.0.4.tgz", - "integrity": "sha512-Kkht5ye6ZGmwv40uUDZztayT2ThLQGfnj/T71N/XzeZeo3nf8foyW7zGTsPYkEya3m5f3cAypH+qe7YOrM1U2Q==", + "@webassemblyjs/leb128": { + "version": "1.8.5", + "resolved": "https://registry.npmjs.org/@webassemblyjs/leb128/-/leb128-1.8.5.tgz", + "integrity": "sha512-plYUuUwleLIziknvlP8VpTgO4kqNaH57Y3JnNa6DLpu/sGcP6hbVdfdX5aHAV716pQBKrfuU26BJK29qY37J7A==", + "dev": true, "requires": { - "inherits": "^2.0.1", - "safe-buffer": "^5.0.1" + "@xtuc/long": "4.2.2" } }, - "clap": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/clap/-/clap-1.2.3.tgz", - "integrity": "sha512-4CoL/A3hf90V3VIEjeuhSvlGFEHKzOz+Wfc2IVZc+FaUgU0ZQafJTP49fvnULipOPcAfqhyI2duwQyns6xqjYA==", + "@webassemblyjs/utf8": { + "version": "1.8.5", + "resolved": "https://registry.npmjs.org/@webassemblyjs/utf8/-/utf8-1.8.5.tgz", + "integrity": "sha512-U7zgftmQriw37tfD934UNInokz6yTmn29inT2cAetAsaU9YeVCveWEwhKL1Mg4yS7q//NGdzy79nlXh3bT8Kjw==", + "dev": true + }, + "@webassemblyjs/wasm-edit": { + "version": "1.8.5", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-edit/-/wasm-edit-1.8.5.tgz", + "integrity": "sha512-A41EMy8MWw5yvqj7MQzkDjU29K7UJq1VrX2vWLzfpRHt3ISftOXqrtojn7nlPsZ9Ijhp5NwuODuycSvfAO/26Q==", + "dev": true, "requires": { - "chalk": "^1.1.3" + "@webassemblyjs/ast": "1.8.5", + "@webassemblyjs/helper-buffer": "1.8.5", + "@webassemblyjs/helper-wasm-bytecode": "1.8.5", + "@webassemblyjs/helper-wasm-section": "1.8.5", + "@webassemblyjs/wasm-gen": "1.8.5", + "@webassemblyjs/wasm-opt": "1.8.5", + "@webassemblyjs/wasm-parser": "1.8.5", + "@webassemblyjs/wast-printer": "1.8.5" } }, - "class-utils": { - "version": "0.3.6", - "resolved": "https://registry.npmjs.org/class-utils/-/class-utils-0.3.6.tgz", - "integrity": "sha512-qOhPa/Fj7s6TY8H8esGu5QNpMMQxz79h+urzrNYN6mn+9BnxlDGf5QZ+XeCDsxSjPqsSR56XOZOJmpeurnLMeg==", + "@webassemblyjs/wasm-gen": { + "version": "1.8.5", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-gen/-/wasm-gen-1.8.5.tgz", + "integrity": "sha512-BCZBT0LURC0CXDzj5FXSc2FPTsxwp3nWcqXQdOZE4U7h7i8FqtFK5Egia6f9raQLpEKT1VL7zr4r3+QX6zArWg==", + "dev": true, "requires": { - "arr-union": "^3.1.0", - "define-property": "^0.2.5", - "isobject": "^3.0.0", - "static-extend": "^0.1.1" + "@webassemblyjs/ast": "1.8.5", + "@webassemblyjs/helper-wasm-bytecode": "1.8.5", + "@webassemblyjs/ieee754": "1.8.5", + "@webassemblyjs/leb128": "1.8.5", + "@webassemblyjs/utf8": "1.8.5" + } + }, + "@webassemblyjs/wasm-opt": { + "version": "1.8.5", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-opt/-/wasm-opt-1.8.5.tgz", + "integrity": "sha512-HKo2mO/Uh9A6ojzu7cjslGaHaUU14LdLbGEKqTR7PBKwT6LdPtLLh9fPY33rmr5wcOMrsWDbbdCHq4hQUdd37Q==", + "dev": true, + "requires": { + "@webassemblyjs/ast": "1.8.5", + "@webassemblyjs/helper-buffer": "1.8.5", + "@webassemblyjs/wasm-gen": "1.8.5", + "@webassemblyjs/wasm-parser": "1.8.5" + } + }, + "@webassemblyjs/wasm-parser": { + "version": "1.8.5", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-parser/-/wasm-parser-1.8.5.tgz", + "integrity": "sha512-pi0SYE9T6tfcMkthwcgCpL0cM9nRYr6/6fjgDtL6q/ZqKHdMWvxitRi5JcZ7RI4SNJJYnYNaWy5UUrHQy998lw==", + "dev": true, + "requires": { + 
"@webassemblyjs/ast": "1.8.5", + "@webassemblyjs/helper-api-error": "1.8.5", + "@webassemblyjs/helper-wasm-bytecode": "1.8.5", + "@webassemblyjs/ieee754": "1.8.5", + "@webassemblyjs/leb128": "1.8.5", + "@webassemblyjs/utf8": "1.8.5" + } + }, + "@webassemblyjs/wast-parser": { + "version": "1.8.5", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wast-parser/-/wast-parser-1.8.5.tgz", + "integrity": "sha512-daXC1FyKWHF1i11obK086QRlsMsY4+tIOKgBqI1lxAnkp9xe9YMcgOxm9kLe+ttjs5aWV2KKE1TWJCN57/Btsg==", + "dev": true, + "requires": { + "@webassemblyjs/ast": "1.8.5", + "@webassemblyjs/floating-point-hex-parser": "1.8.5", + "@webassemblyjs/helper-api-error": "1.8.5", + "@webassemblyjs/helper-code-frame": "1.8.5", + "@webassemblyjs/helper-fsm": "1.8.5", + "@xtuc/long": "4.2.2" + } + }, + "@webassemblyjs/wast-printer": { + "version": "1.8.5", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wast-printer/-/wast-printer-1.8.5.tgz", + "integrity": "sha512-w0U0pD4EhlnvRyeJzBqaVSJAo9w/ce7/WPogeXLzGkO6hzhr4GnQIZ4W4uUt5b9ooAaXPtnXlj0gzsXEOUNYMg==", + "dev": true, + "requires": { + "@webassemblyjs/ast": "1.8.5", + "@webassemblyjs/wast-parser": "1.8.5", + "@xtuc/long": "4.2.2" + } + }, + "@xtuc/ieee754": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@xtuc/ieee754/-/ieee754-1.2.0.tgz", + "integrity": "sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA==", + "dev": true + }, + "@xtuc/long": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@xtuc/long/-/long-4.2.2.tgz", + "integrity": "sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ==", + "dev": true + }, + "abab": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/abab/-/abab-2.0.3.tgz", + "integrity": "sha512-tsFzPpcttalNjFBCFMqsKYQcWxxen1pgJR56by//QwvJc4/OUS3kPOOttx2tSIfjsylB0pYu7f5D3K1RCxUnUg==", + "dev": true + }, + "abort-controller": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz", + "integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==", + "dev": true, + "requires": { + "event-target-shim": "^5.0.0" + } + }, + "accepts": { + "version": "1.3.7", + "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.7.tgz", + "integrity": "sha512-Il80Qs2WjYlJIBNzNkK6KYqlVMTbZLXgHx2oT0pU/fjRHyEp+PEfEPY0R3WCwAGVOtauxh1hOxNgIf5bv7dQpA==", + "dev": true, + "requires": { + "mime-types": "~2.1.24", + "negotiator": "0.6.2" }, "dependencies": { - "define-property": { - "version": "0.2.5", - "resolved": "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz", - "integrity": "sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY=", + "mime-db": { + "version": "1.43.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.43.0.tgz", + "integrity": "sha512-+5dsGEEovYbT8UY9yD7eE4XTc4UwJ1jBYlgaQQF38ENsKR3wj/8q8RFZrF9WIZpB2V1ArTVFUva8sAul1NzRzQ==", + "dev": true + }, + "mime-types": { + "version": "2.1.26", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.26.tgz", + "integrity": "sha512-01paPWYgLrkqAyrlDorC1uDwl2p3qZT7yl806vW7DvDoxwXi46jsjFbg+WdwotBIk6/MbEhO/dh5aZ5sNj/dWQ==", + "dev": true, "requires": { - "is-descriptor": "^0.1.0" + "mime-db": "1.43.0" } } } }, - "classnames": { - "version": "2.2.6", - "resolved": "https://registry.npmjs.org/classnames/-/classnames-2.2.6.tgz", - "integrity": 
"sha512-JR/iSQOSt+LQIWwrwEzJ9uk0xfN3mTVYMwt1Ir5mUcSN6pU+V4zQFFaJsclJbPuAUQH+yfWef6tm7l1quW3C8Q==" + "acorn": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.1.1.tgz", + "integrity": "sha512-add7dgA5ppRPxCFJoAGfMDi7PIBXq1RtGo7BhbLaxwrXPOmw8gq48Y9ozT01hUKy9byMjlR20EJhu5zlkErEkg==", + "dev": true }, - "clean-css": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/clean-css/-/clean-css-4.2.1.tgz", - "integrity": "sha512-4ZxI6dy4lrY6FHzfiy1aEOXgu4LIsW2MhwG0VBKdcoGoH/XLFgaHSdLTGr4O8Be6A8r3MOphEiI8Gc1n0ecf3g==", + "acorn-globals": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/acorn-globals/-/acorn-globals-4.3.4.tgz", + "integrity": "sha512-clfQEh21R+D0leSbUdWf3OcfqyaCSAQ8Ryq00bofSekfr9W8u1jyYZo6ir0xu9Gtcf7BjcHJpnbZH7JOCpP60A==", + "dev": true, "requires": { - "source-map": "~0.6.0" + "acorn": "^6.0.1", + "acorn-walk": "^6.0.1" }, "dependencies": { - "source-map": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==" + "acorn": { + "version": "6.4.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-6.4.1.tgz", + "integrity": "sha512-ZVA9k326Nwrj3Cj9jlh3wGFutC2ZornPNARZwsNYqQYgN0EsV2d53w5RN/co65Ohn4sUAUtb1rSUAOD6XN9idA==", + "dev": true } } }, - "cli-boxes": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/cli-boxes/-/cli-boxes-1.0.0.tgz", - "integrity": "sha1-T6kXw+WclKAEzWH47lCdplFocUM=" + "acorn-jsx": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.2.0.tgz", + "integrity": "sha512-HiUX/+K2YpkpJ+SzBffkM/AQ2YE03S0U1kjTLVpoJdhZMOWy8qvXVN9JdLqv2QsaQ6MPYQIuNmwD8zOiYUofLQ==", + "dev": true }, - "cli-cursor": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-2.1.0.tgz", - "integrity": "sha1-s12sN2R5+sw+lHR9QdDQ9SOP/LU=", - "requires": { - "restore-cursor": "^2.0.0" - } + "acorn-walk": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-6.2.0.tgz", + "integrity": "sha512-7evsyfH1cLOCdAzZAd43Cic04yKydNx0cF+7tiA19p1XnLLPU4dpCQOqpjqwokFe//vS0QqfqqjCS2JkiIs0cA==", + "dev": true }, - "cli-table": { - "version": "0.3.1", - "resolved": "https://registry.npmjs.org/cli-table/-/cli-table-0.3.1.tgz", - "integrity": "sha1-9TsFJmqLGguTSz0IIebi3FkUriM=", + "address": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/address/-/address-1.1.2.tgz", + "integrity": "sha512-aT6camzM4xEA54YVJYSqxz1kv4IHnQZRtThJJHhUMRExaU5spC7jX5ugSwTaTgJliIgs4VhZOk7htClvQ/LmRA==", + "dev": true + }, + "adjust-sourcemap-loader": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/adjust-sourcemap-loader/-/adjust-sourcemap-loader-2.0.0.tgz", + "integrity": "sha512-4hFsTsn58+YjrU9qKzML2JSSDqKvN8mUGQ0nNIrfPi8hmIONT4L3uUaT6MKdMsZ9AjsU6D2xDkZxCkbQPxChrA==", "dev": true, "requires": { - "colors": "1.0.3" + "assert": "1.4.1", + "camelcase": "5.0.0", + "loader-utils": "1.2.3", + "object-path": "0.11.4", + "regex-parser": "2.2.10" }, "dependencies": { - "colors": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/colors/-/colors-1.0.3.tgz", - "integrity": "sha1-BDP0TYCWgP3rYO0mDxsMJi6CpAs=", + "camelcase": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.0.0.tgz", + "integrity": "sha512-faqwZqnWxbxn+F1d399ygeamQNy3lPp/H9H6rNrqYh4FSVCtcY+3cub1MxA8o9mDd55mM8Aghuu/kuyYA6VTsA==", "dev": true } } }, - 
"cli-usage": { - "version": "0.1.8", - "resolved": "https://registry.npmjs.org/cli-usage/-/cli-usage-0.1.8.tgz", - "integrity": "sha512-EZJ+ty1TsqdnhZNt2QbI+ed3IUNHTH31blSOJLVph3oL4IExskPRyCDGJH7RuCBPy3QBmWgpbeUxXPhK0isXIw==", + "agent-base": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-4.2.1.tgz", + "integrity": "sha512-JVwXMr9nHYTUXsBFKUqhJwvlcYU/blreOEUkhNR2eXZIvwd+c+o5V4MgDPKWnMS/56awN3TRzIP+KoPn+roQtg==", "dev": true, "requires": { - "marked": "^0.5.0", - "marked-terminal": "^3.0.0" + "es6-promisify": "^5.0.0" } }, - "cli-width": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/cli-width/-/cli-width-2.2.0.tgz", - "integrity": "sha1-/xnt6Kml5XkyQUewwR8PvLq+1jk=" - }, - "cliui": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/cliui/-/cliui-4.1.0.tgz", - "integrity": "sha512-4FG+RSG9DL7uEwRUZXZn3SS34DiDPfzP0VOiEwtUWlE+AR2EIg+hSyvrIgUUfhdgR/UkAeW2QHgeP+hWrXs7jQ==", + "aggregate-error": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.0.1.tgz", + "integrity": "sha512-quoaXsZ9/BLNae5yiNoUz+Nhkwz83GhWwtYFglcjEQB2NDHCIpApbqXxIFnm4Pq/Nvhrsq5sYJFyohrrxnTGAA==", + "dev": true, "requires": { - "string-width": "^2.1.1", - "strip-ansi": "^4.0.0", - "wrap-ansi": "^2.0.0" + "clean-stack": "^2.0.0", + "indent-string": "^4.0.0" }, "dependencies": { - "ansi-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", - "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=" - }, - "strip-ansi": { + "indent-string": { "version": "4.0.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", - "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", + "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", + "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==", + "dev": true + } + } + }, + "airbnb-prop-types": { + "version": "2.15.0", + "resolved": "https://registry.npmjs.org/airbnb-prop-types/-/airbnb-prop-types-2.15.0.tgz", + "integrity": "sha512-jUh2/hfKsRjNFC4XONQrxo/n/3GG4Tn6Hl0WlFQN5PY9OMC9loSCoAYKnZsWaP8wEfd5xcrPloK0Zg6iS1xwVA==", + "dev": true, + "requires": { + "array.prototype.find": "^2.1.0", + "function.prototype.name": "^1.1.1", + "has": "^1.0.3", + "is-regex": "^1.0.4", + "object-is": "^1.0.1", + "object.assign": "^4.1.0", + "object.entries": "^1.1.0", + "prop-types": "^15.7.2", + "prop-types-exact": "^1.2.0", + "react-is": "^16.9.0" + }, + "dependencies": { + "prop-types": { + "version": "15.7.2", + "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.7.2.tgz", + "integrity": "sha512-8QQikdH7//R2vurIJSutZ1smHYTcLpRWEOlHnzcWHmBYrOGUysKwSsrC89BCiFj3CbrfJ/nXFdJepOVrY1GCHQ==", + "dev": true, "requires": { - "ansi-regex": "^3.0.0" + "loose-envify": "^1.4.0", + "object-assign": "^4.1.1", + "react-is": "^16.8.1" } + }, + "react-is": { + "version": "16.10.2", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.10.2.tgz", + "integrity": "sha512-INBT1QEgtcCCgvccr5/86CfD71fw9EPmDxgiJX4I2Ddr6ZsV6iFXsuby+qWJPtmNuMY0zByTsG4468P7nHuNWA==", + "dev": true } } }, - "clone": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/clone/-/clone-1.0.4.tgz", - "integrity": "sha1-2jCcwmPfFZlMaIypAheco8fNfH4=" - }, - "co": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/co/-/co-4.6.0.tgz", - "integrity": "sha1-bqa989hTrlTMuOR7+gvz+QMfsYQ=" - }, - "coa": { - "version": "1.0.4", - 
"resolved": "https://registry.npmjs.org/coa/-/coa-1.0.4.tgz", - "integrity": "sha1-qe8VNmDWqGqL3sAomlxoTSF0Mv0=", + "ajv": { + "version": "6.12.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.0.tgz", + "integrity": "sha512-D6gFiFA0RRLyUbvijN74DWAjXSFxWKaWP7mldxkVhyhAV3+SWA9HEJPHQ2c9soIeTFJqcSdFDGFgdqs1iUU2Hw==", + "dev": true, "requires": { - "q": "^1.1.2" + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" } }, - "code-point-at": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/code-point-at/-/code-point-at-1.1.0.tgz", - "integrity": "sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c=" + "ajv-errors": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/ajv-errors/-/ajv-errors-1.0.1.tgz", + "integrity": "sha512-DCRfO/4nQ+89p/RK43i8Ezd41EqdGIU4ld7nGF8OQ14oc/we5rEntLCUa7+jrn3nn83BosfwZA0wb4pon2o8iQ==", + "dev": true }, - "collection-visit": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/collection-visit/-/collection-visit-1.0.0.tgz", - "integrity": "sha1-S8A3PBZLwykbTTaMgpzxqApZ3KA=", - "requires": { - "map-visit": "^1.0.0", - "object-visit": "^1.0.0" - } + "ajv-keywords": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.2.0.tgz", + "integrity": "sha1-6GuBnGAs+IIa1jdBNpjx3sAhhHo=", + "dev": true }, - "color": { - "version": "0.11.4", - "resolved": "https://registry.npmjs.org/color/-/color-0.11.4.tgz", - "integrity": "sha1-bXtcdPtl6EHNSHkq0e1eB7kE12Q=", - "requires": { - "clone": "^1.0.2", - "color-convert": "^1.3.0", - "color-string": "^0.3.0" - } + "alphanum-sort": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/alphanum-sort/-/alphanum-sort-1.0.2.tgz", + "integrity": "sha1-l6ERlkmyEa0zaR2fn0hqjsn74KM=", + "dev": true }, - "color-convert": { - "version": "1.9.3", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", - "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "ansi-colors": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-3.2.4.tgz", + "integrity": "sha512-hHUXGagefjN2iRrID63xckIvotOXOojhQKWIPUZ4mNUZ9nLZW+7FMNoE1lOkEhNWYsx/7ysGIuJYCiMAA9FnrA==", + "dev": true + }, + "ansi-escapes": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-3.1.0.tgz", + "integrity": "sha512-UgAb8H9D41AQnu/PbWlCofQVcnV4Gs2bBJi9eZPxfU/hgglFh3SMDMENRIqdr7H6XFnXdoknctFByVsCOotTVw==", + "dev": true + }, + "ansi-html": { + "version": "0.0.7", + "resolved": "https://registry.npmjs.org/ansi-html/-/ansi-html-0.0.7.tgz", + "integrity": "sha1-gTWEAhliqenm/QOflA0S9WynhZ4=", + "dev": true + }, + "ansi-regex": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz", + "integrity": "sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==", + "dev": true + }, + "ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "dev": true, "requires": { - "color-name": "1.1.3" + "color-convert": "^1.9.0" } }, - "color-name": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", - "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=" + "ansicolors": { + "version": 
"0.3.2", + "resolved": "https://registry.npmjs.org/ansicolors/-/ansicolors-0.3.2.tgz", + "integrity": "sha1-ZlWX3oap/+Oqm/vmyuXG6kJrSXk=", + "dev": true }, - "color-string": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/color-string/-/color-string-0.3.0.tgz", - "integrity": "sha1-J9RvtnAlxcL6JZk7+/V55HhBuZE=", + "anymatch": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-2.0.0.tgz", + "integrity": "sha512-5teOsQWABXHHBFP9y3skS5P3d/WfWXpv3FUpy+LorMrNYaT9pI4oLMQX7jzQ2KklNpGpWHzdCXTDT2Y3XGlZBw==", + "dev": true, "requires": { - "color-name": "^1.0.0" + "micromatch": "^3.1.4", + "normalize-path": "^2.1.1" } }, - "colormin": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/colormin/-/colormin-1.1.2.tgz", - "integrity": "sha1-6i90IKcrlogaOKrlnsEkpvcpgTM=", + "aproba": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/aproba/-/aproba-1.2.0.tgz", + "integrity": "sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw==", + "dev": true + }, + "argparse": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", "requires": { - "color": "^0.11.0", - "css-color-names": "0.0.4", - "has": "^1.0.1" + "sprintf-js": "~1.0.2" } }, - "colors": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/colors/-/colors-1.1.2.tgz", - "integrity": "sha1-FopHAXVran9RoSzgyXv6KMCE7WM=" - }, - "combined-stream": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.7.tgz", - "integrity": "sha512-brWl9y6vOB1xYPZcpZde3N9zDByXTosAeMDo4p1wzo6UMOX4vumB+TP1RZ76sfE6Md68Q0NJSrE/gbezd4Ul+w==", + "aria-query": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/aria-query/-/aria-query-3.0.0.tgz", + "integrity": "sha1-ZbP8wcoRVajJrmTW7uKX8V1RM8w=", + "dev": true, "requires": { - "delayed-stream": "~1.0.0" + "ast-types-flow": "0.0.7", + "commander": "^2.11.0" } }, - "commander": { - "version": "2.19.0", - "resolved": "https://registry.npmjs.org/commander/-/commander-2.19.0.tgz", - "integrity": "sha512-6tvAOO+D6OENvRAh524Dh9jcfKTYDQAqvqezbCW82xj5X0pSrcpxtvRKHLG0yBY6SD7PSDrJaj+0AiOcKVd1Xg==" + "arity-n": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/arity-n/-/arity-n-1.0.4.tgz", + "integrity": "sha1-2edrEXM+CFacCEeuezmyhgswt0U=", + "dev": true }, - "commondir": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/commondir/-/commondir-1.0.1.tgz", - "integrity": "sha1-3dgA2gxmEnOTzKWVDqloo6rxJTs=" + "arr-diff": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/arr-diff/-/arr-diff-4.0.0.tgz", + "integrity": "sha1-1kYQdP6/7HHn4VI1dhoyml3HxSA=" }, - "component-emitter": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/component-emitter/-/component-emitter-1.2.1.tgz", - "integrity": "sha1-E3kY1teCg/ffemt8WmPhQOaUJeY=" + "arr-flatten": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/arr-flatten/-/arr-flatten-1.1.0.tgz", + "integrity": "sha512-L3hKV5R/p5o81R7O02IGnwpDmkp6E982XhtbuwSe3O4qOtMMMtodicASA1Cny2U+aCXcNpml+m4dPsvsJ3jatg==" }, - "compressible": { - "version": "2.0.15", - "resolved": "https://registry.npmjs.org/compressible/-/compressible-2.0.15.tgz", - "integrity": "sha512-4aE67DL33dSW9gw4CI2H/yTxqHLNcxp0yS6jB+4h+wr3e43+1z7vm0HU9qXOH8j+qjKuL8+UtkOxYQSMq60Ylw==", - "requires": { - "mime-db": ">= 
1.36.0 < 2" - } + "arr-union": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/arr-union/-/arr-union-3.1.0.tgz", + "integrity": "sha1-45sJrqne+Gao8gbiiK9jkZuuOcQ=" }, - "compression": { - "version": "1.7.3", - "resolved": "https://registry.npmjs.org/compression/-/compression-1.7.3.tgz", - "integrity": "sha512-HSjyBG5N1Nnz7tF2+O7A9XUhyjru71/fwgNb7oIsEVHR0WShfs2tIS/EySLgiTe98aOK18YDlMXpzjCXY/n9mg==", + "array-equal": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/array-equal/-/array-equal-1.0.0.tgz", + "integrity": "sha1-jCpe8kcv2ep0KwTHenUJO6J1fJM=", + "dev": true + }, + "array-find-index": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/array-find-index/-/array-find-index-1.0.2.tgz", + "integrity": "sha1-3wEKoSh+Fku9pvlyOwqWoexBh6E=", + "dev": true + }, + "array-flatten": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-2.1.2.tgz", + "integrity": "sha512-hNfzcOV8W4NdualtqBFPyVO+54DSJuZGY9qT4pRroB6S9e3iiido2ISIC5h9R2sPJ8H3FHCIiEnsv1lPXO3KtQ==", + "dev": true + }, + "array-includes": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.1.tgz", + "integrity": "sha512-c2VXaCHl7zPsvpkFsw4nxvFie4fh1ur9bpcgsVkIjqn0H/Xwdg+7fv3n2r/isyS8EBj5b06M9kHyZuIr4El6WQ==", + "dev": true, "requires": { - "accepts": "~1.3.5", - "bytes": "3.0.0", - "compressible": "~2.0.14", - "debug": "2.6.9", - "on-headers": "~1.0.1", - "safe-buffer": "5.1.2", - "vary": "~1.1.2" + "define-properties": "^1.1.3", + "es-abstract": "^1.17.0", + "is-string": "^1.0.5" }, "dependencies": { - "debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "es-abstract": { + "version": "1.17.5", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.17.5.tgz", + "integrity": "sha512-BR9auzDbySxOcfog0tLECW8l28eRGpDpU3Dm3Hp4q/N+VtLTmyj4EUN088XZWQDW/hzj6sYRDXeOFsaAODKvpg==", + "dev": true, "requires": { - "ms": "2.0.0" + "es-to-primitive": "^1.2.1", + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.1", + "is-callable": "^1.1.5", + "is-regex": "^1.0.5", + "object-inspect": "^1.7.0", + "object-keys": "^1.1.1", + "object.assign": "^4.1.0", + "string.prototype.trimleft": "^2.1.1", + "string.prototype.trimright": "^2.1.1" + } + }, + "es-to-primitive": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz", + "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==", + "dev": true, + "requires": { + "is-callable": "^1.1.4", + "is-date-object": "^1.0.1", + "is-symbol": "^1.0.2" + } + }, + "has-symbols": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.1.tgz", + "integrity": "sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg==", + "dev": true + }, + "is-callable": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.1.5.tgz", + "integrity": "sha512-ESKv5sMCJB2jnHTWZ3O5itG+O128Hsus4K4Qh1h2/cgn2vbgnLSVqfV46AeJA9D5EeeLa9w81KUXMtn34zhX+Q==", + "dev": true + }, + "is-regex": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.0.5.tgz", + "integrity": 
"sha512-vlKW17SNq44owv5AQR3Cq0bQPEb8+kF3UKZ2fiZNOWtztYE5i0CzCZxFDwO58qAOWtxdBRVO/V5Qin1wjCqFYQ==", + "dev": true, + "requires": { + "has": "^1.0.3" + } + }, + "is-string": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.5.tgz", + "integrity": "sha512-buY6VNRjhQMiF1qWDouloZlQbRhDPCebwxSjxMjxgemYT46YMd2NR0/H+fBhEfWX4A/w9TBJ+ol+okqJKFE6vQ==", + "dev": true + }, + "object-inspect": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.7.0.tgz", + "integrity": "sha512-a7pEHdh1xKIAgTySUGgLMx/xwDZskN1Ud6egYYN3EdRW4ZMPNEDUTF+hwy2LUC+Bl+SyLXANnwz/jyh/qutKUw==", + "dev": true + }, + "object-keys": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", + "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==", + "dev": true + }, + "string.prototype.trimleft": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/string.prototype.trimleft/-/string.prototype.trimleft-2.1.2.tgz", + "integrity": "sha512-gCA0tza1JBvqr3bfAIFJGqfdRTyPae82+KTnm3coDXkZN9wnuW3HjGgN386D7hfv5CHQYCI022/rJPVlqXyHSw==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.5", + "string.prototype.trimstart": "^1.0.0" + } + }, + "string.prototype.trimright": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/string.prototype.trimright/-/string.prototype.trimright-2.1.2.tgz", + "integrity": "sha512-ZNRQ7sY3KroTaYjRS6EbNiiHrOkjihL9aQE/8gfQ4DtAC/aEBRHFJa44OmoWxGGqXuJlfKkZW4WcXErGr+9ZFg==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.5", + "string.prototype.trimend": "^1.0.0" } } } }, - "concat-map": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", - "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=" - }, - "concat-stream": { - "version": "1.6.2", - "resolved": "https://registry.npmjs.org/concat-stream/-/concat-stream-1.6.2.tgz", - "integrity": "sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw==", + "array-union": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/array-union/-/array-union-1.0.2.tgz", + "integrity": "sha1-mjRBDk9OPaI96jdb5b5w8kd47Dk=", + "dev": true, "requires": { - "buffer-from": "^1.0.0", - "inherits": "^2.0.3", - "readable-stream": "^2.2.2", - "typedarray": "^0.0.6" + "array-uniq": "^1.0.1" } }, - "configstore": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/configstore/-/configstore-3.1.2.tgz", - "integrity": "sha512-vtv5HtGjcYUgFrXc6Kx747B83MRRVS5R1VTEQoXvuP+kMI+if6uywV0nDGoiydJRy4yk7h9od5Og0kxx4zUXmw==", - "requires": { - "dot-prop": "^4.1.0", - "graceful-fs": "^4.1.2", - "make-dir": "^1.0.0", - "unique-string": "^1.0.0", - "write-file-atomic": "^2.0.0", - "xdg-basedir": "^3.0.0" - } + "array-uniq": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/array-uniq/-/array-uniq-1.0.3.tgz", + "integrity": "sha1-r2rId6Jcx/dOBYiUdThY39sk/bY=", + "dev": true }, - "connect-history-api-fallback": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/connect-history-api-fallback/-/connect-history-api-fallback-1.5.0.tgz", - "integrity": "sha1-sGhzk0vF40T+9hGhlqb6rgruAVo=" + "array-unique": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/array-unique/-/array-unique-0.3.2.tgz", + "integrity": "sha1-qJS3XUvE9s1nnvMkSp/Y9Gri1Cg=" }, - "console-browserify": { - 
"version": "1.1.0", - "resolved": "https://registry.npmjs.org/console-browserify/-/console-browserify-1.1.0.tgz", - "integrity": "sha1-8CQcRXMKn8YyOyBtvzjtx0HQuxA=", + "array.prototype.find": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/array.prototype.find/-/array.prototype.find-2.1.0.tgz", + "integrity": "sha512-Wn41+K1yuO5p7wRZDl7890c3xvv5UBrfVXTVIe28rSQb6LS0fZMDrQB6PAcxQFRFy6vJTLDc3A2+3CjQdzVKRg==", + "dev": true, "requires": { - "date-now": "^0.1.4" + "define-properties": "^1.1.3", + "es-abstract": "^1.13.0" } }, - "constant-case": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/constant-case/-/constant-case-2.0.0.tgz", - "integrity": "sha1-QXV2TTidP6nI7NKRhu1gBSQ7akY=", + "array.prototype.flat": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.2.2.tgz", + "integrity": "sha512-VXjh7lAL4KXKF2hY4FnEW9eRW6IhdvFW1sN/JwLbmECbCgACCnBHNyP3lFiYuttr0jxRN9Bsc5+G27dMseSWqQ==", "dev": true, "requires": { - "snake-case": "^2.1.0", - "upper-case": "^1.1.1" + "define-properties": "^1.1.3", + "es-abstract": "^1.15.0", + "function-bind": "^1.1.1" + }, + "dependencies": { + "es-abstract": { + "version": "1.15.0", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.15.0.tgz", + "integrity": "sha512-bhkEqWJ2t2lMeaJDuk7okMkJWI/yqgH/EoGwpcvv0XW9RWQsRspI4wt6xuyuvMvvQE3gg/D9HXppgk21w78GyQ==", + "dev": true, + "requires": { + "es-to-primitive": "^1.2.0", + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.0", + "is-callable": "^1.1.4", + "is-regex": "^1.0.4", + "object-inspect": "^1.6.0", + "object-keys": "^1.1.1", + "string.prototype.trimleft": "^2.1.0", + "string.prototype.trimright": "^2.1.0" + } + }, + "object-keys": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", + "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==", + "dev": true + } } }, - "constants-browserify": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/constants-browserify/-/constants-browserify-1.0.0.tgz", - "integrity": "sha1-wguW2MYXdIqvHBYCF2DNJ/y4y3U=" - }, - "content-disposition": { - "version": "0.5.2", - "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.2.tgz", - "integrity": "sha1-DPaLud318r55YcOoUXjLhdunjLQ=" + "arrify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz", + "integrity": "sha1-iYUI2iIm84DfkEcoRWhJwVAaSw0=", + "dev": true }, - "content-type": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz", - "integrity": "sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==" + "asap": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz", + "integrity": "sha1-5QNHYR1+aQlDIIu9r+vLwvuGbUY=" }, - "convert-source-map": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.6.0.tgz", - "integrity": "sha512-eFu7XigvxdZ1ETfbgPBohgyQ/Z++C0eEhTor0qRwBw9unw+L0/6V8wkSuGgzdThkiS5lSpdptOQPD8Ak40a+7A==", + "asn1": { + "version": "0.2.4", + "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.4.tgz", + "integrity": "sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg==", + "dev": true, "requires": { - "safe-buffer": "~5.1.1" + "safer-buffer": "~2.1.0" } }, - "cookie": { - 
"version": "0.3.1", - "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.3.1.tgz", - "integrity": "sha1-5+Ch+e9DtMi6klxcWpboBtFoc7s=" - }, - "cookie-signature": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", - "integrity": "sha1-4wOogrNCzD7oylE6eZmXNNqzriw=" + "asn1.js": { + "version": "4.10.1", + "resolved": "https://registry.npmjs.org/asn1.js/-/asn1.js-4.10.1.tgz", + "integrity": "sha512-p32cOF5q0Zqs9uBiONKYLm6BClCoBCM5O9JfeUSlnQLBTxYdTK+pW+nXflm8UkKd2UYlEbYz5qEi0JuZR9ckSw==", + "dev": true, + "requires": { + "bn.js": "^4.0.0", + "inherits": "^2.0.1", + "minimalistic-assert": "^1.0.0" + } }, - "copy-concurrently": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/copy-concurrently/-/copy-concurrently-1.0.5.tgz", - "integrity": "sha512-f2domd9fsVDFtaFcbaRZuYXwtdmnzqbADSwhSWYxYB/Q8zsdUUFMXVRwXGDMWmbEzAn1kdRrtI1T/KTFOL4X2A==", + "assert": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/assert/-/assert-1.4.1.tgz", + "integrity": "sha1-mZEtWRg2tab1s0XA8H7vwI/GXZE=", + "dev": true, "requires": { - "aproba": "^1.1.1", - "fs-write-stream-atomic": "^1.0.8", - "iferr": "^0.1.5", - "mkdirp": "^0.5.1", - "rimraf": "^2.5.4", - "run-queue": "^1.0.0" + "util": "0.10.3" + }, + "dependencies": { + "inherits": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.1.tgz", + "integrity": "sha1-sX0I0ya0Qj5Wjv9xn5GwscvfafE=", + "dev": true + }, + "util": { + "version": "0.10.3", + "resolved": "https://registry.npmjs.org/util/-/util-0.10.3.tgz", + "integrity": "sha1-evsa/lCAUkZInj23/g7TeTNqwPk=", + "dev": true, + "requires": { + "inherits": "2.0.1" + } + } } }, - "copy-descriptor": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/copy-descriptor/-/copy-descriptor-0.1.1.tgz", - "integrity": "sha1-Z29us8OZl8LuGsOpJP1hJHSPV40=" + "assert-plus": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", + "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=", + "dev": true }, - "core-js": { - "version": "1.2.7", - "resolved": "https://registry.npmjs.org/core-js/-/core-js-1.2.7.tgz", - "integrity": "sha1-ZSKUwUZR2yj6k70tX/KYOk8IxjY=" + "assign-symbols": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/assign-symbols/-/assign-symbols-1.0.0.tgz", + "integrity": "sha1-WWZ/QfrdTyDMvCu5a41Pf3jsA2c=" }, - "core-util-is": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", - "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=" + "ast-types-flow": { + "version": "0.0.7", + "resolved": "https://registry.npmjs.org/ast-types-flow/-/ast-types-flow-0.0.7.tgz", + "integrity": "sha1-9wtzXGvKGlycItmCw+Oef+ujva0=", + "dev": true }, - "cosmiconfig": { - "version": "2.2.2", - "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-2.2.2.tgz", - "integrity": "sha512-GiNXLwAFPYHy25XmTPpafYvn3CLAkJ8FLsscq78MQd1Kh0OU6Yzhn4eV2MVF4G9WEQZoWEGltatdR+ntGPMl5A==", - "requires": { - "is-directory": "^0.3.1", - "js-yaml": "^3.4.3", - "minimist": "^1.2.0", - "object-assign": "^4.1.0", - "os-homedir": "^1.0.1", - "parse-json": "^2.2.0", - "require-from-string": "^1.1.0" - } + "astral-regex": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/astral-regex/-/astral-regex-1.0.0.tgz", + "integrity": "sha512-+Ryf6g3BKoRc7jfp7ad8tM4TtMiaWvbF/1/sQcZPkkS7ag3D5nMBCe2UfOTONtAkaG0tO0ij3C5Lwmf1EiyjHg==", + "dev": true }, - "coveralls": { - "version": "3.0.2", - 
"resolved": "https://registry.npmjs.org/coveralls/-/coveralls-3.0.2.tgz", - "integrity": "sha512-Tv0LKe/MkBOilH2v7WBiTBdudg2ChfGbdXafc/s330djpF3zKOmuehTeRwjXWc7pzfj9FrDUTA7tEx6Div8NFw==", + "async": { + "version": "2.6.3", + "resolved": "https://registry.npmjs.org/async/-/async-2.6.3.tgz", + "integrity": "sha512-zflvls11DCy+dQWzTW2dzuilv8Z5X/pjfmZOWba6TNIVDm+2UDaJmXSOXlasHKfNBs8oo3M0aT50fDEWfKZjXg==", "dev": true, "requires": { - "growl": "~> 1.10.0", - "js-yaml": "^3.11.0", - "lcov-parse": "^0.0.10", - "log-driver": "^1.2.7", - "minimist": "^1.2.0", - "request": "^2.85.0" + "lodash": "^4.17.14" } }, - "cpx": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/cpx/-/cpx-1.5.0.tgz", - "integrity": "sha1-GFvgGFEdhycN7czCkxceN2VauI8=", - "requires": { - "babel-runtime": "^6.9.2", - "chokidar": "^1.6.0", - "duplexer": "^0.1.1", - "glob": "^7.0.5", - "glob2base": "^0.0.12", - "minimatch": "^3.0.2", - "mkdirp": "^0.5.1", - "resolve": "^1.1.7", - "safe-buffer": "^5.0.1", - "shell-quote": "^1.6.1", - "subarg": "^1.0.0" - } + "async-each": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/async-each/-/async-each-1.0.1.tgz", + "integrity": "sha1-GdOGodntxufByF04iu28xW0zYC0=", + "dev": true }, - "create-ecdh": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/create-ecdh/-/create-ecdh-4.0.3.tgz", - "integrity": "sha512-GbEHQPMOswGpKXM9kCWVrremUcBmjteUaQ01T9rkKCPDXfUHX0IoP9LpHYo2NPFampa4e+/pFDc3jQdxrxQLaw==", - "requires": { - "bn.js": "^4.1.0", - "elliptic": "^6.0.0" - } + "async-limiter": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/async-limiter/-/async-limiter-1.0.0.tgz", + "integrity": "sha512-jp/uFnooOiO+L211eZOoSyzpOITMXx1rBITauYykG3BRYPu8h0UcxsPNB04RR5vo4Tyz3+ay17tR6JVf9qzYWg==", + "dev": true }, - "create-error-class": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/create-error-class/-/create-error-class-3.0.2.tgz", - "integrity": "sha1-Br56vvlHo/FKMP1hBnHUAbyot7Y=", - "requires": { - "capture-stack-trace": "^1.0.0" - } + "asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=", + "dev": true }, - "create-hash": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/create-hash/-/create-hash-1.2.0.tgz", - "integrity": "sha512-z00bCGNHDG8mHAkP7CtT1qVu+bFQUPjYq/4Iv3C3kWjTFV10zIjfSoeqXo9Asws8gwSHDGj/hl2u4OGIjapeCg==", - "requires": { - "cipher-base": "^1.0.1", - "inherits": "^2.0.1", - "md5.js": "^1.3.4", - "ripemd160": "^2.0.1", - "sha.js": "^2.4.0" - } + "atob": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/atob/-/atob-2.1.2.tgz", + "integrity": "sha512-Wm6ukoaOGJi/73p/cl2GvLjTI5JM1k/O14isD73YML8StrH/7/lRFgmg8nICZgD3bZZvjwCGxtMOD3wWNAu8cg==" }, - "create-hmac": { - "version": "1.1.7", - "resolved": "https://registry.npmjs.org/create-hmac/-/create-hmac-1.1.7.tgz", - "integrity": "sha512-MJG9liiZ+ogc4TzUwuvbER1JRdgvUFSB5+VR/g5h82fGaIRWMWddtKBHi7/sVhfjQZ6SehlyhvQYrcYkaUIpLg==", + "attr-accept": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/attr-accept/-/attr-accept-1.1.3.tgz", + "integrity": "sha512-iT40nudw8zmCweivz6j58g+RT33I4KbaIvRUhjNmDwO2WmsQUxFEZZYZ5w3vXe5x5MX9D7mfvA/XaLOZYFR9EQ==", "requires": { - "cipher-base": "^1.0.3", - "create-hash": "^1.1.0", - "inherits": "^2.0.1", - "ripemd160": "^2.0.0", - "safe-buffer": "^5.0.1", - "sha.js": "^2.4.8" + "core-js": "^2.5.0" + }, + "dependencies": { + "core-js": { + "version": "2.6.1", + "resolved": 
"https://registry.npmjs.org/core-js/-/core-js-2.6.1.tgz", + "integrity": "sha512-L72mmmEayPJBejKIWe2pYtGis5r0tQ5NaJekdhyXgeMQTpJoBsH0NL4ElY2LfSoV15xeQWKQ+XTTOZdyero5Xg==" + } } }, - "cross-spawn": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-5.1.0.tgz", - "integrity": "sha1-6L0O/uWPz/b4+UUQoKVUu/ojVEk=", + "autoprefixer": { + "version": "9.7.6", + "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-9.7.6.tgz", + "integrity": "sha512-F7cYpbN7uVVhACZTeeIeealwdGM6wMtfWARVLTy5xmKtgVdBNJvbDRoCK3YO1orcs7gv/KwYlb3iXwu9Ug9BkQ==", + "dev": true, "requires": { - "lru-cache": "^4.0.1", - "shebang-command": "^1.2.0", - "which": "^1.2.9" + "browserslist": "^4.11.1", + "caniuse-lite": "^1.0.30001039", + "chalk": "^2.4.2", + "normalize-range": "^0.1.2", + "num2fraction": "^1.2.2", + "postcss": "^7.0.27", + "postcss-value-parser": "^4.0.3" + }, + "dependencies": { + "browserslist": { + "version": "4.11.1", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.11.1.tgz", + "integrity": "sha512-DCTr3kDrKEYNw6Jb9HFxVLQNaue8z+0ZfRBRjmCunKDEXEBajKDj2Y+Uelg+Pi29OnvaSGwjOsnRyNEkXzHg5g==", + "dev": true, + "requires": { + "caniuse-lite": "^1.0.30001038", + "electron-to-chromium": "^1.3.390", + "node-releases": "^1.1.53", + "pkg-up": "^2.0.0" + } + }, + "caniuse-lite": { + "version": "1.0.30001039", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001039.tgz", + "integrity": "sha512-SezbWCTT34eyFoWHgx8UWso7YtvtM7oosmFoXbCkdC6qJzRfBTeTgE9REtKtiuKXuMwWTZEvdnFNGAyVMorv8Q==", + "dev": true + }, + "electron-to-chromium": { + "version": "1.3.399", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.3.399.tgz", + "integrity": "sha512-+NBhK0/v17pls7CSh3Cx5Ir3tsGmtLPMMAO4Nz272bre2wzdykLEsev5wjOd3rYMt2/kSS681ufFT7Dywxq1sw==", + "dev": true + }, + "node-releases": { + "version": "1.1.53", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-1.1.53.tgz", + "integrity": "sha512-wp8zyQVwef2hpZ/dJH7SfSrIPD6YoJz6BDQDpGEkcA0s3LpAQoxBIYmfIq6QAhC1DhwsyCgTaTTcONwX8qzCuQ==", + "dev": true + }, + "postcss-value-parser": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-4.0.3.tgz", + "integrity": "sha512-N7h4pG+Nnu5BEIzyeaaIYWs0LI5XC40OrRh5L60z0QjFsqGWcHcbkBvpe1WYpcIS9yQ8sOi/vIPt1ejQCrMVrg==", + "dev": true + } } }, - "crypto-browserify": { - "version": "3.12.0", - "resolved": "https://registry.npmjs.org/crypto-browserify/-/crypto-browserify-3.12.0.tgz", - "integrity": "sha512-fz4spIh+znjO2VjL+IdhEpRJ3YN6sMzITSBijk6FK2UvTqruSQW+/cCZTSNsMiZNvUeq0CqurF+dAbyiGOY6Wg==", - "requires": { - "browserify-cipher": "^1.0.0", - "browserify-sign": "^4.0.0", - "create-ecdh": "^4.0.0", - "create-hash": "^1.1.0", - "create-hmac": "^1.1.0", - "diffie-hellman": "^5.0.0", - "inherits": "^2.0.1", - "pbkdf2": "^3.0.3", - "public-encrypt": "^4.0.0", - "randombytes": "^2.0.0", - "randomfill": "^1.0.3" - } + "aws-sign2": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz", + "integrity": "sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg=", + "dev": true }, - "crypto-random-string": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/crypto-random-string/-/crypto-random-string-1.0.0.tgz", - "integrity": "sha1-ojD2T1aDEOFJgAmUB5DsmVRbyn4=" + "aws4": { + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.8.0.tgz", + "integrity": 
"sha512-ReZxvNHIOv88FlT7rxcXIIC0fPt4KZqZbOlivyWtXLt8ESx84zd3kMC6iK5jVeS2qt+g7ftS7ye4fi06X5rtRQ==", + "dev": true }, - "css-color-names": { - "version": "0.0.4", - "resolved": "https://registry.npmjs.org/css-color-names/-/css-color-names-0.0.4.tgz", - "integrity": "sha1-gIrcLnnPhHOAabZGyyDsJ762KeA=" + "axobject-query": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/axobject-query/-/axobject-query-2.1.2.tgz", + "integrity": "sha512-ICt34ZmrVt8UQnvPl6TVyDTkmhXmAyAT4Jh5ugfGUX4MOrZ+U/ZY6/sdylRw3qGNr9Ub5AJsaHeDMzNLehRdOQ==", + "dev": true }, - "css-loader": { - "version": "0.28.7", - "resolved": "https://registry.npmjs.org/css-loader/-/css-loader-0.28.7.tgz", - "integrity": "sha512-GxMpax8a/VgcfRrVy0gXD6yLd5ePYbXX/5zGgTVYp4wXtJklS8Z2VaUArJgc//f6/Dzil7BaJObdSv8eKKCPgg==", - "requires": { - "babel-code-frame": "^6.11.0", - "css-selector-tokenizer": "^0.7.0", - "cssnano": ">=2.6.1 <4", - "icss-utils": "^2.1.0", - "loader-utils": "^1.0.2", - "lodash.camelcase": "^4.3.0", - "object-assign": "^4.0.1", - "postcss": "^5.0.6", - "postcss-modules-extract-imports": "^1.0.0", - "postcss-modules-local-by-default": "^1.0.1", - "postcss-modules-scope": "^1.0.0", - "postcss-modules-values": "^1.1.0", - "postcss-value-parser": "^3.3.0", - "source-list-map": "^2.0.0" + "babel-code-frame": { + "version": "6.26.0", + "resolved": "https://registry.npmjs.org/babel-code-frame/-/babel-code-frame-6.26.0.tgz", + "integrity": "sha1-Y/1D99weO7fONZR9uP42mj9Yx0s=", + "dev": true, + "requires": { + "chalk": "^1.1.3", + "esutils": "^2.0.2", + "js-tokens": "^3.0.2" }, "dependencies": { - "has-flag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-1.0.0.tgz", - "integrity": "sha1-nZ55MWXOAXoA8AQYxD+UKnsdEfo=" + "ansi-regex": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", + "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=", + "dev": true }, - "postcss": { - "version": "5.2.18", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-5.2.18.tgz", - "integrity": "sha512-zrUjRRe1bpXKsX1qAJNJjqZViErVuyEkMTRrwu4ud4sbTtIBRmtaYDrHmcGgmrbsW3MHfmtIf+vJumgQn+PrXg==", + "ansi-styles": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.2.1.tgz", + "integrity": "sha1-tDLdM1i2NM914eRmQ2gkBTPB3b4=", + "dev": true + }, + "chalk": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-1.1.3.tgz", + "integrity": "sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg=", + "dev": true, "requires": { - "chalk": "^1.1.3", - "js-base64": "^2.1.9", - "source-map": "^0.5.6", - "supports-color": "^3.2.3" + "ansi-styles": "^2.2.1", + "escape-string-regexp": "^1.0.2", + "has-ansi": "^2.0.0", + "strip-ansi": "^3.0.0", + "supports-color": "^2.0.0" } }, - "supports-color": { - "version": "3.2.3", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-3.2.3.tgz", - "integrity": "sha1-ZawFBLOVQXHYpklGsq48u4pfVPY=", + "js-tokens": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-3.0.2.tgz", + "integrity": "sha1-mGbfOVECEw449/mWvOtlRDIJwls=", + "dev": true + }, + "strip-ansi": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", + "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", + "dev": true, "requires": { - "has-flag": "^1.0.0" + "ansi-regex": "^2.0.0" } + }, + "supports-color": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-2.0.0.tgz", + 
"integrity": "sha1-U10EXOa2Nj+kARcIRimZXp3zJMc=", + "dev": true } } }, - "css-select": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/css-select/-/css-select-1.2.0.tgz", - "integrity": "sha1-KzoRBTnFNV8c2NMUYj6HCxIeyFg=", - "requires": { - "boolbase": "~1.0.0", - "css-what": "2.1", - "domutils": "1.5.1", - "nth-check": "~1.0.1" - } - }, - "css-selector-tokenizer": { - "version": "0.7.1", - "resolved": "https://registry.npmjs.org/css-selector-tokenizer/-/css-selector-tokenizer-0.7.1.tgz", - "integrity": "sha512-xYL0AMZJ4gFzJQsHUKa5jiWWi2vH77WVNg7JYRyewwj6oPh4yb/y6Y9ZCw9dsj/9UauMhtuxR+ogQd//EdEVNA==", + "babel-eslint": { + "version": "10.1.0", + "resolved": "https://registry.npmjs.org/babel-eslint/-/babel-eslint-10.1.0.tgz", + "integrity": "sha512-ifWaTHQ0ce+448CYop8AdrQiBsGrnC+bMgfyKFdi6EsPLTAWG+QfyDeM6OH+FmWnKvEq5NnBMLvlBUPKQZoDSg==", + "dev": true, "requires": { - "cssesc": "^0.1.0", - "fastparse": "^1.1.1", - "regexpu-core": "^1.0.0" + "@babel/code-frame": "^7.0.0", + "@babel/parser": "^7.7.0", + "@babel/traverse": "^7.7.0", + "@babel/types": "^7.7.0", + "eslint-visitor-keys": "^1.0.0", + "resolve": "^1.12.0" }, "dependencies": { - "regexpu-core": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/regexpu-core/-/regexpu-core-1.0.0.tgz", - "integrity": "sha1-hqdj9Y7k18L2sQLkdkBQ3n7ZDGs=", + "resolve": { + "version": "1.15.1", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.15.1.tgz", + "integrity": "sha512-84oo6ZTtoTUpjgNEr5SJyzQhzL72gaRodsSfyxC/AXRvwu0Yse9H8eF9IpGo7b8YetZhlI6v7ZQ6bKBFV/6S7w==", + "dev": true, "requires": { - "regenerate": "^1.2.1", - "regjsgen": "^0.2.0", - "regjsparser": "^0.1.4" + "path-parse": "^1.0.6" } } } }, - "css-vendor": { - "version": "0.3.8", - "resolved": "https://registry.npmjs.org/css-vendor/-/css-vendor-0.3.8.tgz", - "integrity": "sha1-ZCHP0wNM5mT+dnOXL9ARn8KJQfo=", + "babel-extract-comments": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/babel-extract-comments/-/babel-extract-comments-1.0.0.tgz", + "integrity": "sha512-qWWzi4TlddohA91bFwgt6zO/J0X+io7Qp184Fw0m2JYRSTZnJbFR8+07KmzudHCZgOiKRCrjhylwv9Xd8gfhVQ==", + "dev": true, "requires": { - "is-in-browser": "^1.0.2" + "babylon": "^6.18.0" } }, - "css-what": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/css-what/-/css-what-2.1.2.tgz", - "integrity": "sha512-wan8dMWQ0GUeF7DGEPVjhHemVW/vy6xUYmFzRY8RYqgA0JtXC9rJmbScBjqSu6dg9q0lwPQy6ZAmJVr3PPTvqQ==" - }, - "cssesc": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/cssesc/-/cssesc-0.1.0.tgz", - "integrity": "sha1-yBSQPkViM3GgR3tAEJqq++6t27Q=" + "babel-jest": { + "version": "24.9.0", + "resolved": "https://registry.npmjs.org/babel-jest/-/babel-jest-24.9.0.tgz", + "integrity": "sha512-ntuddfyiN+EhMw58PTNL1ph4C9rECiQXjI4nMMBKBaNjXvqLdkXpPRcMSr4iyBrJg/+wz9brFUD6RhOAT6r4Iw==", + "dev": true, + "requires": { + "@jest/transform": "^24.9.0", + "@jest/types": "^24.9.0", + "@types/babel__core": "^7.1.0", + "babel-plugin-istanbul": "^5.1.0", + "babel-preset-jest": "^24.9.0", + "chalk": "^2.4.2", + "slash": "^2.0.0" + } }, - "cssnano": { - "version": "3.10.0", - "resolved": "https://registry.npmjs.org/cssnano/-/cssnano-3.10.0.tgz", - "integrity": "sha1-Tzj2zqK5sX+gFJDyPx3GjqZcHDg=", + "babel-loader": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/babel-loader/-/babel-loader-8.1.0.tgz", + "integrity": "sha512-7q7nC1tYOrqvUrN3LQK4GwSk/TQorZSOlO9C+RZDZpODgyN4ZlCqE5q9cDsyWOliN+aU9B4JX01xK9eJXowJLw==", + "dev": true, "requires": { - "autoprefixer": "^6.3.1", 
- "decamelize": "^1.1.2", - "defined": "^1.0.0", - "has": "^1.0.1", - "object-assign": "^4.0.1", - "postcss": "^5.0.14", - "postcss-calc": "^5.2.0", - "postcss-colormin": "^2.1.8", - "postcss-convert-values": "^2.3.4", - "postcss-discard-comments": "^2.0.4", - "postcss-discard-duplicates": "^2.0.1", - "postcss-discard-empty": "^2.0.1", - "postcss-discard-overridden": "^0.1.1", - "postcss-discard-unused": "^2.2.1", - "postcss-filter-plugins": "^2.0.0", - "postcss-merge-idents": "^2.1.5", - "postcss-merge-longhand": "^2.0.1", - "postcss-merge-rules": "^2.0.3", - "postcss-minify-font-values": "^1.0.2", - "postcss-minify-gradients": "^1.0.1", - "postcss-minify-params": "^1.0.4", - "postcss-minify-selectors": "^2.0.4", - "postcss-normalize-charset": "^1.1.0", - "postcss-normalize-url": "^3.0.7", - "postcss-ordered-values": "^2.1.0", - "postcss-reduce-idents": "^2.2.2", - "postcss-reduce-initial": "^1.0.0", - "postcss-reduce-transforms": "^1.0.3", - "postcss-svgo": "^2.1.1", - "postcss-unique-selectors": "^2.0.2", - "postcss-value-parser": "^3.2.3", - "postcss-zindex": "^2.0.1" - }, - "dependencies": { - "autoprefixer": { - "version": "6.7.7", - "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-6.7.7.tgz", - "integrity": "sha1-Hb0cg1ZY41zj+ZhAmdsAWFx4IBQ=", - "requires": { - "browserslist": "^1.7.6", - "caniuse-db": "^1.0.30000634", - "normalize-range": "^0.1.2", - "num2fraction": "^1.2.2", - "postcss": "^5.2.16", - "postcss-value-parser": "^3.2.3" - } - }, - "browserslist": { - "version": "1.7.7", - "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-1.7.7.tgz", - "integrity": "sha1-C9dnBCWL6CmyOYu1Dkti0aFmsLk=", - "requires": { - "caniuse-db": "^1.0.30000639", - "electron-to-chromium": "^1.2.7" - } + "find-cache-dir": "^2.1.0", + "loader-utils": "^1.4.0", + "mkdirp": "^0.5.3", + "pify": "^4.0.1", + "schema-utils": "^2.6.5" + }, + "dependencies": { + "ajv-keywords": { + "version": "3.4.1", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.4.1.tgz", + "integrity": "sha512-RO1ibKvd27e6FEShVFfPALuHI3WjSVNeK5FIsmme/LYRNxjKuNj+Dt7bucLa6NdSv3JcVTyMlm9kGR84z1XpaQ==", + "dev": true }, - "has-flag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-1.0.0.tgz", - "integrity": "sha1-nZ55MWXOAXoA8AQYxD+UKnsdEfo=" + "emojis-list": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/emojis-list/-/emojis-list-3.0.0.tgz", + "integrity": "sha512-/kyM18EfinwXZbno9FyUGeFh87KC8HRQBQGildHZbEuRyWFOmv1U10o9BBp8XVZDVNNuQKyIGIu5ZYAAXJ0V2Q==", + "dev": true }, - "postcss": { - "version": "5.2.18", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-5.2.18.tgz", - "integrity": "sha512-zrUjRRe1bpXKsX1qAJNJjqZViErVuyEkMTRrwu4ud4sbTtIBRmtaYDrHmcGgmrbsW3MHfmtIf+vJumgQn+PrXg==", + "loader-utils": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/loader-utils/-/loader-utils-1.4.0.tgz", + "integrity": "sha512-qH0WSMBtn/oHuwjy/NucEgbx5dbxxnxup9s4PVXJUDHZBQY+s0NWA9rJf53RBnQZxfch7euUui7hpoAPvALZdA==", + "dev": true, "requires": { - "chalk": "^1.1.3", - "js-base64": "^2.1.9", - "source-map": "^0.5.6", - "supports-color": "^3.2.3" + "big.js": "^5.2.2", + "emojis-list": "^3.0.0", + "json5": "^1.0.1" } }, - "supports-color": { - "version": "3.2.3", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-3.2.3.tgz", - "integrity": "sha1-ZawFBLOVQXHYpklGsq48u4pfVPY=", + "pify": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz", + "integrity": 
"sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==", + "dev": true + }, + "schema-utils": { + "version": "2.6.5", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-2.6.5.tgz", + "integrity": "sha512-5KXuwKziQrTVHh8j/Uxz+QUbxkaLW9X/86NBlx/gnKgtsZA2GIVMUn17qWhRFwF8jdYb3Dig5hRO/W5mZqy6SQ==", + "dev": true, "requires": { - "has-flag": "^1.0.0" + "ajv": "^6.12.0", + "ajv-keywords": "^3.4.1" } } } }, - "csso": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/csso/-/csso-2.3.2.tgz", - "integrity": "sha1-3dUsWHAz9J6Utx/FVWnyUuj/X4U=", + "babel-plugin-dynamic-import-node": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/babel-plugin-dynamic-import-node/-/babel-plugin-dynamic-import-node-2.3.0.tgz", + "integrity": "sha512-o6qFkpeQEBxcqt0XYlWzAVxNCSCZdUgcR8IRlhD/8DylxjjO4foPcvTW0GGKa/cVt3rvxZ7o5ippJ+/0nvLhlQ==", + "dev": true, "requires": { - "clap": "^1.0.9", - "source-map": "^0.5.3" + "object.assign": "^4.1.0" } }, - "cssom": { - "version": "0.3.4", - "resolved": "https://registry.npmjs.org/cssom/-/cssom-0.3.4.tgz", - "integrity": "sha512-+7prCSORpXNeR4/fUP3rL+TzqtiFfhMvTd7uEqMdgPvLPt4+uzFUeufx5RHjGTACCargg/DiEt/moMQmvnfkog==" - }, - "cssstyle": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/cssstyle/-/cssstyle-1.1.1.tgz", - "integrity": "sha512-364AI1l/M5TYcFH83JnOH/pSqgaNnKmYgKrm0didZMGKWjQB60dymwWy1rKUgL3J1ffdq9xVi2yGLHdSjjSNog==", + "babel-plugin-istanbul": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/babel-plugin-istanbul/-/babel-plugin-istanbul-5.2.0.tgz", + "integrity": "sha512-5LphC0USA8t4i1zCtjbbNb6jJj/9+X6P37Qfirc/70EQ34xKlMW+a1RHGwxGI+SwWpNwZ27HqvzAobeqaXwiZw==", + "dev": true, "requires": { - "cssom": "0.3.x" - } - }, - "csstype": { - "version": "2.6.0", - "resolved": "https://registry.npmjs.org/csstype/-/csstype-2.6.0.tgz", + "@babel/helper-plugin-utils": "^7.0.0", + "find-up": "^3.0.0", + "istanbul-lib-instrument": "^3.3.0", + "test-exclude": "^5.2.3" + }, + "dependencies": { + "find-up": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "dev": true, + "requires": { + "locate-path": "^3.0.0" + } + }, + "locate-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "dev": true, + "requires": { + "p-locate": "^3.0.0", + "path-exists": "^3.0.0" + } + }, + "p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "requires": { + "p-try": "^2.0.0" + } + }, + "p-locate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "dev": true, + "requires": { + "p-limit": "^2.0.0" + } + }, + "p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "dev": true + } + } + }, + "babel-plugin-jest-hoist": { + "version": "24.9.0", + "resolved": 
"https://registry.npmjs.org/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-24.9.0.tgz", + "integrity": "sha512-2EMA2P8Vp7lG0RAzr4HXqtYwacfMErOuv1U3wrvxHX6rD1sV6xS3WXG3r8TRQ2r6w8OhvSdWt+z41hQNwNm3Xw==", + "dev": true, + "requires": { + "@types/babel__traverse": "^7.0.6" + } + }, + "babel-plugin-macros": { + "version": "2.8.0", + "resolved": "https://registry.npmjs.org/babel-plugin-macros/-/babel-plugin-macros-2.8.0.tgz", + "integrity": "sha512-SEP5kJpfGYqYKpBrj5XU3ahw5p5GOHJ0U5ssOSQ/WBVdwkD2Dzlce95exQTs3jOVWPPKLBN2rlEWkCK7dSmLvg==", + "dev": true, + "requires": { + "@babel/runtime": "^7.7.2", + "cosmiconfig": "^6.0.0", + "resolve": "^1.12.0" + }, + "dependencies": { + "@babel/runtime": { + "version": "7.9.2", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.9.2.tgz", + "integrity": "sha512-NE2DtOdufG7R5vnfQUTehdTfNycfUANEtCa9PssN9O/xmTzP4E08UI797ixaei6hBEVL9BI/PsdJS5x7mWoB9Q==", + "dev": true, + "requires": { + "regenerator-runtime": "^0.13.4" + } + }, + "cosmiconfig": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-6.0.0.tgz", + "integrity": "sha512-xb3ZL6+L8b9JLLCx3ZdoZy4+2ECphCMo2PwqgP1tlfVq6M6YReyzBJtvWWtbDSpNr9hn96pkCiZqUcFEc+54Qg==", + "dev": true, + "requires": { + "@types/parse-json": "^4.0.0", + "import-fresh": "^3.1.0", + "parse-json": "^5.0.0", + "path-type": "^4.0.0", + "yaml": "^1.7.2" + } + }, + "import-fresh": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.2.1.tgz", + "integrity": "sha512-6e1q1cnWP2RXD9/keSkxHScg508CdXqXWgWBaETNhyuBFz+kUZlKboh+ISK+bU++DmbHimVBrOz/zzPe0sZ3sQ==", + "dev": true, + "requires": { + "parent-module": "^1.0.0", + "resolve-from": "^4.0.0" + } + }, + "parse-json": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.0.0.tgz", + "integrity": "sha512-OOY5b7PAEFV0E2Fir1KOkxchnZNCdowAJgQ5NuxjpBKTRP3pQhwkrkxqQjeoKJ+fO7bCpmIZaogI4eZGDMEGOw==", + "dev": true, + "requires": { + "@babel/code-frame": "^7.0.0", + "error-ex": "^1.3.1", + "json-parse-better-errors": "^1.0.1", + "lines-and-columns": "^1.1.6" + } + }, + "path-type": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", + "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==", + "dev": true + }, + "regenerator-runtime": { + "version": "0.13.5", + "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.5.tgz", + "integrity": "sha512-ZS5w8CpKFinUzOwW3c83oPeVXoNsrLsaCoLtJvAClH135j/R77RuymhiSErhm2lKcwSCIpmvIWSbDkIfAqKQlA==", + "dev": true + }, + "resolve": { + "version": "1.15.1", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.15.1.tgz", + "integrity": "sha512-84oo6ZTtoTUpjgNEr5SJyzQhzL72gaRodsSfyxC/AXRvwu0Yse9H8eF9IpGo7b8YetZhlI6v7ZQ6bKBFV/6S7w==", + "dev": true, + "requires": { + "path-parse": "^1.0.6" + } + }, + "resolve-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", + "dev": true + } + } + }, + "babel-plugin-named-asset-import": { + "version": "0.3.6", + "resolved": "https://registry.npmjs.org/babel-plugin-named-asset-import/-/babel-plugin-named-asset-import-0.3.6.tgz", + "integrity": "sha512-1aGDUfL1qOOIoqk9QKGIo2lANk+C7ko/fqH0uIyC71x3PEGz0uVP8ISgfEsFuG+FKmjHTvFK/nNM8dowpmUxLA==", + "dev": true + }, + 
"babel-plugin-syntax-object-rest-spread": { + "version": "6.13.0", + "resolved": "https://registry.npmjs.org/babel-plugin-syntax-object-rest-spread/-/babel-plugin-syntax-object-rest-spread-6.13.0.tgz", + "integrity": "sha1-/WU28rzhODb/o6VFjEkDpZe7O/U=", + "dev": true + }, + "babel-plugin-transform-object-rest-spread": { + "version": "6.26.0", + "resolved": "https://registry.npmjs.org/babel-plugin-transform-object-rest-spread/-/babel-plugin-transform-object-rest-spread-6.26.0.tgz", + "integrity": "sha1-DzZpLVD+9rfi1LOsFHgTepY7ewY=", + "dev": true, + "requires": { + "babel-plugin-syntax-object-rest-spread": "^6.8.0", + "babel-runtime": "^6.26.0" + } + }, + "babel-plugin-transform-react-remove-prop-types": { + "version": "0.4.24", + "resolved": "https://registry.npmjs.org/babel-plugin-transform-react-remove-prop-types/-/babel-plugin-transform-react-remove-prop-types-0.4.24.tgz", + "integrity": "sha512-eqj0hVcJUR57/Ug2zE1Yswsw4LhuqqHhD+8v120T1cl3kjg76QwtyBrdIk4WVwK+lAhBJVYCd/v+4nc4y+8JsA==", + "dev": true + }, + "babel-preset-jest": { + "version": "24.9.0", + "resolved": "https://registry.npmjs.org/babel-preset-jest/-/babel-preset-jest-24.9.0.tgz", + "integrity": "sha512-izTUuhE4TMfTRPF92fFwD2QfdXaZW08qvWTFCI51V8rW5x00UuPgc3ajRoWofXOuxjfcOM5zzSYsQS3H8KGCAg==", + "dev": true, + "requires": { + "@babel/plugin-syntax-object-rest-spread": "^7.0.0", + "babel-plugin-jest-hoist": "^24.9.0" + } + }, + "babel-preset-react-app": { + "version": "9.1.2", + "resolved": "https://registry.npmjs.org/babel-preset-react-app/-/babel-preset-react-app-9.1.2.tgz", + "integrity": "sha512-k58RtQOKH21NyKtzptoAvtAODuAJJs3ZhqBMl456/GnXEQ/0La92pNmwgWoMn5pBTrsvk3YYXdY7zpY4e3UIxA==", + "dev": true, + "requires": { + "@babel/core": "7.9.0", + "@babel/plugin-proposal-class-properties": "7.8.3", + "@babel/plugin-proposal-decorators": "7.8.3", + "@babel/plugin-proposal-nullish-coalescing-operator": "7.8.3", + "@babel/plugin-proposal-numeric-separator": "7.8.3", + "@babel/plugin-proposal-optional-chaining": "7.9.0", + "@babel/plugin-transform-flow-strip-types": "7.9.0", + "@babel/plugin-transform-react-display-name": "7.8.3", + "@babel/plugin-transform-runtime": "7.9.0", + "@babel/preset-env": "7.9.0", + "@babel/preset-react": "7.9.1", + "@babel/preset-typescript": "7.9.0", + "@babel/runtime": "7.9.0", + "babel-plugin-macros": "2.8.0", + "babel-plugin-transform-react-remove-prop-types": "0.4.24" + }, + "dependencies": { + "@babel/preset-env": { + "version": "7.9.0", + "resolved": "https://registry.npmjs.org/@babel/preset-env/-/preset-env-7.9.0.tgz", + "integrity": "sha512-712DeRXT6dyKAM/FMbQTV/FvRCms2hPCx+3weRjZ8iQVQWZejWWk1wwG6ViWMyqb/ouBbGOl5b6aCk0+j1NmsQ==", + "dev": true, + "requires": { + "@babel/compat-data": "^7.9.0", + "@babel/helper-compilation-targets": "^7.8.7", + "@babel/helper-module-imports": "^7.8.3", + "@babel/helper-plugin-utils": "^7.8.3", + "@babel/plugin-proposal-async-generator-functions": "^7.8.3", + "@babel/plugin-proposal-dynamic-import": "^7.8.3", + "@babel/plugin-proposal-json-strings": "^7.8.3", + "@babel/plugin-proposal-nullish-coalescing-operator": "^7.8.3", + "@babel/plugin-proposal-numeric-separator": "^7.8.3", + "@babel/plugin-proposal-object-rest-spread": "^7.9.0", + "@babel/plugin-proposal-optional-catch-binding": "^7.8.3", + "@babel/plugin-proposal-optional-chaining": "^7.9.0", + "@babel/plugin-proposal-unicode-property-regex": "^7.8.3", + "@babel/plugin-syntax-async-generators": "^7.8.0", + "@babel/plugin-syntax-dynamic-import": "^7.8.0", + "@babel/plugin-syntax-json-strings": "^7.8.0", + 
"@babel/plugin-syntax-nullish-coalescing-operator": "^7.8.0", + "@babel/plugin-syntax-numeric-separator": "^7.8.0", + "@babel/plugin-syntax-object-rest-spread": "^7.8.0", + "@babel/plugin-syntax-optional-catch-binding": "^7.8.0", + "@babel/plugin-syntax-optional-chaining": "^7.8.0", + "@babel/plugin-syntax-top-level-await": "^7.8.3", + "@babel/plugin-transform-arrow-functions": "^7.8.3", + "@babel/plugin-transform-async-to-generator": "^7.8.3", + "@babel/plugin-transform-block-scoped-functions": "^7.8.3", + "@babel/plugin-transform-block-scoping": "^7.8.3", + "@babel/plugin-transform-classes": "^7.9.0", + "@babel/plugin-transform-computed-properties": "^7.8.3", + "@babel/plugin-transform-destructuring": "^7.8.3", + "@babel/plugin-transform-dotall-regex": "^7.8.3", + "@babel/plugin-transform-duplicate-keys": "^7.8.3", + "@babel/plugin-transform-exponentiation-operator": "^7.8.3", + "@babel/plugin-transform-for-of": "^7.9.0", + "@babel/plugin-transform-function-name": "^7.8.3", + "@babel/plugin-transform-literals": "^7.8.3", + "@babel/plugin-transform-member-expression-literals": "^7.8.3", + "@babel/plugin-transform-modules-amd": "^7.9.0", + "@babel/plugin-transform-modules-commonjs": "^7.9.0", + "@babel/plugin-transform-modules-systemjs": "^7.9.0", + "@babel/plugin-transform-modules-umd": "^7.9.0", + "@babel/plugin-transform-named-capturing-groups-regex": "^7.8.3", + "@babel/plugin-transform-new-target": "^7.8.3", + "@babel/plugin-transform-object-super": "^7.8.3", + "@babel/plugin-transform-parameters": "^7.8.7", + "@babel/plugin-transform-property-literals": "^7.8.3", + "@babel/plugin-transform-regenerator": "^7.8.7", + "@babel/plugin-transform-reserved-words": "^7.8.3", + "@babel/plugin-transform-shorthand-properties": "^7.8.3", + "@babel/plugin-transform-spread": "^7.8.3", + "@babel/plugin-transform-sticky-regex": "^7.8.3", + "@babel/plugin-transform-template-literals": "^7.8.3", + "@babel/plugin-transform-typeof-symbol": "^7.8.4", + "@babel/plugin-transform-unicode-regex": "^7.8.3", + "@babel/preset-modules": "^0.1.3", + "@babel/types": "^7.9.0", + "browserslist": "^4.9.1", + "core-js-compat": "^3.6.2", + "invariant": "^2.2.2", + "levenary": "^1.1.1", + "semver": "^5.5.0" + } + }, + "@babel/preset-react": { + "version": "7.9.1", + "resolved": "https://registry.npmjs.org/@babel/preset-react/-/preset-react-7.9.1.tgz", + "integrity": "sha512-aJBYF23MPj0RNdp/4bHnAP0NVqqZRr9kl0NAOP4nJCex6OYVio59+dnQzsAWFuogdLyeaKA1hmfUIVZkY5J+TQ==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.8.3", + "@babel/plugin-transform-react-display-name": "^7.8.3", + "@babel/plugin-transform-react-jsx": "^7.9.1", + "@babel/plugin-transform-react-jsx-development": "^7.9.0", + "@babel/plugin-transform-react-jsx-self": "^7.9.0", + "@babel/plugin-transform-react-jsx-source": "^7.9.0" + } + }, + "@babel/runtime": { + "version": "7.9.0", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.9.0.tgz", + "integrity": "sha512-cTIudHnzuWLS56ik4DnRnqqNf8MkdUzV4iFFI1h7Jo9xvrpQROYaAnaSd2mHLQAzzZAPfATynX5ord6YlNYNMA==", + "dev": true, + "requires": { + "regenerator-runtime": "^0.13.4" + } + }, + "@babel/types": { + "version": "7.9.5", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.9.5.tgz", + "integrity": "sha512-XjnvNqenk818r5zMaba+sLQjnbda31UfUURv3ei0qPQw4u+j2jMyJ5b11y8ZHYTRSI3NnInQkkkRT4fLqqPdHg==", + "dev": true, + "requires": { + "@babel/helper-validator-identifier": "^7.9.5", + "lodash": "^4.17.13", + "to-fast-properties": "^2.0.0" + } + }, + "browserslist": { + 
"version": "4.11.1", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.11.1.tgz", + "integrity": "sha512-DCTr3kDrKEYNw6Jb9HFxVLQNaue8z+0ZfRBRjmCunKDEXEBajKDj2Y+Uelg+Pi29OnvaSGwjOsnRyNEkXzHg5g==", + "dev": true, + "requires": { + "caniuse-lite": "^1.0.30001038", + "electron-to-chromium": "^1.3.390", + "node-releases": "^1.1.53", + "pkg-up": "^2.0.0" + } + }, + "caniuse-lite": { + "version": "1.0.30001039", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001039.tgz", + "integrity": "sha512-SezbWCTT34eyFoWHgx8UWso7YtvtM7oosmFoXbCkdC6qJzRfBTeTgE9REtKtiuKXuMwWTZEvdnFNGAyVMorv8Q==", + "dev": true + }, + "electron-to-chromium": { + "version": "1.3.399", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.3.399.tgz", + "integrity": "sha512-+NBhK0/v17pls7CSh3Cx5Ir3tsGmtLPMMAO4Nz272bre2wzdykLEsev5wjOd3rYMt2/kSS681ufFT7Dywxq1sw==", + "dev": true + }, + "node-releases": { + "version": "1.1.53", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-1.1.53.tgz", + "integrity": "sha512-wp8zyQVwef2hpZ/dJH7SfSrIPD6YoJz6BDQDpGEkcA0s3LpAQoxBIYmfIq6QAhC1DhwsyCgTaTTcONwX8qzCuQ==", + "dev": true + }, + "regenerator-runtime": { + "version": "0.13.5", + "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.5.tgz", + "integrity": "sha512-ZS5w8CpKFinUzOwW3c83oPeVXoNsrLsaCoLtJvAClH135j/R77RuymhiSErhm2lKcwSCIpmvIWSbDkIfAqKQlA==", + "dev": true + } + } + }, + "babel-runtime": { + "version": "6.26.0", + "resolved": "https://registry.npmjs.org/babel-runtime/-/babel-runtime-6.26.0.tgz", + "integrity": "sha1-llxwWGaOgrVde/4E/yM3vItWR/4=", + "requires": { + "core-js": "^2.4.0", + "regenerator-runtime": "^0.11.0" + }, + "dependencies": { + "core-js": { + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/core-js/-/core-js-2.6.1.tgz", + "integrity": "sha512-L72mmmEayPJBejKIWe2pYtGis5r0tQ5NaJekdhyXgeMQTpJoBsH0NL4ElY2LfSoV15xeQWKQ+XTTOZdyero5Xg==" + }, + "regenerator-runtime": { + "version": "0.11.1", + "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.11.1.tgz", + "integrity": "sha512-MguG95oij0fC3QV3URf4V2SDYGJhJnJGqvIIgdECeODCT98wSWDAJ94SSuVpYQUoTcGUIL6L4yNB7j1DFFHSBg==" + } + } + }, + "babylon": { + "version": "6.18.0", + "resolved": "https://registry.npmjs.org/babylon/-/babylon-6.18.0.tgz", + "integrity": "sha512-q/UEjfGJ2Cm3oKV71DJz9d25TPnq5rhBVL2Q4fA5wcC3jcrdn7+SssEybFIxwAvvP+YCsCYNKughoF33GxgycQ==", + "dev": true + }, + "balanced-match": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", + "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=", + "dev": true + }, + "base": { + "version": "0.11.2", + "resolved": "https://registry.npmjs.org/base/-/base-0.11.2.tgz", + "integrity": "sha512-5T6P4xPgpp0YDFvSWwEZ4NoE3aM4QBQXDzmVbraCkFj8zHM+mba8SyqB5DbZWyR7mYHo6Y7BdQo3MoA4m0TeQg==", + "requires": { + "cache-base": "^1.0.1", + "class-utils": "^0.3.5", + "component-emitter": "^1.2.1", + "define-property": "^1.0.0", + "isobject": "^3.0.1", + "mixin-deep": "^1.2.0", + "pascalcase": "^0.1.1" + }, + "dependencies": { + "define-property": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/define-property/-/define-property-1.0.0.tgz", + "integrity": "sha1-dp66rz9KY6rTr56NMEybvnm/sOY=", + "requires": { + "is-descriptor": "^1.0.0" + } + }, + "is-accessor-descriptor": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz", 
+ "integrity": "sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ==", + "requires": { + "kind-of": "^6.0.0" + } + }, + "is-data-descriptor": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz", + "integrity": "sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ==", + "requires": { + "kind-of": "^6.0.0" + } + }, + "is-descriptor": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-1.0.2.tgz", + "integrity": "sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg==", + "requires": { + "is-accessor-descriptor": "^1.0.0", + "is-data-descriptor": "^1.0.0", + "kind-of": "^6.0.2" + } + } + } + }, + "base64-js": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.3.0.tgz", + "integrity": "sha512-ccav/yGvoa80BQDljCxsmmQ3Xvx60/UpBIij5QN21W3wBi/hhIC9OoO+KLpu9IJTS9j4DRVJ3aDDF9cMSoa2lw==" + }, + "batch": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/batch/-/batch-0.6.1.tgz", + "integrity": "sha1-3DQxT05nkxgJP8dgJyUl+UvyXBY=", + "dev": true + }, + "bcrypt-pbkdf": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz", + "integrity": "sha1-pDAdOJtqQ/m2f/PKEaP2Y342Dp4=", + "dev": true, + "requires": { + "tweetnacl": "^0.14.3" + } + }, + "bfj": { + "version": "6.1.2", + "resolved": "https://registry.npmjs.org/bfj/-/bfj-6.1.2.tgz", + "integrity": "sha512-BmBJa4Lip6BPRINSZ0BPEIfB1wUY/9rwbwvIHQA1KjX9om29B6id0wnWXq7m3bn5JrUVjeOTnVuhPT1FiHwPGw==", + "dev": true, + "requires": { + "bluebird": "^3.5.5", + "check-types": "^8.0.3", + "hoopy": "^0.1.4", + "tryer": "^1.0.1" + } + }, + "big.js": { + "version": "5.2.2", + "resolved": "https://registry.npmjs.org/big.js/-/big.js-5.2.2.tgz", + "integrity": "sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ==", + "dev": true + }, + "binary-extensions": { + "version": "1.12.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-1.12.0.tgz", + "integrity": "sha512-DYWGk01lDcxeS/K9IHPGWfT8PsJmbXRtRd2Sx72Tnb8pcYZQFF1oSDb8hJtS1vhp212q1Rzi5dUf9+nq0o9UIg==", + "dev": true + }, + "bindings": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.5.0.tgz", + "integrity": "sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ==", + "dev": true, + "optional": true, + "requires": { + "file-uri-to-path": "1.0.0" + } + }, + "bluebird": { + "version": "3.7.2", + "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.7.2.tgz", + "integrity": "sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg==", + "dev": true + }, + "bn.js": { + "version": "4.11.8", + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.11.8.tgz", + "integrity": "sha512-ItfYfPLkWHUjckQCk8xC+LwxgK8NYcXywGigJgSwOP8Y2iyWT4f2vsZnoOXTTbo+o5yXmIUJ4gn5538SO5S3gA==", + "dev": true + }, + "body-parser": { + "version": "1.19.0", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.19.0.tgz", + "integrity": "sha512-dhEPs72UPbDnAQJ9ZKMNTP6ptJaionhP5cBb541nXPlW60Jepo9RV/a4fX4XWW9CuFNK22krhrj1+rgzifNCsw==", + "dev": true, + "requires": { + "bytes": "3.1.0", + "content-type": "~1.0.4", + "debug": "2.6.9", + "depd": "~1.1.2", + "http-errors": "1.7.2", + "iconv-lite": "0.4.24", + 
"on-finished": "~2.3.0", + "qs": "6.7.0", + "raw-body": "2.4.0", + "type-is": "~1.6.17" + }, + "dependencies": { + "bytes": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.0.tgz", + "integrity": "sha512-zauLjrfCG+xvoyaqLoV8bLVXXNGC4JqlxFCutSDWA6fJrTo2ZuvLYTqZ7aHBLZSMOopbzwv8f+wZcVzfVTI2Dg==", + "dev": true + }, + "debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dev": true, + "requires": { + "ms": "2.0.0" + } + }, + "http-errors": { + "version": "1.7.2", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.7.2.tgz", + "integrity": "sha512-uUQBt3H/cSIVfch6i1EuPNy/YsRSOUBXTVfZ+yR7Zjez3qjBz6i9+i4zjNaoqcoFVI4lQJ5plg63TvGfRSDCRg==", + "dev": true, + "requires": { + "depd": "~1.1.2", + "inherits": "2.0.3", + "setprototypeof": "1.1.1", + "statuses": ">= 1.5.0 < 2", + "toidentifier": "1.0.0" + } + }, + "qs": { + "version": "6.7.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.7.0.tgz", + "integrity": "sha512-VCdBRNFTX1fyE7Nb6FYoURo/SPe62QCaAyzJvUjwRaIsc+NePBEniHlvxFmmX56+HZphIGtV0XeCirBtpDrTyQ==", + "dev": true + }, + "setprototypeof": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.1.1.tgz", + "integrity": "sha512-JvdAWfbXeIGaZ9cILp38HntZSFSo3mWg6xGcJJsd+d4aRMOqauag1C63dJfDw7OaMYwEbHMOxEZ1lqVRYP2OAw==", + "dev": true + }, + "statuses": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz", + "integrity": "sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow=", + "dev": true + } + } + }, + "bonjour": { + "version": "3.5.0", + "resolved": "https://registry.npmjs.org/bonjour/-/bonjour-3.5.0.tgz", + "integrity": "sha1-jokKGD2O6aI5OzhExpGkK897yfU=", + "dev": true, + "requires": { + "array-flatten": "^2.1.0", + "deep-equal": "^1.0.1", + "dns-equal": "^1.0.0", + "dns-txt": "^2.0.2", + "multicast-dns": "^6.0.1", + "multicast-dns-service-types": "^1.1.0" + } + }, + "boolbase": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz", + "integrity": "sha1-aN/1++YMUes3cl6p4+0xDcwed24=", + "dev": true + }, + "brace": { + "version": "0.11.1", + "resolved": "https://registry.npmjs.org/brace/-/brace-0.11.1.tgz", + "integrity": "sha1-SJb8ydVE7vRfS7dmDbMg07N5/lg=" + }, + "brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "requires": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + }, + "dependencies": { + "balanced-match": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", + "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=", + "dev": true + } + } + }, + "braces": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-2.3.2.tgz", + "integrity": "sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w==", + "requires": { + "arr-flatten": "^1.1.0", + "array-unique": "^0.3.2", + "extend-shallow": "^2.0.1", + "fill-range": "^4.0.0", + "isobject": "^3.0.1", + "repeat-element": "^1.1.2", + "snapdragon": "^0.8.1", + "snapdragon-node": "^2.0.1", + "split-string": "^3.0.2", + "to-regex": "^3.0.1" + }, + "dependencies": { + "extend-shallow": { + 
"version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", + "requires": { + "is-extendable": "^0.1.0" + } + } + } + }, + "brcast": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/brcast/-/brcast-3.0.1.tgz", + "integrity": "sha512-eI3yqf9YEqyGl9PCNTR46MGvDylGtaHjalcz6Q3fAPnP/PhpKkkve52vFdfGpwp4VUvK6LUr4TQN+2stCrEwTg==" + }, + "brorand": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/brorand/-/brorand-1.1.0.tgz", + "integrity": "sha1-EsJe/kCkXjwyPrhnWgoM5XsiNx8=", + "dev": true + }, + "browser-process-hrtime": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/browser-process-hrtime/-/browser-process-hrtime-1.0.0.tgz", + "integrity": "sha512-9o5UecI3GhkpM6DrXr69PblIuWxPKk9Y0jHBRhdocZ2y7YECBFCsHm79Pr3OyR2AvjhDkabFJaDJMYRazHgsow==", + "dev": true + }, + "browser-resolve": { + "version": "1.11.3", + "resolved": "https://registry.npmjs.org/browser-resolve/-/browser-resolve-1.11.3.tgz", + "integrity": "sha512-exDi1BYWB/6raKHmDTCicQfTkqwN5fioMFV4j8BsfMU4R2DK/QfZfK7kOVkmWCNANf0snkBzqGqAJBao9gZMdQ==", + "dev": true, + "requires": { + "resolve": "1.1.7" + }, + "dependencies": { + "resolve": { + "version": "1.1.7", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.1.7.tgz", + "integrity": "sha1-IDEU2CrSxe2ejgQRs5ModeiJ6Xs=", + "dev": true + } + } + }, + "browserify-aes": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/browserify-aes/-/browserify-aes-1.2.0.tgz", + "integrity": "sha512-+7CHXqGuspUn/Sl5aO7Ea0xWGAtETPXNSAjHo48JfLdPWcMng33Xe4znFvQweqc/uzk5zSOI3H52CYnjCfb5hA==", + "dev": true, + "requires": { + "buffer-xor": "^1.0.3", + "cipher-base": "^1.0.0", + "create-hash": "^1.1.0", + "evp_bytestokey": "^1.0.3", + "inherits": "^2.0.1", + "safe-buffer": "^5.0.1" + } + }, + "browserify-cipher": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/browserify-cipher/-/browserify-cipher-1.0.1.tgz", + "integrity": "sha512-sPhkz0ARKbf4rRQt2hTpAHqn47X3llLkUGn+xEJzLjwY8LRs2p0v7ljvI5EyoRO/mexrNunNECisZs+gw2zz1w==", + "dev": true, + "requires": { + "browserify-aes": "^1.0.4", + "browserify-des": "^1.0.0", + "evp_bytestokey": "^1.0.0" + } + }, + "browserify-des": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/browserify-des/-/browserify-des-1.0.2.tgz", + "integrity": "sha512-BioO1xf3hFwz4kc6iBhI3ieDFompMhrMlnDFC4/0/vd5MokpuAc3R+LYbwTA9A5Yc9pq9UYPqffKpW2ObuwX5A==", + "dev": true, + "requires": { + "cipher-base": "^1.0.1", + "des.js": "^1.0.0", + "inherits": "^2.0.1", + "safe-buffer": "^5.1.2" + } + }, + "browserify-rsa": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/browserify-rsa/-/browserify-rsa-4.0.1.tgz", + "integrity": "sha1-IeCr+vbyApzy+vsTNWenAdQTVSQ=", + "dev": true, + "requires": { + "bn.js": "^4.1.0", + "randombytes": "^2.0.1" + } + }, + "browserify-sign": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/browserify-sign/-/browserify-sign-4.0.4.tgz", + "integrity": "sha1-qk62jl17ZYuqa/alfmMMvXqT0pg=", + "dev": true, + "requires": { + "bn.js": "^4.1.1", + "browserify-rsa": "^4.0.0", + "create-hash": "^1.1.0", + "create-hmac": "^1.1.2", + "elliptic": "^6.0.0", + "inherits": "^2.0.1", + "parse-asn1": "^5.0.0" + } + }, + "browserify-zlib": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/browserify-zlib/-/browserify-zlib-0.2.0.tgz", + "integrity": "sha512-Z942RysHXmJrhqk88FmKBVq/v5tqmSkDz7p54G/MGyjMnCFFnC79XWNbg+Vta8W6Wb2qtSZTSxIGkJrRpCFEiA==", + 
"dev": true, + "requires": { + "pako": "~1.0.5" + } + }, + "browserslist": { + "version": "4.8.7", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.8.7.tgz", + "integrity": "sha512-gFOnZNYBHrEyUML0xr5NJ6edFaaKbTFX9S9kQHlYfCP0Rit/boRIz4G+Avq6/4haEKJXdGGUnoolx+5MWW2BoA==", + "dev": true, + "requires": { + "caniuse-lite": "^1.0.30001027", + "electron-to-chromium": "^1.3.349", + "node-releases": "^1.1.49" + } + }, + "bser": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/bser/-/bser-2.1.1.tgz", + "integrity": "sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ==", + "dev": true, + "requires": { + "node-int64": "^0.4.0" + } + }, + "buffer": { + "version": "4.9.1", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-4.9.1.tgz", + "integrity": "sha1-bRu2AbB6TvztlwlBMgkwJ8lbwpg=", + "dev": true, + "requires": { + "base64-js": "^1.0.2", + "ieee754": "^1.1.4", + "isarray": "^1.0.0" + } + }, + "buffer-equal-constant-time": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz", + "integrity": "sha1-+OcRMvf/5uAaXJaXpMbz5I1cyBk=", + "dev": true + }, + "buffer-from": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.1.tgz", + "integrity": "sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A==", + "dev": true + }, + "buffer-indexof": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/buffer-indexof/-/buffer-indexof-1.1.1.tgz", + "integrity": "sha512-4/rOEg86jivtPTeOUUT61jJO1Ya1TrR/OkqCSZDyq84WJh3LuuiphBYJN+fm5xufIk4XAFcEwte/8WzC8If/1g==", + "dev": true + }, + "buffer-xor": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/buffer-xor/-/buffer-xor-1.0.3.tgz", + "integrity": "sha1-JuYe0UIvtw3ULm42cp7VHYVf6Nk=", + "dev": true + }, + "builtin-modules": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-1.1.1.tgz", + "integrity": "sha1-Jw8HbFpywC9bZaR9+Uxf46J4iS8=", + "dev": true + }, + "builtin-status-codes": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/builtin-status-codes/-/builtin-status-codes-3.0.0.tgz", + "integrity": "sha1-hZgoeOIbmOHGZCXgPQF0eI9Wnug=", + "dev": true + }, + "bytes": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.0.0.tgz", + "integrity": "sha1-0ygVQE1olpn4Wk6k+odV3ROpYEg=", + "dev": true + }, + "cacache": { + "version": "12.0.3", + "resolved": "https://registry.npmjs.org/cacache/-/cacache-12.0.3.tgz", + "integrity": "sha512-kqdmfXEGFepesTuROHMs3MpFLWrPkSSpRqOw80RCflZXy/khxaArvFrQ7uJxSUduzAufc6G0g1VUCOZXxWavPw==", + "dev": true, + "requires": { + "bluebird": "^3.5.5", + "chownr": "^1.1.1", + "figgy-pudding": "^3.5.1", + "glob": "^7.1.4", + "graceful-fs": "^4.1.15", + "infer-owner": "^1.0.3", + "lru-cache": "^5.1.1", + "mississippi": "^3.0.0", + "mkdirp": "^0.5.1", + "move-concurrently": "^1.0.1", + "promise-inflight": "^1.0.1", + "rimraf": "^2.6.3", + "ssri": "^6.0.1", + "unique-filename": "^1.1.1", + "y18n": "^4.0.0" + }, + "dependencies": { + "glob": { + "version": "7.1.6", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", + "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", + "dev": true, + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + 
"path-is-absolute": "^1.0.0" + } + } + } + }, + "cache-base": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/cache-base/-/cache-base-1.0.1.tgz", + "integrity": "sha512-AKcdTnFSWATd5/GCPRxr2ChwIJ85CeyrEyjRHlKxQ56d4XJMGym0uAiKn0xbLOGOl3+yRpOTi484dVCEc5AUzQ==", + "requires": { + "collection-visit": "^1.0.0", + "component-emitter": "^1.2.1", + "get-value": "^2.0.6", + "has-value": "^1.0.0", + "isobject": "^3.0.1", + "set-value": "^2.0.0", + "to-object-path": "^0.3.0", + "union-value": "^1.0.0", + "unset-value": "^1.0.0" + } + }, + "call-me-maybe": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/call-me-maybe/-/call-me-maybe-1.0.1.tgz", + "integrity": "sha1-JtII6onje1y95gJQoV8DHBak1ms=", + "dev": true + }, + "caller-callsite": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/caller-callsite/-/caller-callsite-2.0.0.tgz", + "integrity": "sha1-hH4PzgoiN1CpoCfFSzNzGtMVQTQ=", + "dev": true, + "requires": { + "callsites": "^2.0.0" + } + }, + "caller-path": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/caller-path/-/caller-path-2.0.0.tgz", + "integrity": "sha1-Ro+DBE42mrIBD6xfBs7uFbsssfQ=", + "dev": true, + "requires": { + "caller-callsite": "^2.0.0" + } + }, + "callsites": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-2.0.0.tgz", + "integrity": "sha1-BuuE8A7qQT2oav/vrL/7Ngk7PFA=", + "dev": true + }, + "camel-case": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/camel-case/-/camel-case-3.0.0.tgz", + "integrity": "sha1-yjw2iKTpzzpM2nd9xNy8cTJJz3M=", + "dev": true, + "requires": { + "no-case": "^2.2.0", + "upper-case": "^1.1.1" + } + }, + "camelcase": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", + "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", + "dev": true + }, + "camelcase-keys": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/camelcase-keys/-/camelcase-keys-2.1.0.tgz", + "integrity": "sha1-MIvur/3ygRkFHvodkyITyRuPkuc=", + "dev": true, + "requires": { + "camelcase": "^2.0.0", + "map-obj": "^1.0.0" + }, + "dependencies": { + "camelcase": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-2.1.1.tgz", + "integrity": "sha1-fB0W1nmhu+WcoCys7PsBHiAfWh8=", + "dev": true + } + } + }, + "camelize": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/camelize/-/camelize-1.0.0.tgz", + "integrity": "sha1-FkpUg+Yw+kMh5a8HAg5TGDGyYJs=" + }, + "caniuse-api": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/caniuse-api/-/caniuse-api-3.0.0.tgz", + "integrity": "sha512-bsTwuIg/BZZK/vreVTYYbSWoe2F+71P7K5QGEX+pT250DZbfU1MQ5prOKpPR+LL6uWKK3KMwMCAS74QB3Um1uw==", + "dev": true, + "requires": { + "browserslist": "^4.0.0", + "caniuse-lite": "^1.0.0", + "lodash.memoize": "^4.1.2", + "lodash.uniq": "^4.5.0" + } + }, + "caniuse-lite": { + "version": "1.0.30001028", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001028.tgz", + "integrity": "sha512-Vnrq+XMSHpT7E+LWoIYhs3Sne8h9lx9YJV3acH3THNCwU/9zV93/ta4xVfzTtnqd3rvnuVpVjE3DFqf56tr3aQ==", + "dev": true + }, + "capture-exit": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/capture-exit/-/capture-exit-2.0.0.tgz", + "integrity": "sha512-PiT/hQmTonHhl/HFGN+Lx3JJUznrVYJ3+AQsnthneZbvW7x+f08Tk7yLJTLEOUvBTbduLeeBkxEaYXUOUrRq6g==", + "dev": true, + "requires": { + "rsvp": "^4.8.4" + } + }, + "cardinal": { + 
"version": "2.1.1", + "resolved": "https://registry.npmjs.org/cardinal/-/cardinal-2.1.1.tgz", + "integrity": "sha1-fMEFXYItISlU0HsIXeolHMe8VQU=", + "dev": true, + "requires": { + "ansicolors": "~0.3.2", + "redeyed": "~2.1.0" + } + }, + "case-sensitive-paths-webpack-plugin": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/case-sensitive-paths-webpack-plugin/-/case-sensitive-paths-webpack-plugin-2.3.0.tgz", + "integrity": "sha512-/4YgnZS8y1UXXmC02xD5rRrBEu6T5ub+mQHLNRj0fzTRbgdBYhsNo2V5EqwgqrExjxsjtF/OpAKAMkKsxbD5XQ==", + "dev": true + }, + "caseless": { + "version": "0.12.0", + "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", + "integrity": "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw=", + "dev": true + }, + "chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dev": true, + "requires": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + } + }, + "change-case": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/change-case/-/change-case-3.0.2.tgz", + "integrity": "sha512-Mww+SLF6MZ0U6kdg11algyKd5BARbyM4TbFBepwowYSR5ClfQGCGtxNXgykpN0uF/bstWeaGDT4JWaDh8zWAHA==", + "dev": true, + "requires": { + "camel-case": "^3.0.0", + "constant-case": "^2.0.0", + "dot-case": "^2.1.0", + "header-case": "^1.0.0", + "is-lower-case": "^1.1.0", + "is-upper-case": "^1.1.0", + "lower-case": "^1.1.1", + "lower-case-first": "^1.0.0", + "no-case": "^2.3.2", + "param-case": "^2.1.0", + "pascal-case": "^2.0.0", + "path-case": "^2.1.0", + "sentence-case": "^2.1.0", + "snake-case": "^2.1.0", + "swap-case": "^1.1.0", + "title-case": "^2.1.0", + "upper-case": "^1.1.1", + "upper-case-first": "^1.1.0" + } + }, + "change-emitter": { + "version": "0.1.6", + "resolved": "https://registry.npmjs.org/change-emitter/-/change-emitter-0.1.6.tgz", + "integrity": "sha1-6LL+PX8at9aaMhma/5HqaTFAlRU=" + }, + "chardet": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/chardet/-/chardet-0.7.0.tgz", + "integrity": "sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA==", + "dev": true + }, + "check-types": { + "version": "8.0.3", + "resolved": "https://registry.npmjs.org/check-types/-/check-types-8.0.3.tgz", + "integrity": "sha512-YpeKZngUmG65rLudJ4taU7VLkOCTMhNl/u4ctNC56LQS/zJTyNH0Lrtwm1tfTsbLlwvlfsA2d1c8vCf/Kh2KwQ==", + "dev": true + }, + "cheerio": { + "version": "1.0.0-rc.3", + "resolved": "https://registry.npmjs.org/cheerio/-/cheerio-1.0.0-rc.3.tgz", + "integrity": "sha512-0td5ijfUPuubwLUu0OBoe98gZj8C/AA+RW3v67GPlGOrvxWjZmBXiBCRU+I8VEiNyJzjth40POfHiz2RB3gImA==", + "dev": true, + "requires": { + "css-select": "~1.2.0", + "dom-serializer": "~0.1.1", + "entities": "~1.1.1", + "htmlparser2": "^3.9.1", + "lodash": "^4.15.0", + "parse5": "^3.0.1" + }, + "dependencies": { + "dom-serializer": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-0.1.1.tgz", + "integrity": "sha512-l0IU0pPzLWSHBcieZbpOKgkIn3ts3vAh7ZuFyXNwJxJXk/c4Gwj9xaTJwIDVQCXawWD0qb3IzMGH5rglQaO0XA==", + "dev": true, + "requires": { + "domelementtype": "^1.3.0", + "entities": "^1.1.1" + } + }, + "domhandler": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/domhandler/-/domhandler-2.4.2.tgz", + "integrity": "sha512-JiK04h0Ht5u/80fdLMCEmV4zkNh2BcoMFBmZ/91WtYZ8qVXSKjiw7fXMgFPnHcSZgOo3XdinHvmnDUeMf5R4wA==", + "dev": 
true, + "requires": { + "domelementtype": "1" + } + }, + "htmlparser2": { + "version": "3.10.1", + "resolved": "https://registry.npmjs.org/htmlparser2/-/htmlparser2-3.10.1.tgz", + "integrity": "sha512-IgieNijUMbkDovyoKObU1DUhm1iwNYE/fuifEoEHfd1oZKZDaONBSkal7Y01shxsM49R4XaMdGez3WnF9UfiCQ==", + "dev": true, + "requires": { + "domelementtype": "^1.3.1", + "domhandler": "^2.3.0", + "domutils": "^1.5.1", + "entities": "^1.1.1", + "inherits": "^2.0.1", + "readable-stream": "^3.1.1" + } + }, + "parse5": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/parse5/-/parse5-3.0.3.tgz", + "integrity": "sha512-rgO9Zg5LLLkfJF9E6CCmXlSE4UVceloys8JrFqCcHloC3usd/kJCyPDwH2SOlzix2j3xaP9sUX3e8+kvkuleAA==", + "dev": true, + "requires": { + "@types/node": "*" + } + }, + "readable-stream": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.4.0.tgz", + "integrity": "sha512-jItXPLmrSR8jmTRmRWJXCnGJsfy85mB3Wd/uINMXA65yrnFo0cPClFIUWzo2najVNSl+mx7/4W8ttlLWJe99pQ==", + "dev": true, + "requires": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + } + } + } + }, + "chokidar": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.3.1.tgz", + "integrity": "sha512-4QYCEWOcK3OJrxwvyyAOxFuhpvOVCYkr33LPfFNBjAD/w3sEzWsp2BUOkI4l9bHvWioAd0rc6NlHUOEaWkTeqg==", + "dev": true, + "requires": { + "anymatch": "~3.1.1", + "braces": "~3.0.2", + "fsevents": "~2.1.2", + "glob-parent": "~5.1.0", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.3.0" + }, + "dependencies": { + "anymatch": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.1.tgz", + "integrity": "sha512-mM8522psRCqzV+6LhomX5wgp25YVibjh8Wj23I5RPkPppSVSjyKD2A2mBJmWGa+KN7f2D6LNh9jkBCeyLktzjg==", + "dev": true, + "requires": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + } + }, + "binary-extensions": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.0.0.tgz", + "integrity": "sha512-Phlt0plgpIIBOGTT/ehfFnbNlfsDEiqmzE2KRXoX1bLIlir4X/MR+zSyBEkL05ffWgnRSf/DXv+WrUAVr93/ow==", + "dev": true + }, + "braces": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", + "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "dev": true, + "requires": { + "fill-range": "^7.0.1" + } + }, + "fill-range": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", + "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "dev": true, + "requires": { + "to-regex-range": "^5.0.1" + } + }, + "fsevents": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.1.2.tgz", + "integrity": "sha512-R4wDiBwZ0KzpgOWetKDug1FZcYhqYnUYKtfZYt4mD5SBz76q0KR4Q9o7GIPamsVPGmW3EYPPJ0dOOjvx32ldZA==", + "dev": true, + "optional": true + }, + "is-binary-path": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", + "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", + "dev": true, + "requires": { + "binary-extensions": "^2.0.0" + } + }, + "is-glob": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.1.tgz", + "integrity": 
"sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg==", + "dev": true, + "requires": { + "is-extglob": "^2.1.1" + } + }, + "is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true + }, + "normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "dev": true + }, + "readdirp": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.3.0.tgz", + "integrity": "sha512-zz0pAkSPOXXm1viEwygWIPSPkcBYjW1xU5j/JBh5t9bGCJwa6f9+BJa6VaB2g+b55yVrmXzqkyLf4xaWYM0IkQ==", + "dev": true, + "requires": { + "picomatch": "^2.0.7" + } + }, + "to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "requires": { + "is-number": "^7.0.0" + } + } + } + }, + "chownr": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/chownr/-/chownr-1.1.1.tgz", + "integrity": "sha512-j38EvO5+LHX84jlo6h4UzmOwi0UgW61WRyPtJz4qaadK5eY3BTS5TY/S1Stc3Uk2lIM6TPevAlULiEJwie860g==", + "dev": true + }, + "chrome-trace-event": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/chrome-trace-event/-/chrome-trace-event-1.0.2.tgz", + "integrity": "sha512-9e/zx1jw7B4CO+c/RXoCsfg/x1AfUBioy4owYH0bJprEYAx5hRFLRhWBqHAG57D0ZM4H7vxbP7bPe0VwhQRYDQ==", + "dev": true, + "requires": { + "tslib": "^1.9.0" + } + }, + "ci-info": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-2.0.0.tgz", + "integrity": "sha512-5tK7EtrZ0N+OLFMthtqOj4fI2Jeb88C4CAZPu25LDVUgXJ0A3Js4PMGqrn0JU1W0Mh1/Z8wZzYPxqUrXeBboCQ==", + "dev": true + }, + "cipher-base": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/cipher-base/-/cipher-base-1.0.4.tgz", + "integrity": "sha512-Kkht5ye6ZGmwv40uUDZztayT2ThLQGfnj/T71N/XzeZeo3nf8foyW7zGTsPYkEya3m5f3cAypH+qe7YOrM1U2Q==", + "dev": true, + "requires": { + "inherits": "^2.0.1", + "safe-buffer": "^5.0.1" + } + }, + "class-utils": { + "version": "0.3.6", + "resolved": "https://registry.npmjs.org/class-utils/-/class-utils-0.3.6.tgz", + "integrity": "sha512-qOhPa/Fj7s6TY8H8esGu5QNpMMQxz79h+urzrNYN6mn+9BnxlDGf5QZ+XeCDsxSjPqsSR56XOZOJmpeurnLMeg==", + "requires": { + "arr-union": "^3.1.0", + "define-property": "^0.2.5", + "isobject": "^3.0.0", + "static-extend": "^0.1.1" + }, + "dependencies": { + "define-property": { + "version": "0.2.5", + "resolved": "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz", + "integrity": "sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY=", + "requires": { + "is-descriptor": "^0.1.0" + } + } + } + }, + "classnames": { + "version": "2.2.6", + "resolved": "https://registry.npmjs.org/classnames/-/classnames-2.2.6.tgz", + "integrity": "sha512-JR/iSQOSt+LQIWwrwEzJ9uk0xfN3mTVYMwt1Ir5mUcSN6pU+V4zQFFaJsclJbPuAUQH+yfWef6tm7l1quW3C8Q==" + }, + "clean-css": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/clean-css/-/clean-css-4.2.3.tgz", + "integrity": "sha512-VcMWDN54ZN/DS+g58HYL5/n4Zrqe8vHJpGA8KdgUXFU4fuP/aHNw8eld9SyEIyabIMJX/0RaY/fplOo5hYLSFA==", + "dev": true, + "requires": { + "source-map": "~0.6.0" 
+ }, + "dependencies": { + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true + } + } + }, + "clean-stack": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz", + "integrity": "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==", + "dev": true + }, + "cli-cursor": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-3.1.0.tgz", + "integrity": "sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw==", + "dev": true, + "requires": { + "restore-cursor": "^3.1.0" + } + }, + "cli-table": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/cli-table/-/cli-table-0.3.1.tgz", + "integrity": "sha1-9TsFJmqLGguTSz0IIebi3FkUriM=", + "dev": true, + "requires": { + "colors": "1.0.3" + } + }, + "cli-usage": { + "version": "0.1.8", + "resolved": "https://registry.npmjs.org/cli-usage/-/cli-usage-0.1.8.tgz", + "integrity": "sha512-EZJ+ty1TsqdnhZNt2QbI+ed3IUNHTH31blSOJLVph3oL4IExskPRyCDGJH7RuCBPy3QBmWgpbeUxXPhK0isXIw==", + "dev": true, + "requires": { + "marked": "^0.5.0", + "marked-terminal": "^3.0.0" + } + }, + "cli-width": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/cli-width/-/cli-width-2.2.0.tgz", + "integrity": "sha1-/xnt6Kml5XkyQUewwR8PvLq+1jk=", + "dev": true + }, + "cliui": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-5.0.0.tgz", + "integrity": "sha512-PYeGSEmmHM6zvoef2w8TPzlrnNpXIjTipYK780YswmIP9vjxmd6Y2a3CB2Ks6/AU8NHjZugXvo8w3oWM2qnwXA==", + "dev": true, + "requires": { + "string-width": "^3.1.0", + "strip-ansi": "^5.2.0", + "wrap-ansi": "^5.1.0" + }, + "dependencies": { + "emoji-regex": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", + "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==", + "dev": true + }, + "is-fullwidth-code-point": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", + "dev": true + }, + "string-width": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", + "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", + "dev": true, + "requires": { + "emoji-regex": "^7.0.1", + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^5.1.0" + } + } + } + }, + "clone-deep": { + "version": "0.2.4", + "resolved": "https://registry.npmjs.org/clone-deep/-/clone-deep-0.2.4.tgz", + "integrity": "sha1-TnPdCen7lxzDhnDF3O2cGJZIHMY=", + "dev": true, + "requires": { + "for-own": "^0.1.3", + "is-plain-object": "^2.0.1", + "kind-of": "^3.0.2", + "lazy-cache": "^1.0.3", + "shallow-clone": "^0.1.2" + }, + "dependencies": { + "kind-of": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", + "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", + "dev": true, + "requires": { + "is-buffer": "^1.1.5" + } + } + } + }, + "co": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/co/-/co-4.6.0.tgz", + "integrity": "sha1-bqa989hTrlTMuOR7+gvz+QMfsYQ=", + "dev": true + }, + "coa": { + 
"version": "2.0.2", + "resolved": "https://registry.npmjs.org/coa/-/coa-2.0.2.tgz", + "integrity": "sha512-q5/jG+YQnSy4nRTV4F7lPepBJZ8qBNJJDBuJdoejDyLXgmL7IEo+Le2JDZudFTFt7mrCqIRaSjws4ygRCTCAXA==", + "dev": true, + "requires": { + "@types/q": "^1.5.1", + "chalk": "^2.4.1", + "q": "^1.1.2" + } + }, + "code-point-at": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/code-point-at/-/code-point-at-1.1.0.tgz", + "integrity": "sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c=", + "dev": true + }, + "collection-visit": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/collection-visit/-/collection-visit-1.0.0.tgz", + "integrity": "sha1-S8A3PBZLwykbTTaMgpzxqApZ3KA=", + "requires": { + "map-visit": "^1.0.0", + "object-visit": "^1.0.0" + } + }, + "color": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/color/-/color-3.1.2.tgz", + "integrity": "sha512-vXTJhHebByxZn3lDvDJYw4lR5+uB3vuoHsuYA5AKuxRVn5wzzIfQKGLBmgdVRHKTJYeK5rvJcHnrd0Li49CFpg==", + "dev": true, + "requires": { + "color-convert": "^1.9.1", + "color-string": "^1.5.2" + } + }, + "color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "dev": true, + "requires": { + "color-name": "1.1.3" + } + }, + "color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=", + "dev": true + }, + "color-string": { + "version": "1.5.3", + "resolved": "https://registry.npmjs.org/color-string/-/color-string-1.5.3.tgz", + "integrity": "sha512-dC2C5qeWoYkxki5UAXapdjqO672AM4vZuPGRQfO8b5HKuKGBbKWpITyDYN7TOFKvRW7kOgAn3746clDBMDJyQw==", + "dev": true, + "requires": { + "color-name": "^1.0.0", + "simple-swizzle": "^0.2.2" + } + }, + "colors": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/colors/-/colors-1.0.3.tgz", + "integrity": "sha1-BDP0TYCWgP3rYO0mDxsMJi6CpAs=", + "dev": true + }, + "combined-stream": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.7.tgz", + "integrity": "sha512-brWl9y6vOB1xYPZcpZde3N9zDByXTosAeMDo4p1wzo6UMOX4vumB+TP1RZ76sfE6Md68Q0NJSrE/gbezd4Ul+w==", + "dev": true, + "requires": { + "delayed-stream": "~1.0.0" + } + }, + "commander": { + "version": "2.19.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.19.0.tgz", + "integrity": "sha512-6tvAOO+D6OENvRAh524Dh9jcfKTYDQAqvqezbCW82xj5X0pSrcpxtvRKHLG0yBY6SD7PSDrJaj+0AiOcKVd1Xg==" + }, + "common-tags": { + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/common-tags/-/common-tags-1.8.0.tgz", + "integrity": "sha512-6P6g0uetGpW/sdyUy/iQQCbFF0kWVMSIVSyYz7Zgjcgh8mgw8PQzDNZeyZ5DQ2gM7LBoZPHmnjz8rUthkBG5tw==", + "dev": true + }, + "commondir": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/commondir/-/commondir-1.0.1.tgz", + "integrity": "sha1-3dgA2gxmEnOTzKWVDqloo6rxJTs=", + "dev": true + }, + "component-emitter": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/component-emitter/-/component-emitter-1.2.1.tgz", + "integrity": "sha1-E3kY1teCg/ffemt8WmPhQOaUJeY=" + }, + "compose-function": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/compose-function/-/compose-function-3.0.3.tgz", + "integrity": "sha1-ntZ18TzFRQHTCVCkhv9qe6OrGF8=", + "dev": true, + "requires": { + "arity-n": "^1.0.4" + } + }, + "compressible": { + "version": 
"2.0.15", + "resolved": "https://registry.npmjs.org/compressible/-/compressible-2.0.15.tgz", + "integrity": "sha512-4aE67DL33dSW9gw4CI2H/yTxqHLNcxp0yS6jB+4h+wr3e43+1z7vm0HU9qXOH8j+qjKuL8+UtkOxYQSMq60Ylw==", + "dev": true, + "requires": { + "mime-db": ">= 1.36.0 < 2" + } + }, + "compression": { + "version": "1.7.4", + "resolved": "https://registry.npmjs.org/compression/-/compression-1.7.4.tgz", + "integrity": "sha512-jaSIDzP9pZVS4ZfQ+TzvtiWhdpFhE2RDHz8QJkpX9SIpLq88VueF5jJw6t+6CUQcAoA6t+x89MLrWAqpfDE8iQ==", + "dev": true, + "requires": { + "accepts": "~1.3.5", + "bytes": "3.0.0", + "compressible": "~2.0.16", + "debug": "2.6.9", + "on-headers": "~1.0.2", + "safe-buffer": "5.1.2", + "vary": "~1.1.2" + }, + "dependencies": { + "compressible": { + "version": "2.0.18", + "resolved": "https://registry.npmjs.org/compressible/-/compressible-2.0.18.tgz", + "integrity": "sha512-AF3r7P5dWxL8MxyITRMlORQNaOA2IkAFaTr4k7BUumjPtRpGDTZpl0Pb1XCO6JeDCBdp126Cgs9sMxqSjgYyRg==", + "dev": true, + "requires": { + "mime-db": ">= 1.43.0 < 2" + } + }, + "debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dev": true, + "requires": { + "ms": "2.0.0" + } + }, + "mime-db": { + "version": "1.43.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.43.0.tgz", + "integrity": "sha512-+5dsGEEovYbT8UY9yD7eE4XTc4UwJ1jBYlgaQQF38ENsKR3wj/8q8RFZrF9WIZpB2V1ArTVFUva8sAul1NzRzQ==", + "dev": true + } + } + }, + "concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=", + "dev": true + }, + "concat-stream": { + "version": "1.6.2", + "resolved": "https://registry.npmjs.org/concat-stream/-/concat-stream-1.6.2.tgz", + "integrity": "sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw==", + "dev": true, + "requires": { + "buffer-from": "^1.0.0", + "inherits": "^2.0.3", + "readable-stream": "^2.2.2", + "typedarray": "^0.0.6" + } + }, + "confusing-browser-globals": { + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/confusing-browser-globals/-/confusing-browser-globals-1.0.9.tgz", + "integrity": "sha512-KbS1Y0jMtyPgIxjO7ZzMAuUpAKMt1SzCL9fsrKsX6b0zJPTaT0SiSPmewwVZg9UAO83HVIlEhZF84LIjZ0lmAw==", + "dev": true + }, + "connect-history-api-fallback": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/connect-history-api-fallback/-/connect-history-api-fallback-1.6.0.tgz", + "integrity": "sha512-e54B99q/OUoH64zYYRf3HBP5z24G38h5D3qXu23JGRoigpX5Ss4r9ZnDk3g0Z8uQC2x2lPaJ+UlWBc1ZWBWdLg==", + "dev": true + }, + "console-browserify": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/console-browserify/-/console-browserify-1.1.0.tgz", + "integrity": "sha1-8CQcRXMKn8YyOyBtvzjtx0HQuxA=", + "dev": true, + "requires": { + "date-now": "^0.1.4" + } + }, + "constant-case": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/constant-case/-/constant-case-2.0.0.tgz", + "integrity": "sha1-QXV2TTidP6nI7NKRhu1gBSQ7akY=", + "dev": true, + "requires": { + "snake-case": "^2.1.0", + "upper-case": "^1.1.1" + } + }, + "constants-browserify": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/constants-browserify/-/constants-browserify-1.0.0.tgz", + "integrity": "sha1-wguW2MYXdIqvHBYCF2DNJ/y4y3U=", + "dev": true + }, + "contains-path": { + "version": "0.1.0", + "resolved": 
"https://registry.npmjs.org/contains-path/-/contains-path-0.1.0.tgz", + "integrity": "sha1-/ozxhP9mcLa67wGp1IYaXL7EEgo=", + "dev": true + }, + "content-disposition": { + "version": "0.5.3", + "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.3.tgz", + "integrity": "sha512-ExO0774ikEObIAEV9kDo50o+79VCUdEB6n6lzKgGwupcVeRlhrj3qGAfwq8G6uBJjkqLrhT0qEYFcWng8z1z0g==", + "dev": true, + "requires": { + "safe-buffer": "5.1.2" + } + }, + "content-type": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz", + "integrity": "sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==", + "dev": true + }, + "convert-source-map": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.7.0.tgz", + "integrity": "sha512-4FJkXzKXEDB1snCFZlLP4gpC3JILicCpGbzG9f9G7tGqGCzETQ2hWPrcinA9oU4wtf2biUaEH5065UnMeR33oA==", + "dev": true, + "requires": { + "safe-buffer": "~5.1.1" + } + }, + "cookie": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.4.0.tgz", + "integrity": "sha512-+Hp8fLp57wnUSt0tY0tHEXh4voZRDnoIrZPqlo3DPiI4y9lwg/jqx+1Om94/W6ZaPDOUbnjOt/99w66zk+l1Xg==", + "dev": true + }, + "cookie-signature": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", + "integrity": "sha1-4wOogrNCzD7oylE6eZmXNNqzriw=", + "dev": true + }, + "copy-concurrently": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/copy-concurrently/-/copy-concurrently-1.0.5.tgz", + "integrity": "sha512-f2domd9fsVDFtaFcbaRZuYXwtdmnzqbADSwhSWYxYB/Q8zsdUUFMXVRwXGDMWmbEzAn1kdRrtI1T/KTFOL4X2A==", + "dev": true, + "requires": { + "aproba": "^1.1.1", + "fs-write-stream-atomic": "^1.0.8", + "iferr": "^0.1.5", + "mkdirp": "^0.5.1", + "rimraf": "^2.5.4", + "run-queue": "^1.0.0" + } + }, + "copy-descriptor": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/copy-descriptor/-/copy-descriptor-0.1.1.tgz", + "integrity": "sha1-Z29us8OZl8LuGsOpJP1hJHSPV40=" + }, + "core-js": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/core-js/-/core-js-1.2.7.tgz", + "integrity": "sha1-ZSKUwUZR2yj6k70tX/KYOk8IxjY=" + }, + "core-js-compat": { + "version": "3.6.4", + "resolved": "https://registry.npmjs.org/core-js-compat/-/core-js-compat-3.6.4.tgz", + "integrity": "sha512-zAa3IZPvsJ0slViBQ2z+vgyyTuhd3MFn1rBQjZSKVEgB0UMYhUkCj9jJUVPgGTGqWvsBVmfnruXgTcNyTlEiSA==", + "dev": true, + "requires": { + "browserslist": "^4.8.3", + "semver": "7.0.0" + }, + "dependencies": { + "semver": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.0.0.tgz", + "integrity": "sha512-+GB6zVA9LWh6zovYQLALHwv5rb2PHGlJi3lfiqIHxR0uuwCgefcOJc59v9fv1w8GbStwxuuqqAjI9NMAOOgq1A==", + "dev": true + } + } + }, + "core-js-pure": { + "version": "3.6.4", + "resolved": "https://registry.npmjs.org/core-js-pure/-/core-js-pure-3.6.4.tgz", + "integrity": "sha512-epIhRLkXdgv32xIUFaaAry2wdxZYBi6bgM7cB136dzzXXa+dFyRLTZeLUJxnd8ShrmyVXBub63n2NHo2JAt8Cw==", + "dev": true + }, + "core-util-is": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", + "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=", + "dev": true + }, + "cosmiconfig": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-5.2.1.tgz", + "integrity": 
"sha512-H65gsXo1SKjf8zmrJ67eJk8aIRKV5ff2D4uKZIBZShbhGSpEmsQOPW/SKMKYhSTrqR7ufy6RP69rPogdaPh/kA==", + "dev": true, + "requires": { + "import-fresh": "^2.0.0", + "is-directory": "^0.3.1", + "js-yaml": "^3.13.1", + "parse-json": "^4.0.0" + }, + "dependencies": { + "parse-json": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-4.0.0.tgz", + "integrity": "sha1-vjX1Qlvh9/bHRxhPmKeIy5lHfuA=", + "dev": true, + "requires": { + "error-ex": "^1.3.1", + "json-parse-better-errors": "^1.0.1" + } + } + } + }, + "coveralls": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/coveralls/-/coveralls-3.0.2.tgz", + "integrity": "sha512-Tv0LKe/MkBOilH2v7WBiTBdudg2ChfGbdXafc/s330djpF3zKOmuehTeRwjXWc7pzfj9FrDUTA7tEx6Div8NFw==", + "dev": true, + "requires": { + "growl": "~> 1.10.0", + "js-yaml": "^3.11.0", + "lcov-parse": "^0.0.10", + "log-driver": "^1.2.7", + "minimist": "^1.2.0", + "request": "^2.85.0" + } + }, + "create-ecdh": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/create-ecdh/-/create-ecdh-4.0.3.tgz", + "integrity": "sha512-GbEHQPMOswGpKXM9kCWVrremUcBmjteUaQ01T9rkKCPDXfUHX0IoP9LpHYo2NPFampa4e+/pFDc3jQdxrxQLaw==", + "dev": true, + "requires": { + "bn.js": "^4.1.0", + "elliptic": "^6.0.0" + } + }, + "create-hash": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/create-hash/-/create-hash-1.2.0.tgz", + "integrity": "sha512-z00bCGNHDG8mHAkP7CtT1qVu+bFQUPjYq/4Iv3C3kWjTFV10zIjfSoeqXo9Asws8gwSHDGj/hl2u4OGIjapeCg==", + "dev": true, + "requires": { + "cipher-base": "^1.0.1", + "inherits": "^2.0.1", + "md5.js": "^1.3.4", + "ripemd160": "^2.0.1", + "sha.js": "^2.4.0" + } + }, + "create-hmac": { + "version": "1.1.7", + "resolved": "https://registry.npmjs.org/create-hmac/-/create-hmac-1.1.7.tgz", + "integrity": "sha512-MJG9liiZ+ogc4TzUwuvbER1JRdgvUFSB5+VR/g5h82fGaIRWMWddtKBHi7/sVhfjQZ6SehlyhvQYrcYkaUIpLg==", + "dev": true, + "requires": { + "cipher-base": "^1.0.3", + "create-hash": "^1.1.0", + "inherits": "^2.0.1", + "ripemd160": "^2.0.0", + "safe-buffer": "^5.0.1", + "sha.js": "^2.4.8" + } + }, + "cross-spawn": { + "version": "6.0.5", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz", + "integrity": "sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==", + "dev": true, + "requires": { + "nice-try": "^1.0.4", + "path-key": "^2.0.1", + "semver": "^5.5.0", + "shebang-command": "^1.2.0", + "which": "^1.2.9" + } + }, + "crypto-browserify": { + "version": "3.12.0", + "resolved": "https://registry.npmjs.org/crypto-browserify/-/crypto-browserify-3.12.0.tgz", + "integrity": "sha512-fz4spIh+znjO2VjL+IdhEpRJ3YN6sMzITSBijk6FK2UvTqruSQW+/cCZTSNsMiZNvUeq0CqurF+dAbyiGOY6Wg==", + "dev": true, + "requires": { + "browserify-cipher": "^1.0.0", + "browserify-sign": "^4.0.0", + "create-ecdh": "^4.0.0", + "create-hash": "^1.1.0", + "create-hmac": "^1.1.0", + "diffie-hellman": "^5.0.0", + "inherits": "^2.0.1", + "pbkdf2": "^3.0.3", + "public-encrypt": "^4.0.0", + "randombytes": "^2.0.0", + "randomfill": "^1.0.3" + } + }, + "css": { + "version": "2.2.4", + "resolved": "https://registry.npmjs.org/css/-/css-2.2.4.tgz", + "integrity": "sha512-oUnjmWpy0niI3x/mPL8dVEI1l7MnG3+HHyRPHf+YFSbK+svOhXpmSOcDURUh2aOCgl2grzrOPt1nHLuCVFULLw==", + "dev": true, + "requires": { + "inherits": "^2.0.3", + "source-map": "^0.6.1", + "source-map-resolve": "^0.5.2", + "urix": "^0.1.0" + }, + "dependencies": { + "source-map": { + "version": "0.6.1", + "resolved": 
"https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true + } + } + }, + "css-blank-pseudo": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/css-blank-pseudo/-/css-blank-pseudo-0.1.4.tgz", + "integrity": "sha512-LHz35Hr83dnFeipc7oqFDmsjHdljj3TQtxGGiNWSOsTLIAubSm4TEz8qCaKFpk7idaQ1GfWscF4E6mgpBysA1w==", + "dev": true, + "requires": { + "postcss": "^7.0.5" + } + }, + "css-color-keywords": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/css-color-keywords/-/css-color-keywords-1.0.0.tgz", + "integrity": "sha1-/qJhbcZ2spYmhrOvjb2+GAskTgU=" + }, + "css-color-names": { + "version": "0.0.4", + "resolved": "https://registry.npmjs.org/css-color-names/-/css-color-names-0.0.4.tgz", + "integrity": "sha1-gIrcLnnPhHOAabZGyyDsJ762KeA=", + "dev": true + }, + "css-declaration-sorter": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/css-declaration-sorter/-/css-declaration-sorter-4.0.1.tgz", + "integrity": "sha512-BcxQSKTSEEQUftYpBVnsH4SF05NTuBokb19/sBt6asXGKZ/6VP7PLG1CBCkFDYOnhXhPh0jMhO6xZ71oYHXHBA==", + "dev": true, + "requires": { + "postcss": "^7.0.1", + "timsort": "^0.3.0" + } + }, + "css-has-pseudo": { + "version": "0.10.0", + "resolved": "https://registry.npmjs.org/css-has-pseudo/-/css-has-pseudo-0.10.0.tgz", + "integrity": "sha512-Z8hnfsZu4o/kt+AuFzeGpLVhFOGO9mluyHBaA2bA8aCGTwah5sT3WV/fTHH8UNZUytOIImuGPrl/prlb4oX4qQ==", + "dev": true, + "requires": { + "postcss": "^7.0.6", + "postcss-selector-parser": "^5.0.0-rc.4" + }, + "dependencies": { + "cssesc": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/cssesc/-/cssesc-2.0.0.tgz", + "integrity": "sha512-MsCAG1z9lPdoO/IUMLSBWBSVxVtJ1395VGIQ+Fc2gNdkQ1hNDnQdw3YhA71WJCBW1vdwA0cAnk/DnW6bqoEUYg==", + "dev": true + }, + "postcss-selector-parser": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-5.0.0.tgz", + "integrity": "sha512-w+zLE5Jhg6Liz8+rQOWEAwtwkyqpfnmsinXjXg6cY7YIONZZtgvE0v2O0uhQBs0peNomOJwWRKt6JBfTdTd3OQ==", + "dev": true, + "requires": { + "cssesc": "^2.0.0", + "indexes-of": "^1.0.1", + "uniq": "^1.0.1" + } + } + } + }, + "css-loader": { + "version": "3.4.2", + "resolved": "https://registry.npmjs.org/css-loader/-/css-loader-3.4.2.tgz", + "integrity": "sha512-jYq4zdZT0oS0Iykt+fqnzVLRIeiPWhka+7BqPn+oSIpWJAHak5tmB/WZrJ2a21JhCeFyNnnlroSl8c+MtVndzA==", + "dev": true, + "requires": { + "camelcase": "^5.3.1", + "cssesc": "^3.0.0", + "icss-utils": "^4.1.1", + "loader-utils": "^1.2.3", + "normalize-path": "^3.0.0", + "postcss": "^7.0.23", + "postcss-modules-extract-imports": "^2.0.0", + "postcss-modules-local-by-default": "^3.0.2", + "postcss-modules-scope": "^2.1.1", + "postcss-modules-values": "^3.0.0", + "postcss-value-parser": "^4.0.2", + "schema-utils": "^2.6.0" + }, + "dependencies": { + "ajv-keywords": { + "version": "3.4.1", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.4.1.tgz", + "integrity": "sha512-RO1ibKvd27e6FEShVFfPALuHI3WjSVNeK5FIsmme/LYRNxjKuNj+Dt7bucLa6NdSv3JcVTyMlm9kGR84z1XpaQ==", + "dev": true + }, + "normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "dev": true + }, + "postcss-value-parser": { + "version": "4.0.3", + "resolved": 
"https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-4.0.3.tgz", + "integrity": "sha512-N7h4pG+Nnu5BEIzyeaaIYWs0LI5XC40OrRh5L60z0QjFsqGWcHcbkBvpe1WYpcIS9yQ8sOi/vIPt1ejQCrMVrg==", + "dev": true + }, + "schema-utils": { + "version": "2.6.5", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-2.6.5.tgz", + "integrity": "sha512-5KXuwKziQrTVHh8j/Uxz+QUbxkaLW9X/86NBlx/gnKgtsZA2GIVMUn17qWhRFwF8jdYb3Dig5hRO/W5mZqy6SQ==", + "dev": true, + "requires": { + "ajv": "^6.12.0", + "ajv-keywords": "^3.4.1" + } + } + } + }, + "css-prefers-color-scheme": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/css-prefers-color-scheme/-/css-prefers-color-scheme-3.1.1.tgz", + "integrity": "sha512-MTu6+tMs9S3EUqzmqLXEcgNRbNkkD/TGFvowpeoWJn5Vfq7FMgsmRQs9X5NXAURiOBmOxm/lLjsDNXDE6k9bhg==", + "dev": true, + "requires": { + "postcss": "^7.0.5" + } + }, + "css-select": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/css-select/-/css-select-1.2.0.tgz", + "integrity": "sha1-KzoRBTnFNV8c2NMUYj6HCxIeyFg=", + "dev": true, + "requires": { + "boolbase": "~1.0.0", + "css-what": "2.1", + "domutils": "1.5.1", + "nth-check": "~1.0.1" + } + }, + "css-select-base-adapter": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/css-select-base-adapter/-/css-select-base-adapter-0.1.1.tgz", + "integrity": "sha512-jQVeeRG70QI08vSTwf1jHxp74JoZsr2XSgETae8/xC8ovSnL2WF87GTLO86Sbwdt2lK4Umg4HnnwMO4YF3Ce7w==", + "dev": true + }, + "css-to-react-native": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/css-to-react-native/-/css-to-react-native-2.3.2.tgz", + "integrity": "sha512-VOFaeZA053BqvvvqIA8c9n0+9vFppVBAHCp6JgFTtTMU3Mzi+XnelJ9XC9ul3BqFzZyQ5N+H0SnwsWT2Ebchxw==", + "requires": { + "camelize": "^1.0.0", + "css-color-keywords": "^1.0.0", + "postcss-value-parser": "^3.3.0" + } + }, + "css-tree": { + "version": "1.0.0-alpha.37", + "resolved": "https://registry.npmjs.org/css-tree/-/css-tree-1.0.0-alpha.37.tgz", + "integrity": "sha512-DMxWJg0rnz7UgxKT0Q1HU/L9BeJI0M6ksor0OgqOnF+aRCDWg/N2641HmVyU9KVIu0OVVWOb2IpC9A+BJRnejg==", + "dev": true, + "requires": { + "mdn-data": "2.0.4", + "source-map": "^0.6.1" + }, + "dependencies": { + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true + } + } + }, + "css-vendor": { + "version": "0.3.8", + "resolved": "https://registry.npmjs.org/css-vendor/-/css-vendor-0.3.8.tgz", + "integrity": "sha1-ZCHP0wNM5mT+dnOXL9ARn8KJQfo=", + "requires": { + "is-in-browser": "^1.0.2" + } + }, + "css-what": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/css-what/-/css-what-2.1.2.tgz", + "integrity": "sha512-wan8dMWQ0GUeF7DGEPVjhHemVW/vy6xUYmFzRY8RYqgA0JtXC9rJmbScBjqSu6dg9q0lwPQy6ZAmJVr3PPTvqQ==", + "dev": true + }, + "cssdb": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/cssdb/-/cssdb-4.4.0.tgz", + "integrity": "sha512-LsTAR1JPEM9TpGhl/0p3nQecC2LJ0kD8X5YARu1hk/9I1gril5vDtMZyNxcEpxxDj34YNck/ucjuoUd66K03oQ==", + "dev": true + }, + "cssesc": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/cssesc/-/cssesc-3.0.0.tgz", + "integrity": "sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==", + "dev": true + }, + "cssnano": { + "version": "4.1.10", + "resolved": "https://registry.npmjs.org/cssnano/-/cssnano-4.1.10.tgz", + "integrity": 
"sha512-5wny+F6H4/8RgNlaqab4ktc3e0/blKutmq8yNlBFXA//nSFFAqAngjNVRzUvCgYROULmZZUoosL/KSoZo5aUaQ==", + "dev": true, + "requires": { + "cosmiconfig": "^5.0.0", + "cssnano-preset-default": "^4.0.7", + "is-resolvable": "^1.0.0", + "postcss": "^7.0.0" + } + }, + "cssnano-preset-default": { + "version": "4.0.7", + "resolved": "https://registry.npmjs.org/cssnano-preset-default/-/cssnano-preset-default-4.0.7.tgz", + "integrity": "sha512-x0YHHx2h6p0fCl1zY9L9roD7rnlltugGu7zXSKQx6k2rYw0Hi3IqxcoAGF7u9Q5w1nt7vK0ulxV8Lo+EvllGsA==", + "dev": true, + "requires": { + "css-declaration-sorter": "^4.0.1", + "cssnano-util-raw-cache": "^4.0.1", + "postcss": "^7.0.0", + "postcss-calc": "^7.0.1", + "postcss-colormin": "^4.0.3", + "postcss-convert-values": "^4.0.1", + "postcss-discard-comments": "^4.0.2", + "postcss-discard-duplicates": "^4.0.2", + "postcss-discard-empty": "^4.0.1", + "postcss-discard-overridden": "^4.0.1", + "postcss-merge-longhand": "^4.0.11", + "postcss-merge-rules": "^4.0.3", + "postcss-minify-font-values": "^4.0.2", + "postcss-minify-gradients": "^4.0.2", + "postcss-minify-params": "^4.0.2", + "postcss-minify-selectors": "^4.0.2", + "postcss-normalize-charset": "^4.0.1", + "postcss-normalize-display-values": "^4.0.2", + "postcss-normalize-positions": "^4.0.2", + "postcss-normalize-repeat-style": "^4.0.2", + "postcss-normalize-string": "^4.0.2", + "postcss-normalize-timing-functions": "^4.0.2", + "postcss-normalize-unicode": "^4.0.1", + "postcss-normalize-url": "^4.0.1", + "postcss-normalize-whitespace": "^4.0.2", + "postcss-ordered-values": "^4.1.2", + "postcss-reduce-initial": "^4.0.3", + "postcss-reduce-transforms": "^4.0.2", + "postcss-svgo": "^4.0.2", + "postcss-unique-selectors": "^4.0.1" + } + }, + "cssnano-util-get-arguments": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/cssnano-util-get-arguments/-/cssnano-util-get-arguments-4.0.0.tgz", + "integrity": "sha1-7ToIKZ8h11dBsg87gfGU7UnMFQ8=", + "dev": true + }, + "cssnano-util-get-match": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/cssnano-util-get-match/-/cssnano-util-get-match-4.0.0.tgz", + "integrity": "sha1-wOTKB/U4a7F+xeUiULT1lhNlFW0=", + "dev": true + }, + "cssnano-util-raw-cache": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/cssnano-util-raw-cache/-/cssnano-util-raw-cache-4.0.1.tgz", + "integrity": "sha512-qLuYtWK2b2Dy55I8ZX3ky1Z16WYsx544Q0UWViebptpwn/xDBmog2TLg4f+DBMg1rJ6JDWtn96WHbOKDWt1WQA==", + "dev": true, + "requires": { + "postcss": "^7.0.0" + } + }, + "cssnano-util-same-parent": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/cssnano-util-same-parent/-/cssnano-util-same-parent-4.0.1.tgz", + "integrity": "sha512-WcKx5OY+KoSIAxBW6UBBRay1U6vkYheCdjyVNDm85zt5K9mHoGOfsOsqIszfAqrQQFIIKgjh2+FDgIj/zsl21Q==", + "dev": true + }, + "csso": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/csso/-/csso-4.0.3.tgz", + "integrity": "sha512-NL3spysxUkcrOgnpsT4Xdl2aiEiBG6bXswAABQVHcMrfjjBisFOKwLDOmf4wf32aPdcJws1zds2B0Rg+jqMyHQ==", + "dev": true, + "requires": { + "css-tree": "1.0.0-alpha.39" + }, + "dependencies": { + "css-tree": { + "version": "1.0.0-alpha.39", + "resolved": "https://registry.npmjs.org/css-tree/-/css-tree-1.0.0-alpha.39.tgz", + "integrity": "sha512-7UvkEYgBAHRG9Nt980lYxjsTrCyHFN53ky3wVsDkiMdVqylqRt+Zc+jm5qw7/qyOvN2dHSYtX0e4MbCCExSvnA==", + "dev": true, + "requires": { + "mdn-data": "2.0.6", + "source-map": "^0.6.1" + } + }, + "mdn-data": { + "version": "2.0.6", + "resolved": 
"https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.6.tgz", + "integrity": "sha512-rQvjv71olwNHgiTbfPZFkJtjNMciWgswYeciZhtvWLO8bmX3TnhyA62I6sTWOyZssWHJJjY6/KiWwqQsWWsqOA==", + "dev": true + }, + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true + } + } + }, + "cssom": { + "version": "0.3.8", + "resolved": "https://registry.npmjs.org/cssom/-/cssom-0.3.8.tgz", + "integrity": "sha512-b0tGHbfegbhPJpxpiBPU2sCkigAqtM9O121le6bbOlgyV+NyGyCmVfJ6QW9eRjz8CpNfWEOYBIMIGRYkLwsIYg==", + "dev": true + }, + "cssstyle": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/cssstyle/-/cssstyle-1.4.0.tgz", + "integrity": "sha512-GBrLZYZ4X4x6/QEoBnIrqb8B/f5l4+8me2dkom/j1Gtbxy0kBv6OGzKuAsGM75bkGwGAFkt56Iwg28S3XTZgSA==", + "dev": true, + "requires": { + "cssom": "0.3.x" + } + }, + "csstype": { + "version": "2.6.0", + "resolved": "https://registry.npmjs.org/csstype/-/csstype-2.6.0.tgz", "integrity": "sha512-by8hi8BlLbowQq0qtkx54d9aN73R9oUW20HISpka5kmgsR9F7nnxgfsemuR2sdCKZh+CDNf5egW9UZMm4mgJRg==" }, "currently-unhandled": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/currently-unhandled/-/currently-unhandled-0.4.1.tgz", "integrity": "sha1-mI3zP+qxke95mmE2nddsF635V+o=", + "dev": true, "requires": { "array-find-index": "^1.0.1" } @@ -3590,14 +6524,17 @@ "cyclist": { "version": "0.2.2", "resolved": "https://registry.npmjs.org/cyclist/-/cyclist-0.2.2.tgz", - "integrity": "sha1-GzN5LhHpFKL9bW7WRHRkRE5fpkA=" + "integrity": "sha1-GzN5LhHpFKL9bW7WRHRkRE5fpkA=", + "dev": true }, "d": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/d/-/d-1.0.0.tgz", - "integrity": "sha1-dUu1v+VUUdpppYuU1F9MWwRi1Y8=", + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/d/-/d-1.0.1.tgz", + "integrity": "sha512-m62ShEObQ39CfralilEQRjH6oAMtNCV1xJyEx5LpRYUVN+EviphDgUc/F3hnYbADmkiNs67Y+3ylmlG7Lnu+FA==", + "dev": true, "requires": { - "es5-ext": "^0.10.9" + "es5-ext": "^0.10.50", + "type": "^1.0.1" } }, "d3": { @@ -3888,10 +6825,17 @@ "lodash": "^4.17.4" } }, + "damerau-levenshtein": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/damerau-levenshtein/-/damerau-levenshtein-1.0.6.tgz", + "integrity": "sha512-JVrozIeElnj3QzfUIt8tB8YMluBJom4Vw9qTPpjGYQ9fYlB3D/rb6OordUxf3xeFB35LKWs0xqcO5U6ySvBtug==", + "dev": true + }, "dashdash": { "version": "1.14.1", "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz", "integrity": "sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA=", + "dev": true, "requires": { "assert-plus": "^1.0.0" } @@ -3900,6 +6844,7 @@ "version": "1.1.0", "resolved": "https://registry.npmjs.org/data-urls/-/data-urls-1.1.0.tgz", "integrity": "sha512-YTWYI9se1P55u58gL5GkQHW4P6VJBJ5iBT+B5a7i2Tjadhv52paJG0qHX4A0OR6/t52odI64KP2YvFpkDOi3eQ==", + "dev": true, "requires": { "abab": "^2.0.0", "whatwg-mimetype": "^2.2.0", @@ -3907,9 +6852,10 @@ }, "dependencies": { "whatwg-url": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-7.0.0.tgz", - "integrity": "sha512-37GeVSIJ3kn1JgKyjiYNmSLP1yzbpb29jdmwBSgkD9h40/hyrR/OifpVUndji3tmwGgD8qpw7iQu3RSbCrBpsQ==", + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-7.1.0.tgz", + "integrity": "sha512-WUu7Rg1DroM7oQvGWfOiAK21n74Gg+T4elXEQYkOhtyLeWiJFoOGLXPKI/9gzIie9CtwVLm8wtw6YJdKyxSjeg==", + "dev": true, "requires": { "lodash.sortby": "^4.7.0", "tr46": "^1.0.1", @@ 
-3918,16 +6864,17 @@ } } }, - "date-format": { - "version": "0.0.2", - "resolved": "https://registry.npmjs.org/date-format/-/date-format-0.0.2.tgz", - "integrity": "sha1-+v1Ej3IRXvHitzkVWukvK+bCjdE=", + "date-and-time": { + "version": "0.11.0", + "resolved": "https://registry.npmjs.org/date-and-time/-/date-and-time-0.11.0.tgz", + "integrity": "sha512-VyzhHurex4wlg9oMszn7O+kxHchphWjzDn7Mv0WfkFKI6hSNOQePpTBFGsnRakvLNzQKXqPBAVV8DOxUGtUxqA==", "dev": true }, "date-now": { "version": "0.1.4", "resolved": "https://registry.npmjs.org/date-now/-/date-now-0.1.4.tgz", - "integrity": "sha1-6vQ5/U1ISK105cx9vvIAZyueNFs=" + "integrity": "sha1-6vQ5/U1ISK105cx9vvIAZyueNFs=", + "dev": true }, "dateformat": { "version": "1.0.12", @@ -3955,7 +6902,8 @@ "decamelize": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", - "integrity": "sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=" + "integrity": "sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=", + "dev": true }, "decode-uri-component": { "version": "0.2.0", @@ -3967,33 +6915,32 @@ "resolved": "https://registry.npmjs.org/deep-equal/-/deep-equal-1.0.1.tgz", "integrity": "sha1-9dJgKStmDghO/0zbyfCK0yR0SLU=" }, - "deep-extend": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.6.0.tgz", - "integrity": "sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==" - }, "deep-is": { "version": "0.1.3", "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.3.tgz", - "integrity": "sha1-s2nW+128E+7PUk+RsHD+7cNXzzQ=" + "integrity": "sha1-s2nW+128E+7PUk+RsHD+7cNXzzQ=", + "dev": true }, "deepmerge": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-3.0.0.tgz", "integrity": "sha512-a8z8bkgHsAML+uHLqmMS83HHlpy3PvZOOuiTQqaa3wu8ZVg3h0hqHk6aCsGdOnZV2XMM/FRimNGjUh0KCcmHBw==" }, - "default-require-extensions": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/default-require-extensions/-/default-require-extensions-1.0.0.tgz", - "integrity": "sha1-836hXT4T/9m0N9M+GnW1+5eHTLg=", + "default-gateway": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/default-gateway/-/default-gateway-4.2.0.tgz", + "integrity": "sha512-h6sMrVB1VMWVrW13mSc6ia/DwYYw5MN6+exNu1OaJeFac5aSAvwM7lZ0NVfTABuSkQelr4h5oebg3KB1XPdjgA==", + "dev": true, "requires": { - "strip-bom": "^2.0.0" + "execa": "^1.0.0", + "ip-regex": "^2.1.0" } }, "define-properties": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.3.tgz", "integrity": "sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ==", + "dev": true, "requires": { "object-keys": "^1.0.12" } @@ -4035,39 +6982,73 @@ } } }, - "defined": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/defined/-/defined-1.0.0.tgz", - "integrity": "sha1-yY2bzvdWdBiOEQlpFRGZ45sfppM=" - }, - "del": { - "version": "2.2.2", - "resolved": "https://registry.npmjs.org/del/-/del-2.2.2.tgz", - "integrity": "sha1-wSyYHQZ4RshLyvhiz/kw2Qf/0ag=", - "requires": { - "globby": "^5.0.0", - "is-path-cwd": "^1.0.0", - "is-path-in-cwd": "^1.0.0", - "object-assign": "^4.0.1", - "pify": "^2.0.0", - "pinkie-promise": "^2.0.0", - "rimraf": "^2.2.8" - } - }, + "del": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/del/-/del-4.1.1.tgz", + "integrity": "sha512-QwGuEUouP2kVwQenAsOof5Fv8K9t3D8Ca8NxcXKrIpEHjTXK5J2nXLdP+ALI1cgv8wj7KuwBhTwBkOZSJKM5XQ==", + "dev": true, + "requires": { + "@types/glob": "^7.1.1", + 
"globby": "^6.1.0", + "is-path-cwd": "^2.0.0", + "is-path-in-cwd": "^2.0.0", + "p-map": "^2.0.0", + "pify": "^4.0.1", + "rimraf": "^2.6.3" + }, + "dependencies": { + "globby": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/globby/-/globby-6.1.0.tgz", + "integrity": "sha1-9abXDoOV4hyFj7BInWTfAkJNUGw=", + "dev": true, + "requires": { + "array-union": "^1.0.1", + "glob": "^7.0.3", + "object-assign": "^4.0.1", + "pify": "^2.0.0", + "pinkie-promise": "^2.0.0" + }, + "dependencies": { + "pify": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", + "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw=", + "dev": true + } + } + }, + "p-map": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/p-map/-/p-map-2.1.0.tgz", + "integrity": "sha512-y3b8Kpd8OAN444hxfBbFfj1FY/RjtTd8tzYwhUqNYXx0fXx2iX4maP4Qr6qhIKbQXI02wTLAda4fYUbDagTUFw==", + "dev": true + }, + "pify": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz", + "integrity": "sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==", + "dev": true + } + } + }, "delayed-stream": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", - "integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=" + "integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=", + "dev": true }, "depd": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz", - "integrity": "sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak=" + "integrity": "sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak=", + "dev": true }, "des.js": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/des.js/-/des.js-1.0.0.tgz", "integrity": "sha1-wHTS4qpqipoH29YfmhXCzYPsjsw=", + "dev": true, "requires": { "inherits": "^2.0.1", "minimalistic-assert": "^1.0.0" @@ -4076,30 +7057,26 @@ "destroy": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.0.4.tgz", - "integrity": "sha1-l4hXRCxEdJ5CBmE+N5RiBYJqvYA=" - }, - "detect-indent": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/detect-indent/-/detect-indent-4.0.0.tgz", - "integrity": "sha1-920GQ1LN9Docts5hnE7jqUdd4gg=", - "requires": { - "repeating": "^2.0.0" - } + "integrity": "sha1-l4hXRCxEdJ5CBmE+N5RiBYJqvYA=", + "dev": true }, "detect-newline": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/detect-newline/-/detect-newline-2.1.0.tgz", - "integrity": "sha1-9B8cEL5LAOh7XxPaaAdZ8sW/0+I=" + "integrity": "sha1-9B8cEL5LAOh7XxPaaAdZ8sW/0+I=", + "dev": true }, "detect-node": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/detect-node/-/detect-node-2.0.4.tgz", - "integrity": "sha512-ZIzRpLJrOj7jjP2miAtgqIfmzbxa4ZOr5jJc601zklsfEx9oTzmmj2nVpIPRpNlRTIh8lc1kyViIY7BWSGNmKw==" + "integrity": "sha512-ZIzRpLJrOj7jjP2miAtgqIfmzbxa4ZOr5jJc601zklsfEx9oTzmmj2nVpIPRpNlRTIh8lc1kyViIY7BWSGNmKw==", + "dev": true }, "detect-port-alt": { "version": "1.1.6", "resolved": "https://registry.npmjs.org/detect-port-alt/-/detect-port-alt-1.1.6.tgz", "integrity": "sha512-5tQykt+LqfJFBEYaDITx7S7cR7mJ/zQmLXZ2qt5w04ainYZw6tBf9dBunMjVeVOdYVRUzUOE4HkY5J7+uttb5Q==", + "dev": true, "requires": { "address": "^1.0.1", "debug": "^2.6.0" @@ -4109,6 +7086,7 @@ "version": "2.6.9", "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dev": true, "requires": { "ms": "2.0.0" } @@ -4118,517 +7096,1224 @@ "diff": { "version": 
"3.5.0", "resolved": "https://registry.npmjs.org/diff/-/diff-3.5.0.tgz", - "integrity": "sha512-A46qtFgd+g7pDZinpnwiRJtxbC1hpgf0uzP3iG89scHk0AUC7A1TGxf5OiiOUv/JMZR8GOt8hL900hV0bOy5xA==" + "integrity": "sha512-A46qtFgd+g7pDZinpnwiRJtxbC1hpgf0uzP3iG89scHk0AUC7A1TGxf5OiiOUv/JMZR8GOt8hL900hV0bOy5xA==", + "dev": true }, "diff-match-patch": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/diff-match-patch/-/diff-match-patch-1.0.4.tgz", "integrity": "sha512-Uv3SW8bmH9nAtHKaKSanOQmj2DnlH65fUpcrMdfdaOxUG02QQ4YGZ8AE7kKOMisF7UqvOlGKVYWRvezdncW9lg==" }, + "diff-sequences": { + "version": "24.9.0", + "resolved": "https://registry.npmjs.org/diff-sequences/-/diff-sequences-24.9.0.tgz", + "integrity": "sha512-Dj6Wk3tWyTE+Fo1rW8v0Xhwk80um6yFYKbuAxc9c3EZxIHFDYwbi34Uk42u1CdnIiVorvt4RmlSDjIPyzGC2ew==", + "dev": true + }, "diffie-hellman": { "version": "5.0.3", "resolved": "https://registry.npmjs.org/diffie-hellman/-/diffie-hellman-5.0.3.tgz", "integrity": "sha512-kqag/Nl+f3GwyK25fhUMYj81BUOrZ9IuJsjIcDE5icNM9FJHAVm3VcUDxdLPoQtTuUylWm6ZIknYJwwaPxsUzg==", + "dev": true, "requires": { "bn.js": "^4.1.0", "miller-rabin": "^4.0.0", "randombytes": "^2.0.0" } }, + "dir-glob": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-2.0.0.tgz", + "integrity": "sha512-37qirFDz8cA5fimp9feo43fSuRo2gHwaIn6dXL8Ber1dGwUosDrGZeCCXq57WnIqE4aQ+u3eQZzsk1yOzhdwag==", + "dev": true, + "requires": { + "arrify": "^1.0.1", + "path-type": "^3.0.0" + }, + "dependencies": { + "path-type": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-3.0.0.tgz", + "integrity": "sha512-T2ZUsdZFHgA3u4e5PfPbjd7HDDpxPnQb5jN0SrDsjNSuVXHJqtwTnWqG0B1jZrgmJ/7lj1EmVIByWt1gxGkWvg==", + "dev": true, + "requires": { + "pify": "^3.0.0" + } + }, + "pify": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", + "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=", + "dev": true + } + } + }, "discontinuous-range": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/discontinuous-range/-/discontinuous-range-1.0.0.tgz", "integrity": "sha1-44Mx8IRLukm5qctxx3FYWqsbxlo=", "dev": true }, - "diverged": { + "dns-equal": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/dns-equal/-/dns-equal-1.0.0.tgz", + "integrity": "sha1-s55/HabrCnW6nBcySzR1PEfgZU0=", + "dev": true + }, + "dns-packet": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/dns-packet/-/dns-packet-1.3.1.tgz", + "integrity": "sha512-0UxfQkMhYAUaZI+xrNZOz/as5KgDU0M/fQ9b6SpkyLbk3GEswDi6PADJVaYJradtRVsRIlF1zLyOodbcTCDzUg==", + "dev": true, + "requires": { + "ip": "^1.1.0", + "safe-buffer": "^5.0.1" + } + }, + "dns-txt": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/dns-txt/-/dns-txt-2.0.2.tgz", + "integrity": "sha1-uR2Ab10nGI5Ks+fRB9iBocxGQrY=", + "dev": true, + "requires": { + "buffer-indexof": "^1.0.0" + } + }, + "doctrine": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", + "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==", + "dev": true, + "requires": { + "esutils": "^2.0.2" + } + }, + "dom-converter": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/dom-converter/-/dom-converter-0.2.0.tgz", + "integrity": "sha512-gd3ypIPfOMr9h5jIKq8E3sHOTCjeirnl0WK5ZdS1AW0Odt0b1PaWaHdJ4Qk4klv+YB9aJBS7mESXjFoDQPu6DA==", + "dev": true, + "requires": { + "utila": "~0.4" + } + }, + "dom-helpers": { + "version": "3.4.0", + "resolved": 
"https://registry.npmjs.org/dom-helpers/-/dom-helpers-3.4.0.tgz", + "integrity": "sha512-LnuPJ+dwqKDIyotW1VzmOZ5TONUN7CwkCR5hrgawTUbkBGYdeoNLZo6nNfGkCrjtE1nXXaj7iMMpDa8/d9WoIA==", + "requires": { + "@babel/runtime": "^7.1.2" + } + }, + "dom-serializer": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-0.1.0.tgz", + "integrity": "sha1-BzxpdUbOB4DOI75KKOKT5AvDDII=", + "dev": true, + "requires": { + "domelementtype": "~1.1.1", + "entities": "~1.1.1" + }, + "dependencies": { + "domelementtype": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-1.1.3.tgz", + "integrity": "sha1-vSh3PiZCiBrsUVRJJCmcXNgiGFs=", + "dev": true + } + } + }, + "dom-walk": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/dom-walk/-/dom-walk-0.1.1.tgz", + "integrity": "sha1-ZyIm3HTI95mtNTB9+TaroRrNYBg=" + }, + "domain-browser": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/domain-browser/-/domain-browser-1.2.0.tgz", + "integrity": "sha512-jnjyiM6eRyZl2H+W8Q/zLMA481hzi0eszAaBUzIVnmYVDBbnLxVNnfu1HgEBvCbL+71FrxMl3E6lpKH7Ge3OXA==", + "dev": true + }, + "domelementtype": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-1.3.1.tgz", + "integrity": "sha512-BSKB+TSpMpFI/HOxCNr1O8aMOTZ8hT3pM3GQ0w/mWRmkhEDSFJkkyzz4XQsBV44BChwGkrDfMyjVD0eA2aFV3w==", + "dev": true + }, + "domexception": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/domexception/-/domexception-1.0.1.tgz", + "integrity": "sha512-raigMkn7CJNNo6Ihro1fzG7wr3fHuYVytzquZKX5n0yizGsTcYgzdIUwj1X9pK0VvjeihV+XiclP+DjwbsSKug==", + "dev": true, + "requires": { + "webidl-conversions": "^4.0.2" + } + }, + "domhandler": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/domhandler/-/domhandler-2.4.2.tgz", + "integrity": "sha512-JiK04h0Ht5u/80fdLMCEmV4zkNh2BcoMFBmZ/91WtYZ8qVXSKjiw7fXMgFPnHcSZgOo3XdinHvmnDUeMf5R4wA==", + "dev": true, + "requires": { + "domelementtype": "1" + } + }, + "domutils": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/domutils/-/domutils-1.5.1.tgz", + "integrity": "sha1-3NhIiib1Y9YQeeSMn3t+Mjc2gs8=", + "dev": true, + "requires": { + "dom-serializer": "0", + "domelementtype": "1" + } + }, + "dot-case": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/dot-case/-/dot-case-2.1.1.tgz", + "integrity": "sha1-NNzzf1Co6TwrO8qLt/uRVcfaO+4=", + "dev": true, + "requires": { + "no-case": "^2.2.0" + } + }, + "dot-prop": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-5.2.0.tgz", + "integrity": "sha512-uEUyaDKoSQ1M4Oq8l45hSE26SnTxL6snNnqvK/VWx5wJhmff5z0FUVJDKDanor/6w3kzE3i7XZOk+7wC0EXr1A==", + "dev": true, + "requires": { + "is-obj": "^2.0.0" + } + }, + "dotenv": { + "version": "8.2.0", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-8.2.0.tgz", + "integrity": "sha512-8sJ78ElpbDJBHNeBzUbUVLsqKdccaa/BXF1uPTw3GrvQTBgrQrtObr2mUrE38vzYd8cEv+m/JBfDLioYcfXoaw==", + "dev": true + }, + "dotenv-expand": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/dotenv-expand/-/dotenv-expand-5.1.0.tgz", + "integrity": "sha512-YXQl1DSa4/PQyRfgrv6aoNjhasp/p4qs9FjJ4q4cQk+8m4r6k4ZSiEyytKG8f8W9gi8WsQtIObNmKd+tMzNTmA==", + "dev": true + }, + "duplexer": { + "version": "0.1.1", + "resolved": "http://registry.npmjs.org/duplexer/-/duplexer-0.1.1.tgz", + "integrity": "sha1-rOb/gIwc5mtX0ev5eXessCM0z8E=", + "dev": true + }, + "duplexify": { + "version": "3.6.1", + "resolved": 
"https://registry.npmjs.org/duplexify/-/duplexify-3.6.1.tgz", + "integrity": "sha512-vM58DwdnKmty+FSPzT14K9JXb90H+j5emaR4KYbr2KTIz00WHGbWOe5ghQTx233ZCLZtrGDALzKwcjEtSt35mA==", + "dev": true, + "requires": { + "end-of-stream": "^1.0.0", + "inherits": "^2.0.1", + "readable-stream": "^2.0.0", + "stream-shift": "^1.0.0" + } + }, + "dynamic-dedupe": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/dynamic-dedupe/-/dynamic-dedupe-0.3.0.tgz", + "integrity": "sha1-BuRMIj9eTpTXjvnbI6ZRXOL5YqE=", + "dev": true, + "requires": { + "xtend": "^4.0.0" + } + }, + "ecc-jsbn": { "version": "0.1.2", - "resolved": "https://registry.npmjs.org/diverged/-/diverged-0.1.2.tgz", - "integrity": "sha512-geHB/gyYXINy1H684Oufq8n7ig5Hsm4lHH6tVDLjVfjQhKReWdAnQYXStxKHWQC13E7OI+IOZqbX8jiEG7qbfA==", + "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz", + "integrity": "sha1-OoOpBOVDUyh4dMVkt1SThoSamMk=", + "dev": true, + "requires": { + "jsbn": "~0.1.0", + "safer-buffer": "^2.1.0" + } + }, + "ecdsa-sig-formatter": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz", + "integrity": "sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ==", + "dev": true, + "requires": { + "safe-buffer": "^5.0.1" + } + }, + "ee-first": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", + "integrity": "sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0=", + "dev": true + }, + "ejs": { + "version": "2.7.4", + "resolved": "https://registry.npmjs.org/ejs/-/ejs-2.7.4.tgz", + "integrity": "sha512-7vmuyh5+kuUyJKePhQfRQBhXV5Ce+RnaeeQArKu1EAMpL3WbgMt5WG6uQZpEVvYSSsxMXRKOewtDk9RaTKXRlA==", + "dev": true + }, + "electron-to-chromium": { + "version": "1.3.359", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.3.359.tgz", + "integrity": "sha512-ewZp4BQftbLclBwmFYoTrlyiLMXQTiYeqh1hn24sWao9bGhUHzQtpytymN8JsenWlQ9SbBWynTPvfghb0Ipn1Q==", + "dev": true + }, + "elliptic": { + "version": "6.4.1", + "resolved": "https://registry.npmjs.org/elliptic/-/elliptic-6.4.1.tgz", + "integrity": "sha512-BsXLz5sqX8OHcsh7CqBMztyXARmGQ3LWPtGjJi6DiJHq5C/qvi9P3OqgswKSDftbu8+IoI/QDTAm2fFnQ9SZSQ==", "dev": true, "requires": { - "diff": "^3.5.0", - "pixelmatch": "^4.0.2", - "pngjs": "^3.3.3", - "super-simple-web-server": "^1.0.0" + "bn.js": "^4.4.0", + "brorand": "^1.0.1", + "hash.js": "^1.0.0", + "hmac-drbg": "^1.0.0", + "inherits": "^2.0.1", + "minimalistic-assert": "^1.0.0", + "minimalistic-crypto-utils": "^1.0.0" + } + }, + "emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "emojis-list": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/emojis-list/-/emojis-list-2.1.0.tgz", + "integrity": "sha1-TapNnbAPmBmIDHn6RXrlsJof04k=", + "dev": true + }, + "encodeurl": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", + "integrity": "sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k=", + "dev": true + }, + "encoding": { + "version": "0.1.12", + "resolved": "https://registry.npmjs.org/encoding/-/encoding-0.1.12.tgz", + "integrity": "sha1-U4tm8+5izRq1HsMjgp0flIDHS+s=", + "requires": { + "iconv-lite": "~0.4.13" } }, - "dns-equal": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/dns-equal/-/dns-equal-1.0.0.tgz", - 
"integrity": "sha1-s55/HabrCnW6nBcySzR1PEfgZU0=" - }, - "dns-packet": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/dns-packet/-/dns-packet-1.3.1.tgz", - "integrity": "sha512-0UxfQkMhYAUaZI+xrNZOz/as5KgDU0M/fQ9b6SpkyLbk3GEswDi6PADJVaYJradtRVsRIlF1zLyOodbcTCDzUg==", + "end-of-stream": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.1.tgz", + "integrity": "sha512-1MkrZNvWTKCaigbn+W15elq2BB/L22nqrSY5DKlo3X6+vclJm8Bb5djXJBmEX6fS3+zCh/F4VBK5Z2KxJt4s2Q==", + "dev": true, "requires": { - "ip": "^1.1.0", - "safe-buffer": "^5.0.1" + "once": "^1.4.0" } }, - "dns-txt": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/dns-txt/-/dns-txt-2.0.2.tgz", - "integrity": "sha1-uR2Ab10nGI5Ks+fRB9iBocxGQrY=", + "enhanced-resolve": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-4.1.1.tgz", + "integrity": "sha512-98p2zE+rL7/g/DzMHMTF4zZlCgeVdJ7yr6xzEpJRYwFYrGi9ANdn5DnJURg6RpBkyk60XYDnWIv51VfIhfNGuA==", + "dev": true, "requires": { - "buffer-indexof": "^1.0.0" + "graceful-fs": "^4.1.2", + "memory-fs": "^0.5.0", + "tapable": "^1.0.0" + }, + "dependencies": { + "memory-fs": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/memory-fs/-/memory-fs-0.5.0.tgz", + "integrity": "sha512-jA0rdU5KoQMC0e6ppoNRtpp6vjFq6+NY7r8hywnC7V+1Xj/MtHwGIbB1QaK/dunyjWteJzmkpd7ooeWg10T7GA==", + "dev": true, + "requires": { + "errno": "^0.1.3", + "readable-stream": "^2.0.1" + } + } } }, - "dom-converter": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/dom-converter/-/dom-converter-0.2.0.tgz", - "integrity": "sha512-gd3ypIPfOMr9h5jIKq8E3sHOTCjeirnl0WK5ZdS1AW0Odt0b1PaWaHdJ4Qk4klv+YB9aJBS7mESXjFoDQPu6DA==", + "ent": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/ent/-/ent-2.2.0.tgz", + "integrity": "sha1-6WQhkyWiHQX0RGai9obtbOX13R0=", + "dev": true + }, + "entities": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/entities/-/entities-1.1.2.tgz", + "integrity": "sha512-f2LZMYl1Fzu7YSBKg+RoROelpOaNrcGmE9AZubeDfrCEia483oW4MI4VyFd5VNHIgQ/7qm1I0wUHK1eJnn2y2w==", + "dev": true + }, + "enzyme": { + "version": "3.10.0", + "resolved": "https://registry.npmjs.org/enzyme/-/enzyme-3.10.0.tgz", + "integrity": "sha512-p2yy9Y7t/PFbPoTvrWde7JIYB2ZyGC+NgTNbVEGvZ5/EyoYSr9aG/2rSbVvyNvMHEhw9/dmGUJHWtfQIEiX9pg==", + "dev": true, "requires": { - "utila": "~0.4" + "array.prototype.flat": "^1.2.1", + "cheerio": "^1.0.0-rc.2", + "function.prototype.name": "^1.1.0", + "has": "^1.0.3", + "html-element-map": "^1.0.0", + "is-boolean-object": "^1.0.0", + "is-callable": "^1.1.4", + "is-number-object": "^1.0.3", + "is-regex": "^1.0.4", + "is-string": "^1.0.4", + "is-subset": "^0.1.1", + "lodash.escape": "^4.0.1", + "lodash.isequal": "^4.5.0", + "object-inspect": "^1.6.0", + "object-is": "^1.0.1", + "object.assign": "^4.1.0", + "object.entries": "^1.0.4", + "object.values": "^1.0.4", + "raf": "^3.4.0", + "rst-selector-parser": "^2.2.3", + "string.prototype.trim": "^1.1.2" } }, - "dom-helpers": { - "version": "3.4.0", - "resolved": "https://registry.npmjs.org/dom-helpers/-/dom-helpers-3.4.0.tgz", - "integrity": "sha512-LnuPJ+dwqKDIyotW1VzmOZ5TONUN7CwkCR5hrgawTUbkBGYdeoNLZo6nNfGkCrjtE1nXXaj7iMMpDa8/d9WoIA==", + "enzyme-adapter-react-16": { + "version": "1.15.1", + "resolved": "https://registry.npmjs.org/enzyme-adapter-react-16/-/enzyme-adapter-react-16-1.15.1.tgz", + "integrity": 
"sha512-yMPxrP3vjJP+4wL/qqfkT6JAIctcwKF+zXO6utlGPgUJT2l4tzrdjMDWGd/Pp1BjHBcljhN24OzNEGRteibJhA==", + "dev": true, "requires": { - "@babel/runtime": "^7.1.2" + "enzyme-adapter-utils": "^1.12.1", + "enzyme-shallow-equal": "^1.0.0", + "has": "^1.0.3", + "object.assign": "^4.1.0", + "object.values": "^1.1.0", + "prop-types": "^15.7.2", + "react-is": "^16.10.2", + "react-test-renderer": "^16.0.0-0", + "semver": "^5.7.0" + }, + "dependencies": { + "prop-types": { + "version": "15.7.2", + "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.7.2.tgz", + "integrity": "sha512-8QQikdH7//R2vurIJSutZ1smHYTcLpRWEOlHnzcWHmBYrOGUysKwSsrC89BCiFj3CbrfJ/nXFdJepOVrY1GCHQ==", + "dev": true, + "requires": { + "loose-envify": "^1.4.0", + "object-assign": "^4.1.1", + "react-is": "^16.8.1" + } + }, + "react-is": { + "version": "16.10.2", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.10.2.tgz", + "integrity": "sha512-INBT1QEgtcCCgvccr5/86CfD71fw9EPmDxgiJX4I2Ddr6ZsV6iFXsuby+qWJPtmNuMY0zByTsG4468P7nHuNWA==", + "dev": true + }, + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "dev": true + } } }, - "dom-serializer": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-0.1.0.tgz", - "integrity": "sha1-BzxpdUbOB4DOI75KKOKT5AvDDII=", + "enzyme-adapter-utils": { + "version": "1.12.1", + "resolved": "https://registry.npmjs.org/enzyme-adapter-utils/-/enzyme-adapter-utils-1.12.1.tgz", + "integrity": "sha512-KWiHzSjZaLEoDCOxY8Z1RAbUResbqKN5bZvenPbfKtWorJFVETUw754ebkuCQ3JKm0adx1kF8JaiR+PHPiP47g==", + "dev": true, "requires": { - "domelementtype": "~1.1.1", - "entities": "~1.1.1" + "airbnb-prop-types": "^2.15.0", + "function.prototype.name": "^1.1.1", + "object.assign": "^4.1.0", + "object.fromentries": "^2.0.1", + "prop-types": "^15.7.2", + "semver": "^5.7.0" }, "dependencies": { - "domelementtype": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-1.1.3.tgz", - "integrity": "sha1-vSh3PiZCiBrsUVRJJCmcXNgiGFs=" + "prop-types": { + "version": "15.7.2", + "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.7.2.tgz", + "integrity": "sha512-8QQikdH7//R2vurIJSutZ1smHYTcLpRWEOlHnzcWHmBYrOGUysKwSsrC89BCiFj3CbrfJ/nXFdJepOVrY1GCHQ==", + "dev": true, + "requires": { + "loose-envify": "^1.4.0", + "object-assign": "^4.1.1", + "react-is": "^16.8.1" + } + }, + "react-is": { + "version": "16.10.2", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.10.2.tgz", + "integrity": "sha512-INBT1QEgtcCCgvccr5/86CfD71fw9EPmDxgiJX4I2Ddr6ZsV6iFXsuby+qWJPtmNuMY0zByTsG4468P7nHuNWA==", + "dev": true + }, + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "dev": true } } }, - "dom-urls": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/dom-urls/-/dom-urls-1.1.0.tgz", - "integrity": "sha1-AB3fgWKM0ecGElxxdvU8zsVdkY4=", + "enzyme-shallow-equal": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/enzyme-shallow-equal/-/enzyme-shallow-equal-1.0.0.tgz", + "integrity": "sha512-VUf+q5o1EIv2ZaloNQQtWCJM9gpeux6vudGVH6vLmfPXFLRuxl5+Aq3U260wof9nn0b0i+P5OEUXm1vnxkRpXQ==", + "dev": true, "requires": { - "urijs": "^1.16.1" + "has": "^1.0.3", + 
"object-is": "^1.0.1" } }, - "dom-walk": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/dom-walk/-/dom-walk-0.1.1.tgz", - "integrity": "sha1-ZyIm3HTI95mtNTB9+TaroRrNYBg=" - }, - "domain-browser": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/domain-browser/-/domain-browser-1.2.0.tgz", - "integrity": "sha512-jnjyiM6eRyZl2H+W8Q/zLMA481hzi0eszAaBUzIVnmYVDBbnLxVNnfu1HgEBvCbL+71FrxMl3E6lpKH7Ge3OXA==" - }, - "domelementtype": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-1.3.1.tgz", - "integrity": "sha512-BSKB+TSpMpFI/HOxCNr1O8aMOTZ8hT3pM3GQ0w/mWRmkhEDSFJkkyzz4XQsBV44BChwGkrDfMyjVD0eA2aFV3w==" - }, - "domexception": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/domexception/-/domexception-1.0.1.tgz", - "integrity": "sha512-raigMkn7CJNNo6Ihro1fzG7wr3fHuYVytzquZKX5n0yizGsTcYgzdIUwj1X9pK0VvjeihV+XiclP+DjwbsSKug==", + "enzyme-to-json": { + "version": "3.3.5", + "resolved": "https://registry.npmjs.org/enzyme-to-json/-/enzyme-to-json-3.3.5.tgz", + "integrity": "sha512-DmH1wJ68HyPqKSYXdQqB33ZotwfUhwQZW3IGXaNXgR69Iodaoj8TF/D9RjLdz4pEhGq2Tx2zwNUIjBuqoZeTgA==", + "dev": true, "requires": { - "webidl-conversions": "^4.0.2" + "lodash": "^4.17.4" } }, - "domhandler": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/domhandler/-/domhandler-2.1.0.tgz", - "integrity": "sha1-0mRvXlf2w7qxHPbLBdPArPdBJZQ=", + "errno": { + "version": "0.1.7", + "resolved": "https://registry.npmjs.org/errno/-/errno-0.1.7.tgz", + "integrity": "sha512-MfrRBDWzIWifgq6tJj60gkAwtLNb6sQPlcFrSOflcP1aFmmruKQ2wRnze/8V6kgyz7H3FF8Npzv78mZ7XLLflg==", + "dev": true, "requires": { - "domelementtype": "1" + "prr": "~1.0.1" } }, - "domutils": { - "version": "1.5.1", - "resolved": "https://registry.npmjs.org/domutils/-/domutils-1.5.1.tgz", - "integrity": "sha1-3NhIiib1Y9YQeeSMn3t+Mjc2gs8=", + "error-ex": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", + "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", + "dev": true, "requires": { - "dom-serializer": "0", - "domelementtype": "1" + "is-arrayish": "^0.2.1" } }, - "dot-case": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/dot-case/-/dot-case-2.1.1.tgz", - "integrity": "sha1-NNzzf1Co6TwrO8qLt/uRVcfaO+4=", + "es-abstract": { + "version": "1.13.0", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.13.0.tgz", + "integrity": "sha512-vDZfg/ykNxQVwup/8E1BZhVzFfBxs9NqMzGcvIJrqg5k2/5Za2bWo40dK2J1pgLngZ7c+Shh8lwYtLGyrwPutg==", "dev": true, "requires": { - "no-case": "^2.2.0" + "es-to-primitive": "^1.2.0", + "function-bind": "^1.1.1", + "has": "^1.0.3", + "is-callable": "^1.1.4", + "is-regex": "^1.0.4", + "object-keys": "^1.0.12" } }, - "dot-prop": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-4.2.0.tgz", - "integrity": "sha512-tUMXrxlExSW6U2EXiiKGSBVdYgtV8qlHL+C10TsW4PURY/ic+eaysnSkwB4kA/mBlCyy/IKDJ+Lc3wbWeaXtuQ==", + "es-to-primitive": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.0.tgz", + "integrity": "sha512-qZryBOJjV//LaxLTV6UC//WewneB3LcXOL9NP++ozKVXsIIIpm/2c13UDiD9Jp2eThsecw9m3jPqDwTyobcdbg==", + "dev": true, "requires": { - "is-obj": "^1.0.0" + "is-callable": "^1.1.4", + "is-date-object": "^1.0.1", + "is-symbol": "^1.0.2" } }, - "dotenv": { - "version": "4.0.0", - "resolved": "http://registry.npmjs.org/dotenv/-/dotenv-4.0.0.tgz", - 
"integrity": "sha1-hk7xN5rO1Vzm+V3r7NzhefegzR0=" - }, - "dotenv-expand": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/dotenv-expand/-/dotenv-expand-4.2.0.tgz", - "integrity": "sha1-3vHxyl1gWdJKdm5YeULCEQbOEnU=" - }, - "duplexer": { - "version": "0.1.1", - "resolved": "http://registry.npmjs.org/duplexer/-/duplexer-0.1.1.tgz", - "integrity": "sha1-rOb/gIwc5mtX0ev5eXessCM0z8E=" - }, - "duplexer3": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/duplexer3/-/duplexer3-0.1.4.tgz", - "integrity": "sha1-7gHdHKwO08vH/b6jfcCo8c4ALOI=" + "es5-ext": { + "version": "0.10.53", + "resolved": "https://registry.npmjs.org/es5-ext/-/es5-ext-0.10.53.tgz", + "integrity": "sha512-Xs2Stw6NiNHWypzRTY1MtaG/uJlwCk8kH81920ma8mvN8Xq1gsfhZvpkImLQArw8AHnv8MT2I45J3c0R8slE+Q==", + "dev": true, + "requires": { + "es6-iterator": "~2.0.3", + "es6-symbol": "~3.1.3", + "next-tick": "~1.0.0" + } }, - "duplexify": { - "version": "3.6.1", - "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-3.6.1.tgz", - "integrity": "sha512-vM58DwdnKmty+FSPzT14K9JXb90H+j5emaR4KYbr2KTIz00WHGbWOe5ghQTx233ZCLZtrGDALzKwcjEtSt35mA==", + "es6-iterator": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/es6-iterator/-/es6-iterator-2.0.3.tgz", + "integrity": "sha1-p96IkUGgWpSwhUQDstCg+/qY87c=", + "dev": true, "requires": { - "end-of-stream": "^1.0.0", - "inherits": "^2.0.1", - "readable-stream": "^2.0.0", - "stream-shift": "^1.0.0" + "d": "1", + "es5-ext": "^0.10.35", + "es6-symbol": "^3.1.1" } }, - "dynamic-dedupe": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/dynamic-dedupe/-/dynamic-dedupe-0.3.0.tgz", - "integrity": "sha1-BuRMIj9eTpTXjvnbI6ZRXOL5YqE=", + "es6-promise": { + "version": "4.2.5", + "resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-4.2.5.tgz", + "integrity": "sha512-n6wvpdE43VFtJq+lUDYDBFUwV8TZbuGXLV4D6wKafg13ldznKsyEvatubnmUe31zcvelSzOHF+XbaT+Bl9ObDg==", + "dev": true + }, + "es6-promisify": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/es6-promisify/-/es6-promisify-5.0.0.tgz", + "integrity": "sha1-UQnWLz5W6pZ8S2NQWu8IKRyKUgM=", "dev": true, "requires": { - "xtend": "^4.0.0" + "es6-promise": "^4.0.3" } }, - "ecc-jsbn": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz", - "integrity": "sha1-OoOpBOVDUyh4dMVkt1SThoSamMk=", + "es6-symbol": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/es6-symbol/-/es6-symbol-3.1.3.tgz", + "integrity": "sha512-NJ6Yn3FuDinBaBRWl/q5X/s4koRHBrgKAu+yGI6JCBeiu3qrcbJhwT2GeR/EXVfylRk8dpQVJoLEFhK+Mu31NA==", + "dev": true, "requires": { - "jsbn": "~0.1.0", - "safer-buffer": "^2.1.0" + "d": "^1.0.1", + "ext": "^1.1.2" } }, - "ee-first": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", - "integrity": "sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0=" - }, - "ejs": { - "version": "2.6.1", - "resolved": "https://registry.npmjs.org/ejs/-/ejs-2.6.1.tgz", - "integrity": "sha512-0xy4A/twfrRCnkhfk8ErDi5DqdAsAqeGxht4xkCUrsvhhbQNs7E+4jV0CN7+NKIY0aHE72+XvqtBIXzD31ZbXQ==", + "escape-html": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", + "integrity": "sha1-Aljq5NPQwJdN4cFpGI7wBR0dGYg=", "dev": true }, - "electron-to-chromium": { - "version": "1.3.97", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.3.97.tgz", - "integrity": 
"sha512-cElMxsoUl8BoY+DnTNXebdVuDz/l0HS5ijpq1uUcHNMrzFiQRKmADn39luHVIaeISO8qtFbNmqhSBU/BQCHwig==" + "escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=", + "dev": true }, - "elliptic": { - "version": "6.4.1", - "resolved": "https://registry.npmjs.org/elliptic/-/elliptic-6.4.1.tgz", - "integrity": "sha512-BsXLz5sqX8OHcsh7CqBMztyXARmGQ3LWPtGjJi6DiJHq5C/qvi9P3OqgswKSDftbu8+IoI/QDTAm2fFnQ9SZSQ==", + "escodegen": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/escodegen/-/escodegen-1.14.1.tgz", + "integrity": "sha512-Bmt7NcRySdIfNPfU2ZoXDrrXsG9ZjvDxcAlMfDUgRBjLOWTuIACXPBFJH7Z+cLb40JeQco5toikyc9t9P8E9SQ==", + "dev": true, "requires": { - "bn.js": "^4.4.0", - "brorand": "^1.0.1", - "hash.js": "^1.0.0", - "hmac-drbg": "^1.0.0", - "inherits": "^2.0.1", - "minimalistic-assert": "^1.0.0", - "minimalistic-crypto-utils": "^1.0.0" + "esprima": "^4.0.1", + "estraverse": "^4.2.0", + "esutils": "^2.0.2", + "optionator": "^0.8.1", + "source-map": "~0.6.1" + }, + "dependencies": { + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, + "optional": true + } } }, - "emojis-list": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/emojis-list/-/emojis-list-2.1.0.tgz", - "integrity": "sha1-TapNnbAPmBmIDHn6RXrlsJof04k=" - }, - "encodeurl": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", - "integrity": "sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k=" + "eslint": { + "version": "6.8.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-6.8.0.tgz", + "integrity": "sha512-K+Iayyo2LtyYhDSYwz5D5QdWw0hCacNzyq1Y821Xna2xSJj7cijoLLYmLxTQgcgZ9mC61nryMy9S7GRbYpI5Ig==", + "dev": true, + "requires": { + "@babel/code-frame": "^7.0.0", + "ajv": "^6.10.0", + "chalk": "^2.1.0", + "cross-spawn": "^6.0.5", + "debug": "^4.0.1", + "doctrine": "^3.0.0", + "eslint-scope": "^5.0.0", + "eslint-utils": "^1.4.3", + "eslint-visitor-keys": "^1.1.0", + "espree": "^6.1.2", + "esquery": "^1.0.1", + "esutils": "^2.0.2", + "file-entry-cache": "^5.0.1", + "functional-red-black-tree": "^1.0.1", + "glob-parent": "^5.0.0", + "globals": "^12.1.0", + "ignore": "^4.0.6", + "import-fresh": "^3.0.0", + "imurmurhash": "^0.1.4", + "inquirer": "^7.0.0", + "is-glob": "^4.0.0", + "js-yaml": "^3.13.1", + "json-stable-stringify-without-jsonify": "^1.0.1", + "levn": "^0.3.0", + "lodash": "^4.17.14", + "minimatch": "^3.0.4", + "mkdirp": "^0.5.1", + "natural-compare": "^1.4.0", + "optionator": "^0.8.3", + "progress": "^2.0.0", + "regexpp": "^2.0.1", + "semver": "^6.1.2", + "strip-ansi": "^5.2.0", + "strip-json-comments": "^3.0.1", + "table": "^5.2.3", + "text-table": "^0.2.0", + "v8-compile-cache": "^2.0.3" + }, + "dependencies": { + "debug": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", + "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", + "dev": true, + "requires": { + "ms": "^2.1.1" + } + }, + "eslint-utils": { + "version": "1.4.3", + "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-1.4.3.tgz", + "integrity": "sha512-fbBN5W2xdY45KulGXmLHZ3c3FHfVYmKg0IrAKGOkT/464PQsx2UeIzfz1RmEci+KLm1bBaAzZAh8+/E+XAeZ8Q==", + "dev": true, + 
"requires": { + "eslint-visitor-keys": "^1.1.0" + } + }, + "globals": { + "version": "12.4.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-12.4.0.tgz", + "integrity": "sha512-BWICuzzDvDoH54NHKCseDanAhE3CeDorgDL5MT6LMXXj2WCnd9UC2szdk4AWLfjdgNBCXLUanXYcpBBKOSWGwg==", + "dev": true, + "requires": { + "type-fest": "^0.8.1" + } + }, + "import-fresh": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.2.1.tgz", + "integrity": "sha512-6e1q1cnWP2RXD9/keSkxHScg508CdXqXWgWBaETNhyuBFz+kUZlKboh+ISK+bU++DmbHimVBrOz/zzPe0sZ3sQ==", + "dev": true, + "requires": { + "parent-module": "^1.0.0", + "resolve-from": "^4.0.0" + } + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, + "regexpp": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-2.0.1.tgz", + "integrity": "sha512-lv0M6+TkDVniA3aD1Eg0DVpfU/booSu7Eev3TDO/mZKHBfVjgCGTV4t4buppESEYDtkArYFOxTJWv6S5C+iaNw==", + "dev": true + }, + "resolve-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", + "dev": true + }, + "semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "dev": true + }, + "strip-json-comments": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.0.tgz", + "integrity": "sha512-e6/d0eBu7gHtdCqFt0xJr642LdToM5/cN4Qb9DbHjVx1CP5RyeM+zH7pbecEmDv/lBqb0QH+6Uqq75rxFPkM0w==", + "dev": true + } + } }, - "encoding": { - "version": "0.1.12", - "resolved": "https://registry.npmjs.org/encoding/-/encoding-0.1.12.tgz", - "integrity": "sha1-U4tm8+5izRq1HsMjgp0flIDHS+s=", + "eslint-config-react-app": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/eslint-config-react-app/-/eslint-config-react-app-5.2.1.tgz", + "integrity": "sha512-pGIZ8t0mFLcV+6ZirRgYK6RVqUIKRIi9MmgzUEmrIknsn3AdO0I32asO86dJgloHq+9ZPl8UIg8mYrvgP5u2wQ==", + "dev": true, "requires": { - "iconv-lite": "~0.4.13" + "confusing-browser-globals": "^1.0.9" } }, - "end-of-stream": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.1.tgz", - "integrity": "sha512-1MkrZNvWTKCaigbn+W15elq2BB/L22nqrSY5DKlo3X6+vclJm8Bb5djXJBmEX6fS3+zCh/F4VBK5Z2KxJt4s2Q==", + "eslint-import-resolver-node": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.3.tgz", + "integrity": "sha512-b8crLDo0M5RSe5YG8Pu2DYBj71tSB6OvXkfzwbJU2w7y8P4/yo0MyF8jU26IEuEuHF2K5/gcAJE3LhQGqBBbVg==", + "dev": true, "requires": { - "once": "^1.4.0" + "debug": "^2.6.9", + "resolve": "^1.13.1" + }, + "dependencies": { + "debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dev": true, + "requires": { + "ms": "2.0.0" + } + }, + "resolve": { + "version": "1.15.1", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.15.1.tgz", + "integrity": 
"sha512-84oo6ZTtoTUpjgNEr5SJyzQhzL72gaRodsSfyxC/AXRvwu0Yse9H8eF9IpGo7b8YetZhlI6v7ZQ6bKBFV/6S7w==", + "dev": true, + "requires": { + "path-parse": "^1.0.6" + } + } } }, - "enhanced-resolve": { - "version": "3.4.1", - "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-3.4.1.tgz", - "integrity": "sha1-BCHjOf1xQZs9oT0Smzl5BAIwR24=", + "eslint-loader": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/eslint-loader/-/eslint-loader-3.0.3.tgz", + "integrity": "sha512-+YRqB95PnNvxNp1HEjQmvf9KNvCin5HXYYseOXVC2U0KEcw4IkQ2IQEBG46j7+gW39bMzeu0GsUhVbBY3Votpw==", + "dev": true, "requires": { - "graceful-fs": "^4.1.2", - "memory-fs": "^0.4.0", - "object-assign": "^4.0.1", - "tapable": "^0.2.7" + "fs-extra": "^8.1.0", + "loader-fs-cache": "^1.0.2", + "loader-utils": "^1.2.3", + "object-hash": "^2.0.1", + "schema-utils": "^2.6.1" + }, + "dependencies": { + "ajv-keywords": { + "version": "3.4.1", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.4.1.tgz", + "integrity": "sha512-RO1ibKvd27e6FEShVFfPALuHI3WjSVNeK5FIsmme/LYRNxjKuNj+Dt7bucLa6NdSv3JcVTyMlm9kGR84z1XpaQ==", + "dev": true + }, + "schema-utils": { + "version": "2.6.5", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-2.6.5.tgz", + "integrity": "sha512-5KXuwKziQrTVHh8j/Uxz+QUbxkaLW9X/86NBlx/gnKgtsZA2GIVMUn17qWhRFwF8jdYb3Dig5hRO/W5mZqy6SQ==", + "dev": true, + "requires": { + "ajv": "^6.12.0", + "ajv-keywords": "^3.4.1" + } + } } }, - "entities": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/entities/-/entities-1.1.2.tgz", - "integrity": "sha512-f2LZMYl1Fzu7YSBKg+RoROelpOaNrcGmE9AZubeDfrCEia483oW4MI4VyFd5VNHIgQ/7qm1I0wUHK1eJnn2y2w==" + "eslint-module-utils": { + "version": "2.6.0", + "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.6.0.tgz", + "integrity": "sha512-6j9xxegbqe8/kZY8cYpcp0xhbK0EgJlg3g9mib3/miLaExuuwc3n5UEfSnU6hWMbT0FAYVvDbL9RrRgpUeQIvA==", + "dev": true, + "requires": { + "debug": "^2.6.9", + "pkg-dir": "^2.0.0" + }, + "dependencies": { + "debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dev": true, + "requires": { + "ms": "2.0.0" + } + }, + "pkg-dir": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-2.0.0.tgz", + "integrity": "sha1-9tXREJ4Z1j7fQo4L1X4Sd3YVM0s=", + "dev": true, + "requires": { + "find-up": "^2.1.0" + } + } + } }, - "enzyme": { - "version": "3.8.0", - "resolved": "https://registry.npmjs.org/enzyme/-/enzyme-3.8.0.tgz", - "integrity": "sha512-bfsWo5nHyZm1O1vnIsbwdfhU989jk+squU9NKvB+Puwo5j6/Wg9pN5CO0YJelm98Dao3NPjkDZk+vvgwpMwYxw==", + "eslint-plugin-flowtype": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-flowtype/-/eslint-plugin-flowtype-4.6.0.tgz", + "integrity": "sha512-W5hLjpFfZyZsXfo5anlu7HM970JBDqbEshAJUkeczP6BFCIfJXuiIBQXyberLRtOStT0OGPF8efeTbxlHk4LpQ==", "dev": true, "requires": { - "array.prototype.flat": "^1.2.1", - "cheerio": "^1.0.0-rc.2", - "function.prototype.name": "^1.1.0", - "has": "^1.0.3", - "is-boolean-object": "^1.0.0", - "is-callable": "^1.1.4", - "is-number-object": "^1.0.3", - "is-string": "^1.0.4", - "is-subset": "^0.1.1", - "lodash.escape": "^4.0.1", - "lodash.isequal": "^4.5.0", - "object-inspect": "^1.6.0", - "object-is": "^1.0.1", - "object.assign": "^4.1.0", - "object.entries": "^1.0.4", - "object.values": "^1.0.4", - "raf": 
"^3.4.0", - "rst-selector-parser": "^2.2.3", - "string.prototype.trim": "^1.1.2" + "lodash": "^4.17.15" } }, - "enzyme-adapter-react-16": { - "version": "1.7.1", - "resolved": "https://registry.npmjs.org/enzyme-adapter-react-16/-/enzyme-adapter-react-16-1.7.1.tgz", - "integrity": "sha512-OQXKgfHWyHN3sFu2nKj3mhgRcqIPIJX6aOzq5AHVFES4R9Dw/vCBZFMPyaG81g2AZ5DogVh39P3MMNUbqNLTcw==", + "eslint-plugin-import": { + "version": "2.20.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.20.1.tgz", + "integrity": "sha512-qQHgFOTjguR+LnYRoToeZWT62XM55MBVXObHM6SKFd1VzDcX/vqT1kAz8ssqigh5eMj8qXcRoXXGZpPP6RfdCw==", "dev": true, "requires": { - "enzyme-adapter-utils": "^1.9.0", - "function.prototype.name": "^1.1.0", - "object.assign": "^4.1.0", - "object.values": "^1.0.4", - "prop-types": "^15.6.2", - "react-is": "^16.6.1", - "react-test-renderer": "^16.0.0-0" + "array-includes": "^3.0.3", + "array.prototype.flat": "^1.2.1", + "contains-path": "^0.1.0", + "debug": "^2.6.9", + "doctrine": "1.5.0", + "eslint-import-resolver-node": "^0.3.2", + "eslint-module-utils": "^2.4.1", + "has": "^1.0.3", + "minimatch": "^3.0.4", + "object.values": "^1.1.0", + "read-pkg-up": "^2.0.0", + "resolve": "^1.12.0" + }, + "dependencies": { + "debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dev": true, + "requires": { + "ms": "2.0.0" + } + }, + "doctrine": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-1.5.0.tgz", + "integrity": "sha1-N53Ocw9hZvds76TmcHoVmwLFpvo=", + "dev": true, + "requires": { + "esutils": "^2.0.2", + "isarray": "^1.0.0" + } + }, + "load-json-file": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-2.0.0.tgz", + "integrity": "sha1-eUfkIUmvgNaWy/eXvKq8/h/inKg=", + "dev": true, + "requires": { + "graceful-fs": "^4.1.2", + "parse-json": "^2.2.0", + "pify": "^2.0.0", + "strip-bom": "^3.0.0" + } + }, + "path-type": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-2.0.0.tgz", + "integrity": "sha1-8BLMuEFbcJb8LaoQVMPXI4lZTHM=", + "dev": true, + "requires": { + "pify": "^2.0.0" + } + }, + "read-pkg": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-2.0.0.tgz", + "integrity": "sha1-jvHAYjxqbbDcZxPEv6xGMysjaPg=", + "dev": true, + "requires": { + "load-json-file": "^2.0.0", + "normalize-package-data": "^2.3.2", + "path-type": "^2.0.0" + } + }, + "read-pkg-up": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-2.0.0.tgz", + "integrity": "sha1-a3KoBImE4MQeeVEP1en6mbO1Sb4=", + "dev": true, + "requires": { + "find-up": "^2.0.0", + "read-pkg": "^2.0.0" + } + }, + "resolve": { + "version": "1.15.1", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.15.1.tgz", + "integrity": "sha512-84oo6ZTtoTUpjgNEr5SJyzQhzL72gaRodsSfyxC/AXRvwu0Yse9H8eF9IpGo7b8YetZhlI6v7ZQ6bKBFV/6S7w==", + "dev": true, + "requires": { + "path-parse": "^1.0.6" + } + }, + "strip-bom": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", + "integrity": "sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM=", + "dev": true + } } }, - "enzyme-adapter-utils": { - "version": "1.9.1", - "resolved": "https://registry.npmjs.org/enzyme-adapter-utils/-/enzyme-adapter-utils-1.9.1.tgz", - "integrity": 
"sha512-LWc88BbKztLXlpRf5Ba/pSMJRaNezAwZBvis3N/IuB65ltZEh2E2obWU9B36pAbw7rORYeBUuqc79OL17ZzN1A==", + "eslint-plugin-jsx-a11y": { + "version": "6.2.3", + "resolved": "https://registry.npmjs.org/eslint-plugin-jsx-a11y/-/eslint-plugin-jsx-a11y-6.2.3.tgz", + "integrity": "sha512-CawzfGt9w83tyuVekn0GDPU9ytYtxyxyFZ3aSWROmnRRFQFT2BiPJd7jvRdzNDi6oLWaS2asMeYSNMjWTV4eNg==", "dev": true, "requires": { - "function.prototype.name": "^1.1.0", - "object.assign": "^4.1.0", - "prop-types": "^15.6.2", - "semver": "^5.6.0" + "@babel/runtime": "^7.4.5", + "aria-query": "^3.0.0", + "array-includes": "^3.0.3", + "ast-types-flow": "^0.0.7", + "axobject-query": "^2.0.2", + "damerau-levenshtein": "^1.0.4", + "emoji-regex": "^7.0.2", + "has": "^1.0.3", + "jsx-ast-utils": "^2.2.1" + }, + "dependencies": { + "@babel/runtime": { + "version": "7.9.2", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.9.2.tgz", + "integrity": "sha512-NE2DtOdufG7R5vnfQUTehdTfNycfUANEtCa9PssN9O/xmTzP4E08UI797ixaei6hBEVL9BI/PsdJS5x7mWoB9Q==", + "dev": true, + "requires": { + "regenerator-runtime": "^0.13.4" + } + }, + "emoji-regex": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", + "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==", + "dev": true + }, + "regenerator-runtime": { + "version": "0.13.5", + "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.5.tgz", + "integrity": "sha512-ZS5w8CpKFinUzOwW3c83oPeVXoNsrLsaCoLtJvAClH135j/R77RuymhiSErhm2lKcwSCIpmvIWSbDkIfAqKQlA==", + "dev": true + } } }, - "enzyme-to-json": { - "version": "3.3.5", - "resolved": "https://registry.npmjs.org/enzyme-to-json/-/enzyme-to-json-3.3.5.tgz", - "integrity": "sha512-DmH1wJ68HyPqKSYXdQqB33ZotwfUhwQZW3IGXaNXgR69Iodaoj8TF/D9RjLdz4pEhGq2Tx2zwNUIjBuqoZeTgA==", + "eslint-plugin-react": { + "version": "7.19.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.19.0.tgz", + "integrity": "sha512-SPT8j72CGuAP+JFbT0sJHOB80TX/pu44gQ4vXH/cq+hQTiY2PuZ6IHkqXJV6x1b28GDdo1lbInjKUrrdUf0LOQ==", "dev": true, "requires": { - "lodash": "^4.17.4" - } - }, - "errno": { - "version": "0.1.7", - "resolved": "https://registry.npmjs.org/errno/-/errno-0.1.7.tgz", - "integrity": "sha512-MfrRBDWzIWifgq6tJj60gkAwtLNb6sQPlcFrSOflcP1aFmmruKQ2wRnze/8V6kgyz7H3FF8Npzv78mZ7XLLflg==", - "requires": { - "prr": "~1.0.1" - } - }, - "error-ex": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", - "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", - "requires": { - "is-arrayish": "^0.2.1" - } - }, - "es-abstract": { - "version": "1.13.0", - "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.13.0.tgz", - "integrity": "sha512-vDZfg/ykNxQVwup/8E1BZhVzFfBxs9NqMzGcvIJrqg5k2/5Za2bWo40dK2J1pgLngZ7c+Shh8lwYtLGyrwPutg==", - "requires": { - "es-to-primitive": "^1.2.0", - "function-bind": "^1.1.1", + "array-includes": "^3.1.1", + "doctrine": "^2.1.0", "has": "^1.0.3", - "is-callable": "^1.1.4", - "is-regex": "^1.0.4", - "object-keys": "^1.0.12" - } - }, - "es-to-primitive": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.0.tgz", - "integrity": "sha512-qZryBOJjV//LaxLTV6UC//WewneB3LcXOL9NP++ozKVXsIIIpm/2c13UDiD9Jp2eThsecw9m3jPqDwTyobcdbg==", - "requires": { - "is-callable": "^1.1.4", - "is-date-object": "^1.0.1", - "is-symbol": "^1.0.2" 
- } - }, - "es5-ext": { - "version": "0.10.46", - "resolved": "https://registry.npmjs.org/es5-ext/-/es5-ext-0.10.46.tgz", - "integrity": "sha512-24XxRvJXNFwEMpJb3nOkiRJKRoupmjYmOPVlI65Qy2SrtxwOTB+g6ODjBKOtwEHbYrhWRty9xxOWLNdClT2djw==", - "requires": { - "es6-iterator": "~2.0.3", - "es6-symbol": "~3.1.1", - "next-tick": "1" - } - }, - "es6-iterator": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/es6-iterator/-/es6-iterator-2.0.3.tgz", - "integrity": "sha1-p96IkUGgWpSwhUQDstCg+/qY87c=", - "requires": { - "d": "1", - "es5-ext": "^0.10.35", - "es6-symbol": "^3.1.1" - } - }, - "es6-map": { - "version": "0.1.5", - "resolved": "https://registry.npmjs.org/es6-map/-/es6-map-0.1.5.tgz", - "integrity": "sha1-kTbgUD3MBqMBaQ8LsU/042TpSfA=", - "requires": { - "d": "1", - "es5-ext": "~0.10.14", - "es6-iterator": "~2.0.1", - "es6-set": "~0.1.5", - "es6-symbol": "~3.1.1", - "event-emitter": "~0.3.5" + "jsx-ast-utils": "^2.2.3", + "object.entries": "^1.1.1", + "object.fromentries": "^2.0.2", + "object.values": "^1.1.1", + "prop-types": "^15.7.2", + "resolve": "^1.15.1", + "semver": "^6.3.0", + "string.prototype.matchall": "^4.0.2", + "xregexp": "^4.3.0" + }, + "dependencies": { + "doctrine": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz", + "integrity": "sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==", + "dev": true, + "requires": { + "esutils": "^2.0.2" + } + }, + "es-abstract": { + "version": "1.17.5", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.17.5.tgz", + "integrity": "sha512-BR9auzDbySxOcfog0tLECW8l28eRGpDpU3Dm3Hp4q/N+VtLTmyj4EUN088XZWQDW/hzj6sYRDXeOFsaAODKvpg==", + "dev": true, + "requires": { + "es-to-primitive": "^1.2.1", + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.1", + "is-callable": "^1.1.5", + "is-regex": "^1.0.5", + "object-inspect": "^1.7.0", + "object-keys": "^1.1.1", + "object.assign": "^4.1.0", + "string.prototype.trimleft": "^2.1.1", + "string.prototype.trimright": "^2.1.1" + } + }, + "es-to-primitive": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz", + "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==", + "dev": true, + "requires": { + "is-callable": "^1.1.4", + "is-date-object": "^1.0.1", + "is-symbol": "^1.0.2" + } + }, + "has-symbols": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.1.tgz", + "integrity": "sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg==", + "dev": true + }, + "is-callable": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.1.5.tgz", + "integrity": "sha512-ESKv5sMCJB2jnHTWZ3O5itG+O128Hsus4K4Qh1h2/cgn2vbgnLSVqfV46AeJA9D5EeeLa9w81KUXMtn34zhX+Q==", + "dev": true + }, + "is-regex": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.0.5.tgz", + "integrity": "sha512-vlKW17SNq44owv5AQR3Cq0bQPEb8+kF3UKZ2fiZNOWtztYE5i0CzCZxFDwO58qAOWtxdBRVO/V5Qin1wjCqFYQ==", + "dev": true, + "requires": { + "has": "^1.0.3" + } + }, + "object-inspect": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.7.0.tgz", + "integrity": "sha512-a7pEHdh1xKIAgTySUGgLMx/xwDZskN1Ud6egYYN3EdRW4ZMPNEDUTF+hwy2LUC+Bl+SyLXANnwz/jyh/qutKUw==", + "dev": true + }, + "object-keys": { + "version": "1.1.1", + 
"resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", + "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==", + "dev": true + }, + "object.entries": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/object.entries/-/object.entries-1.1.1.tgz", + "integrity": "sha512-ilqR7BgdyZetJutmDPfXCDffGa0/Yzl2ivVNpbx/g4UeWrCdRnFDUBrKJGLhGieRHDATnyZXWBeCb29k9CJysQ==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.0-next.1", + "function-bind": "^1.1.1", + "has": "^1.0.3" + } + }, + "object.fromentries": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/object.fromentries/-/object.fromentries-2.0.2.tgz", + "integrity": "sha512-r3ZiBH7MQppDJVLx6fhD618GKNG40CZYH9wgwdhKxBDDbQgjeWGGd4AtkZad84d291YxvWe7bJGuE65Anh0dxQ==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.0-next.1", + "function-bind": "^1.1.1", + "has": "^1.0.3" + } + }, + "object.values": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.1.1.tgz", + "integrity": "sha512-WTa54g2K8iu0kmS/us18jEmdv1a4Wi//BZ/DTVYEcH0XhLM5NYdpDHja3gt57VrZLcNAO2WGA+KpWsDBaHt6eA==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.0-next.1", + "function-bind": "^1.1.1", + "has": "^1.0.3" + } + }, + "prop-types": { + "version": "15.7.2", + "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.7.2.tgz", + "integrity": "sha512-8QQikdH7//R2vurIJSutZ1smHYTcLpRWEOlHnzcWHmBYrOGUysKwSsrC89BCiFj3CbrfJ/nXFdJepOVrY1GCHQ==", + "dev": true, + "requires": { + "loose-envify": "^1.4.0", + "object-assign": "^4.1.1", + "react-is": "^16.8.1" + } + }, + "react-is": { + "version": "16.13.1", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz", + "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==", + "dev": true + }, + "resolve": { + "version": "1.15.1", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.15.1.tgz", + "integrity": "sha512-84oo6ZTtoTUpjgNEr5SJyzQhzL72gaRodsSfyxC/AXRvwu0Yse9H8eF9IpGo7b8YetZhlI6v7ZQ6bKBFV/6S7w==", + "dev": true, + "requires": { + "path-parse": "^1.0.6" + } + }, + "semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "dev": true + }, + "string.prototype.trimleft": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/string.prototype.trimleft/-/string.prototype.trimleft-2.1.2.tgz", + "integrity": "sha512-gCA0tza1JBvqr3bfAIFJGqfdRTyPae82+KTnm3coDXkZN9wnuW3HjGgN386D7hfv5CHQYCI022/rJPVlqXyHSw==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.5", + "string.prototype.trimstart": "^1.0.0" + } + }, + "string.prototype.trimright": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/string.prototype.trimright/-/string.prototype.trimright-2.1.2.tgz", + "integrity": "sha512-ZNRQ7sY3KroTaYjRS6EbNiiHrOkjihL9aQE/8gfQ4DtAC/aEBRHFJa44OmoWxGGqXuJlfKkZW4WcXErGr+9ZFg==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.5", + "string.prototype.trimend": "^1.0.0" + } + } } }, - "es6-promise": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-4.2.5.tgz", - "integrity": 
"sha512-n6wvpdE43VFtJq+lUDYDBFUwV8TZbuGXLV4D6wKafg13ldznKsyEvatubnmUe31zcvelSzOHF+XbaT+Bl9ObDg==" + "eslint-plugin-react-hooks": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-1.7.0.tgz", + "integrity": "sha512-iXTCFcOmlWvw4+TOE8CLWj6yX1GwzT0Y6cUfHHZqWnSk144VmVIRcVGtUAzrLES7C798lmvnt02C7rxaOX1HNA==", + "dev": true }, - "es6-promisify": { + "eslint-scope": { "version": "5.0.0", - "resolved": "https://registry.npmjs.org/es6-promisify/-/es6-promisify-5.0.0.tgz", - "integrity": "sha1-UQnWLz5W6pZ8S2NQWu8IKRyKUgM=", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.0.0.tgz", + "integrity": "sha512-oYrhJW7S0bxAFDvWqzvMPRm6pcgcnWc4QnofCAqRTRfQC0JcwenzGglTtsLyIuuWFfkqDG9vz67cnttSd53djw==", "dev": true, "requires": { - "es6-promise": "^4.0.3" + "esrecurse": "^4.1.0", + "estraverse": "^4.1.1" } }, - "es6-set": { - "version": "0.1.5", - "resolved": "https://registry.npmjs.org/es6-set/-/es6-set-0.1.5.tgz", - "integrity": "sha1-0rPsXU2ADO2BjbU40ol02wpzzLE=", + "eslint-utils": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-2.0.0.tgz", + "integrity": "sha512-0HCPuJv+7Wv1bACm8y5/ECVfYdfsAm9xmVb7saeFlxjPYALefjhbYoCkBjPdPzGH8wWyTpAez82Fh3VKYEZ8OA==", + "dev": true, "requires": { - "d": "1", - "es5-ext": "~0.10.14", - "es6-iterator": "~2.0.1", - "es6-symbol": "3.1.1", - "event-emitter": "~0.3.5" + "eslint-visitor-keys": "^1.1.0" } }, - "es6-symbol": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/es6-symbol/-/es6-symbol-3.1.1.tgz", - "integrity": "sha1-vwDvT9q2uhtG7Le2KbTH7VcVzHc=", - "requires": { - "d": "1", - "es5-ext": "~0.10.14" - } + "eslint-visitor-keys": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.1.0.tgz", + "integrity": "sha512-8y9YjtM1JBJU/A9Kc+SbaOV4y29sSWckBwMHa+FGtVj5gN/sbnKDf6xJUl+8g7FAij9LVaP8C24DUiH/f/2Z9A==", + "dev": true }, - "es6-weak-map": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/es6-weak-map/-/es6-weak-map-2.0.2.tgz", - "integrity": "sha1-XjqzIlH/0VOKH45f+hNXdy+S2W8=", + "espree": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/espree/-/espree-6.2.1.tgz", + "integrity": "sha512-ysCxRQY3WaXJz9tdbWOwuWr5Y/XrPTGX9Kiz3yoUXwW0VZ4w30HTkQLaGx/+ttFjF8i+ACbArnB4ce68a9m5hw==", + "dev": true, "requires": { - "d": "1", - "es5-ext": "^0.10.14", - "es6-iterator": "^2.0.1", - "es6-symbol": "^3.1.1" + "acorn": "^7.1.1", + "acorn-jsx": "^5.2.0", + "eslint-visitor-keys": "^1.1.0" } }, - "escape-html": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", - "integrity": "sha1-Aljq5NPQwJdN4cFpGI7wBR0dGYg=" - }, - "escape-string-regexp": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", - "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=" + "esprima": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", + "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==" }, - "escodegen": { - "version": "1.11.0", - "resolved": "https://registry.npmjs.org/escodegen/-/escodegen-1.11.0.tgz", - "integrity": "sha512-IeMV45ReixHS53K/OmfKAIztN/igDHzTJUhZM3k1jMhIZWjk45SMwAtBsEXiJp3vSPmTcu6CXn7mDvFHRN66fw==", + "esquery": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.2.0.tgz", + "integrity": 
"sha512-weltsSqdeWIX9G2qQZz7KlTRJdkkOCTPgLYJUz1Hacf48R4YOwGPHO3+ORfWedqJKbq5WQmsgK90n+pFLIKt/Q==", + "dev": true, "requires": { - "esprima": "^3.1.3", - "estraverse": "^4.2.0", - "esutils": "^2.0.2", - "optionator": "^0.8.1", - "source-map": "~0.6.1" + "estraverse": "^5.0.0" }, "dependencies": { - "esprima": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/esprima/-/esprima-3.1.3.tgz", - "integrity": "sha1-/cpRzuYTOJXjyI1TXOSdv/YqRjM=" - }, - "source-map": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", - "optional": true + "estraverse": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.0.0.tgz", + "integrity": "sha512-j3acdrMzqrxmJTNj5dbr1YbjacrYgAxVMeF0gK16E3j494mOe7xygM/ZLIguEQ0ETwAg2hlJCtHRGav+y0Ny5A==", + "dev": true } } }, - "escope": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/escope/-/escope-3.6.0.tgz", - "integrity": "sha1-4Bl16BJ4GhY6ba392AOY3GTIicM=", - "requires": { - "es6-map": "^0.1.3", - "es6-weak-map": "^2.0.1", - "esrecurse": "^4.1.0", - "estraverse": "^4.1.1" - } - }, - "esprima": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", - "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==" - }, "esrecurse": { "version": "4.2.1", "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.2.1.tgz", "integrity": "sha512-64RBB++fIOAXPw3P9cy89qfMlvZEXZkqqJkjqqXIvzP5ezRZjW+lPWjw35UX/3EhUPFYbg5ER4JYgDw4007/DQ==", + "dev": true, "requires": { "estraverse": "^4.1.0" } @@ -4636,26 +8321,26 @@ "estraverse": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.2.0.tgz", - "integrity": "sha1-De4/7TH81GlhjOc0IJn8GvoL2xM=" + "integrity": "sha1-De4/7TH81GlhjOc0IJn8GvoL2xM=", + "dev": true }, "esutils": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.2.tgz", - "integrity": "sha1-Cr9PHKpbyx96nYrMbepPqqBLrJs=" + "integrity": "sha1-Cr9PHKpbyx96nYrMbepPqqBLrJs=", + "dev": true }, "etag": { "version": "1.8.1", "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", - "integrity": "sha1-Qa4u62XvpiJorr/qg6x9eSmbCIc=" + "integrity": "sha1-Qa4u62XvpiJorr/qg6x9eSmbCIc=", + "dev": true }, - "event-emitter": { - "version": "0.3.5", - "resolved": "https://registry.npmjs.org/event-emitter/-/event-emitter-0.3.5.tgz", - "integrity": "sha1-34xp7vFkeSPHFXuc6DhAYQsCzDk=", - "requires": { - "d": "1", - "es5-ext": "~0.10.14" - } + "event-target-shim": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz", + "integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==", + "dev": true }, "eventemitter3": { "version": "3.1.0", @@ -4663,42 +8348,44 @@ "integrity": "sha512-ivIvhpq/Y0uSjcHDcOIccjmYjGLcP09MFGE7ysAwkAvkXfpZlC985pH2/ui64DKazbTW/4kN3yqozUxlXzI6cA==" }, "events": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/events/-/events-1.1.1.tgz", - "integrity": "sha1-nr23Y1rQmccNzEwqH1AEKI6L2SQ=" + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/events/-/events-3.1.0.tgz", + "integrity": "sha512-Rv+u8MLHNOdMjTAFeT3nCjHn2aGlx435FP/sDHNaRhDEMwyI/aB22Kj2qIN8R0cw3z28psEQLYwxVKLsKrMgWg==", + "dev": true }, "eventsource": { - "version": "0.1.6", - 
"resolved": "https://registry.npmjs.org/eventsource/-/eventsource-0.1.6.tgz", - "integrity": "sha1-Cs7ehJ7X3RzMMsgRuxG5RNTykjI=", + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/eventsource/-/eventsource-1.0.7.tgz", + "integrity": "sha512-4Ln17+vVT0k8aWq+t/bF5arcS3EpT9gYtW66EPacdj/mAFevznsnyoHLPy2BA8gbIQeIHoPsvwmfBftfcG//BQ==", + "dev": true, "requires": { - "original": ">=0.0.5" + "original": "^1.0.0" } }, "evp_bytestokey": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/evp_bytestokey/-/evp_bytestokey-1.0.3.tgz", "integrity": "sha512-/f2Go4TognH/KvCISP7OUsHn85hT9nUkxxA9BEWxFn+Oj9o8ZNLm/40hdlgSLyuOimsrTKLUMEorQexp/aPQeA==", + "dev": true, "requires": { "md5.js": "^1.3.4", "safe-buffer": "^5.1.1" } }, "exec-sh": { - "version": "0.2.2", - "resolved": "https://registry.npmjs.org/exec-sh/-/exec-sh-0.2.2.tgz", - "integrity": "sha512-FIUCJz1RbuS0FKTdaAafAByGS0CPvU3R0MeHxgtl+djzCc//F8HakL8GzmVNZanasTbTAY/3DRFA0KpVqj/eAw==", - "requires": { - "merge": "^1.2.0" - } + "version": "0.3.4", + "resolved": "https://registry.npmjs.org/exec-sh/-/exec-sh-0.3.4.tgz", + "integrity": "sha512-sEFIkc61v75sWeOe72qyrqg2Qg0OuLESziUDk/O/z2qgS15y2gWVFrI6f2Qn/qw/0/NCfCEsmNA4zOjkwEZT1A==", + "dev": true }, "execa": { - "version": "0.7.0", - "resolved": "https://registry.npmjs.org/execa/-/execa-0.7.0.tgz", - "integrity": "sha1-lEvs00zEHuMqY6n68nrVpl/Fl3c=", + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/execa/-/execa-1.0.0.tgz", + "integrity": "sha512-adbxcyWV46qiHyvSp50TKt05tB4tK3HcmF7/nxfAdhnox83seTDbwnaqKO4sXRy7roHAIFqJP/Rw/AuEbX61LA==", + "dev": true, "requires": { - "cross-spawn": "^5.0.1", - "get-stream": "^3.0.0", + "cross-spawn": "^6.0.0", + "get-stream": "^4.0.0", "is-stream": "^1.1.0", "npm-run-path": "^2.0.0", "p-finally": "^1.0.0", @@ -4706,16 +8393,11 @@ "strip-eof": "^1.0.0" } }, - "exif-parser": { - "version": "0.1.12", - "resolved": "https://registry.npmjs.org/exif-parser/-/exif-parser-0.1.12.tgz", - "integrity": "sha1-WKnS1ywCwfbwKg70qRZicrd2CSI=", - "dev": true - }, "exit": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/exit/-/exit-0.1.2.tgz", - "integrity": "sha1-BjJjj42HfMghB9MKD/8aF8uhzQw=" + "integrity": "sha1-BjJjj42HfMghB9MKD/8aF8uhzQw=", + "dev": true }, "expand-brackets": { "version": "2.1.4", @@ -4757,134 +8439,158 @@ } } }, - "expand-range": { - "version": "1.8.2", - "resolved": "https://registry.npmjs.org/expand-range/-/expand-range-1.8.2.tgz", - "integrity": "sha1-opnv/TNf4nIeuujiV+x5ZE/IUzc=", - "requires": { - "fill-range": "^2.1.0" - }, - "dependencies": { - "fill-range": { - "version": "2.2.4", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-2.2.4.tgz", - "integrity": "sha512-cnrcCbj01+j2gTG921VZPnHbjmdAf8oQV/iGeV2kZxGSyfYjjTyY79ErsK1WJWMpw6DaApEX72binqJE+/d+5Q==", - "requires": { - "is-number": "^2.1.0", - "isobject": "^2.0.0", - "randomatic": "^3.0.0", - "repeat-element": "^1.1.2", - "repeat-string": "^1.5.2" - } - }, - "is-number": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-2.1.0.tgz", - "integrity": "sha1-Afy7s5NGOlSPL0ZszhbezknbkI8=", - "requires": { - "kind-of": "^3.0.2" - } - }, - "isobject": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/isobject/-/isobject-2.1.0.tgz", - "integrity": "sha1-8GVWEJaj8dou9GJy+BXIQNh+DIk=", - "requires": { - "isarray": "1.0.0" - } - }, - "kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", - "requires": 
{ - "is-buffer": "^1.1.5" - } - } - } - }, - "expand-tilde": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/expand-tilde/-/expand-tilde-2.0.2.tgz", - "integrity": "sha1-l+gBqgUt8CRU3kawK/YhZCzchQI=", - "requires": { - "homedir-polyfill": "^1.0.1" - } - }, "expect": { - "version": "22.4.3", - "resolved": "http://registry.npmjs.org/expect/-/expect-22.4.3.tgz", - "integrity": "sha512-XcNXEPehqn8b/jm8FYotdX0YrXn36qp4HWlrVT4ktwQas1l1LPxiVWncYnnL2eyMtKAmVIaG0XAp0QlrqJaxaA==", + "version": "24.9.0", + "resolved": "https://registry.npmjs.org/expect/-/expect-24.9.0.tgz", + "integrity": "sha512-wvVAx8XIol3Z5m9zvZXiyZOQ+sRJqNTIm6sGjdWlaZIeupQGO3WbYI+15D/AmEwZywL6wtJkbAbJtzkOfBuR0Q==", + "dev": true, "requires": { + "@jest/types": "^24.9.0", "ansi-styles": "^3.2.0", - "jest-diff": "^22.4.3", - "jest-get-type": "^22.4.3", - "jest-matcher-utils": "^22.4.3", - "jest-message-util": "^22.4.3", - "jest-regex-util": "^22.4.3" + "jest-get-type": "^24.9.0", + "jest-matcher-utils": "^24.9.0", + "jest-message-util": "^24.9.0", + "jest-regex-util": "^24.9.0" } }, "express": { - "version": "4.16.4", - "resolved": "https://registry.npmjs.org/express/-/express-4.16.4.tgz", - "integrity": "sha512-j12Uuyb4FMrd/qQAm6uCHAkPtO8FDTRJZBDd5D2KOL2eLaz1yUNdUB/NOIyq0iU4q4cFarsUCrnFDPBcnksuOg==", + "version": "4.17.1", + "resolved": "https://registry.npmjs.org/express/-/express-4.17.1.tgz", + "integrity": "sha512-mHJ9O79RqluphRrcw2X/GTh3k9tVv8YcoyY4Kkh4WDMUYKRZUq0h1o0w2rrrxBqM7VoeUVqgb27xlEMXTnYt4g==", + "dev": true, "requires": { - "accepts": "~1.3.5", + "accepts": "~1.3.7", "array-flatten": "1.1.1", - "body-parser": "1.18.3", - "content-disposition": "0.5.2", + "body-parser": "1.19.0", + "content-disposition": "0.5.3", "content-type": "~1.0.4", - "cookie": "0.3.1", + "cookie": "0.4.0", "cookie-signature": "1.0.6", "debug": "2.6.9", "depd": "~1.1.2", "encodeurl": "~1.0.2", "escape-html": "~1.0.3", "etag": "~1.8.1", - "finalhandler": "1.1.1", + "finalhandler": "~1.1.2", "fresh": "0.5.2", "merge-descriptors": "1.0.1", "methods": "~1.1.2", "on-finished": "~2.3.0", - "parseurl": "~1.3.2", + "parseurl": "~1.3.3", "path-to-regexp": "0.1.7", - "proxy-addr": "~2.0.4", - "qs": "6.5.2", - "range-parser": "~1.2.0", + "proxy-addr": "~2.0.5", + "qs": "6.7.0", + "range-parser": "~1.2.1", "safe-buffer": "5.1.2", - "send": "0.16.2", - "serve-static": "1.13.2", - "setprototypeof": "1.1.0", - "statuses": "~1.4.0", - "type-is": "~1.6.16", + "send": "0.17.1", + "serve-static": "1.14.1", + "setprototypeof": "1.1.1", + "statuses": "~1.5.0", + "type-is": "~1.6.18", "utils-merge": "1.0.1", "vary": "~1.1.2" }, "dependencies": { + "accepts": { + "version": "1.3.7", + "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.7.tgz", + "integrity": "sha512-Il80Qs2WjYlJIBNzNkK6KYqlVMTbZLXgHx2oT0pU/fjRHyEp+PEfEPY0R3WCwAGVOtauxh1hOxNgIf5bv7dQpA==", + "dev": true, + "requires": { + "mime-types": "~2.1.24", + "negotiator": "0.6.2" + } + }, "array-flatten": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", - "integrity": "sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=" + "integrity": "sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=", + "dev": true }, "debug": { "version": "2.6.9", "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dev": true, "requires": { "ms": "2.0.0" } }, + "mime-db": { + "version": "1.43.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.43.0.tgz", 
+ "integrity": "sha512-+5dsGEEovYbT8UY9yD7eE4XTc4UwJ1jBYlgaQQF38ENsKR3wj/8q8RFZrF9WIZpB2V1ArTVFUva8sAul1NzRzQ==", + "dev": true + }, + "mime-types": { + "version": "2.1.26", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.26.tgz", + "integrity": "sha512-01paPWYgLrkqAyrlDorC1uDwl2p3qZT7yl806vW7DvDoxwXi46jsjFbg+WdwotBIk6/MbEhO/dh5aZ5sNj/dWQ==", + "dev": true, + "requires": { + "mime-db": "1.43.0" + } + }, + "negotiator": { + "version": "0.6.2", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.2.tgz", + "integrity": "sha512-hZXc7K2e+PgeI1eDBe/10Ard4ekbfrrqG8Ep+8Jmf4JID2bNg7NvCPOZN+kfF574pFQI7mum2AUqDidoKqcTOw==", + "dev": true + }, + "parseurl": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", + "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==", + "dev": true + }, "path-to-regexp": { "version": "0.1.7", "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz", - "integrity": "sha1-32BBeABfUi8V60SQ5yR6G/qmf4w=" + "integrity": "sha1-32BBeABfUi8V60SQ5yR6G/qmf4w=", + "dev": true + }, + "qs": { + "version": "6.7.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.7.0.tgz", + "integrity": "sha512-VCdBRNFTX1fyE7Nb6FYoURo/SPe62QCaAyzJvUjwRaIsc+NePBEniHlvxFmmX56+HZphIGtV0XeCirBtpDrTyQ==", + "dev": true + }, + "setprototypeof": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.1.1.tgz", + "integrity": "sha512-JvdAWfbXeIGaZ9cILp38HntZSFSo3mWg6xGcJJsd+d4aRMOqauag1C63dJfDw7OaMYwEbHMOxEZ1lqVRYP2OAw==", + "dev": true + }, + "statuses": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz", + "integrity": "sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow=", + "dev": true + } + } + }, + "ext": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/ext/-/ext-1.4.0.tgz", + "integrity": "sha512-Key5NIsUxdqKg3vIsdw9dSuXpPCQ297y6wBjL30edxwPgt2E44WcWBZey/ZvUc6sERLTxKdyCu4gZFmUbk1Q7A==", + "dev": true, + "requires": { + "type": "^2.0.0" + }, + "dependencies": { + "type": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/type/-/type-2.0.0.tgz", + "integrity": "sha512-KBt58xCHry4Cejnc2ISQAF7QY+ORngsWfxezO68+12hKV6lQY8P/psIkcbjeHWn7MqcgciWJyCCevFMJdIXpow==", + "dev": true } } }, "extend": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", - "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==" + "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==", + "dev": true }, "extend-shallow": { "version": "3.0.2", @@ -4906,12 +8612,13 @@ } }, "external-editor": { - "version": "2.2.0", - "resolved": "http://registry.npmjs.org/external-editor/-/external-editor-2.2.0.tgz", - "integrity": "sha512-bSn6gvGxKt+b7+6TKEv1ZycHleA7aHhRHyAqJyp5pbUFuYYNIzpZnQDk7AsYckyWdEnTeAnay0aCy2aV6iTk9A==", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/external-editor/-/external-editor-3.1.0.tgz", + "integrity": "sha512-hMQ4CX1p1izmuLYyZqLMO/qGNw10wSv9QDCPfzXfyFrOaCSSoRfqE1Kf1s5an66J5JZC62NewG+mK49jOCtQew==", + "dev": true, "requires": { - "chardet": "^0.4.0", - "iconv-lite": "^0.4.17", + "chardet": "^0.7.0", + "iconv-lite": "^0.4.24", "tmp": "^0.0.33" } }, @@ -4974,79 +8681,89 @@ } } }, - "extract-text-webpack-plugin": { - "version": "3.0.2", - "resolved": 
"https://registry.npmjs.org/extract-text-webpack-plugin/-/extract-text-webpack-plugin-3.0.2.tgz", - "integrity": "sha512-bt/LZ4m5Rqt/Crl2HiKuAl/oqg0psx1tsTLkvWbJen1CtD+fftkZhMaQ9HOtY2gWsl2Wq+sABmMVi9z3DhKWQQ==", - "requires": { - "async": "^2.4.1", - "loader-utils": "^1.1.0", - "schema-utils": "^0.3.0", - "webpack-sources": "^1.0.1" - } + "extsprintf": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz", + "integrity": "sha1-lpGEQOMEGnpBT4xS48V06zw+HgU=", + "dev": true }, - "extract-zip": { - "version": "1.6.7", - "resolved": "https://registry.npmjs.org/extract-zip/-/extract-zip-1.6.7.tgz", - "integrity": "sha1-qEC0uK9kAyZMjbV/Txp0Mz74H+k=", + "fast-deep-equal": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.1.tgz", + "integrity": "sha512-8UEa58QDLauDNfpbrX55Q9jrGHThw2ZMdOky5Gl1CDtVeJDPVrG4Jxx1N8jw2gkWaff5UUuX1KJd+9zGe2B+ZA==", + "dev": true + }, + "fast-glob": { + "version": "2.2.7", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-2.2.7.tgz", + "integrity": "sha512-g1KuQwHOZAmOZMuBtHdxDtju+T2RT8jgCC9aANsbpdiDDTSnjgfuVsIBNKbUeJI3oKMRExcfNDtJl4OhbffMsw==", "dev": true, "requires": { - "concat-stream": "1.6.2", - "debug": "2.6.9", - "mkdirp": "0.5.1", - "yauzl": "2.4.1" + "@mrmlnc/readdir-enhanced": "^2.2.1", + "@nodelib/fs.stat": "^1.1.2", + "glob-parent": "^3.1.0", + "is-glob": "^4.0.0", + "merge2": "^1.2.3", + "micromatch": "^3.1.10" }, "dependencies": { - "debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "glob-parent": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-3.1.0.tgz", + "integrity": "sha1-nmr2KZ2NO9K9QEMIMr0RPfkGxa4=", "dev": true, "requires": { - "ms": "2.0.0" + "is-glob": "^3.1.0", + "path-dirname": "^1.0.0" + }, + "dependencies": { + "is-glob": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-3.1.0.tgz", + "integrity": "sha1-e6WuJCF4BKxwcHuWkiVnSGzD6Eo=", + "dev": true, + "requires": { + "is-extglob": "^2.1.0" + } + } } } } }, - "extsprintf": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz", - "integrity": "sha1-lpGEQOMEGnpBT4xS48V06zw+HgU=" - }, - "fast-deep-equal": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-1.1.0.tgz", - "integrity": "sha1-wFNHeBfIa1HaqFPIHgWbcz0CNhQ=" - }, "fast-json-stable-stringify": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.0.0.tgz", - "integrity": "sha1-1RQsDK7msRifh9OnYREGT4bIu/I=" + "integrity": "sha1-1RQsDK7msRifh9OnYREGT4bIu/I=", + "dev": true }, "fast-levenshtein": { "version": "2.0.6", "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", - "integrity": "sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc=" + "integrity": "sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc=", + "dev": true }, - "fastparse": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/fastparse/-/fastparse-1.1.2.tgz", - "integrity": "sha512-483XLLxTVIwWK3QTrMGRqUfUpoOs/0hbQrl2oz4J0pAcm3A3bu84wxTFqGqkJzewCLdME38xJLJAxBABfQT8sQ==" + "fast-text-encoding": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fast-text-encoding/-/fast-text-encoding-1.0.0.tgz", + "integrity": 
"sha512-R9bHCvweUxxwkDwhjav5vxpFvdPGlVngtqmx4pIZfSUhM/Q4NiIUHB456BAf+Q1Nwu3HEZYONtu+Rya+af4jiQ==", + "dev": true }, "faye-websocket": { - "version": "0.11.1", - "resolved": "https://registry.npmjs.org/faye-websocket/-/faye-websocket-0.11.1.tgz", - "integrity": "sha1-8O/hjE9W5PQK/H4Gxxn9XuYYjzg=", + "version": "0.10.0", + "resolved": "https://registry.npmjs.org/faye-websocket/-/faye-websocket-0.10.0.tgz", + "integrity": "sha1-TkkvjQTftviQA1B/btvy1QHnxvQ=", + "dev": true, "requires": { "websocket-driver": ">=0.5.1" } }, "fb-watchman": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/fb-watchman/-/fb-watchman-2.0.0.tgz", - "integrity": "sha1-VOmr99+i8mzZsWNsWIwa/AXeXVg=", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/fb-watchman/-/fb-watchman-2.0.1.tgz", + "integrity": "sha512-DkPJKQeY6kKwmuMretBhr7G6Vodr7bFwDYTXIkfG1gjvNpaxBTQV3PbXg6bR1c1UP4jPOX0jHUbbHANL9vRjVg==", + "dev": true, "requires": { - "bser": "^2.0.0" + "bser": "2.1.1" } }, "fbjs": { @@ -5063,55 +8780,70 @@ "ua-parser-js": "^0.7.18" } }, - "fd-slicer": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/fd-slicer/-/fd-slicer-1.0.1.tgz", - "integrity": "sha1-i1vL2ewyfFBBv5qwI/1nUPEXfmU=", - "dev": true, - "requires": { - "pend": "~1.2.0" - } + "figgy-pudding": { + "version": "3.5.1", + "resolved": "https://registry.npmjs.org/figgy-pudding/-/figgy-pudding-3.5.1.tgz", + "integrity": "sha512-vNKxJHTEKNThjfrdJwHc7brvM6eVevuO5nTj6ez8ZQ1qbXTvGthucRF7S4vf2cr71QVnT70V34v0S1DyQsti0w==", + "dev": true }, "figures": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/figures/-/figures-2.0.0.tgz", - "integrity": "sha1-OrGi0qYsi/tDGgyUy3l6L84nyWI=", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/figures/-/figures-3.2.0.tgz", + "integrity": "sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg==", + "dev": true, "requires": { "escape-string-regexp": "^1.0.5" } }, - "file-loader": { - "version": "0.11.2", - "resolved": "https://registry.npmjs.org/file-loader/-/file-loader-0.11.2.tgz", - "integrity": "sha512-N+uhF3mswIFeziHQjGScJ/yHXYt3DiLBeC+9vWW+WjUBiClMSOlV1YrXQi+7KM2aA3Rn4Bybgv+uXFQbfkzpvg==", + "file-entry-cache": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-5.0.1.tgz", + "integrity": "sha512-bCg29ictuBaKUwwArK4ouCaqDgLZcysCFLmM/Yn/FDoqndh/9vNuQfXRDvTuXKLxfD/JtZQGKFT8MGcJBK644g==", + "dev": true, "requires": { - "loader-utils": "^1.0.2" + "flat-cache": "^2.0.1" } }, - "file-type": { - "version": "3.9.0", - "resolved": "https://registry.npmjs.org/file-type/-/file-type-3.9.0.tgz", - "integrity": "sha1-JXoHg4TR24CHvESdEH1SpSZyuek=", - "dev": true - }, - "filename-regex": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/filename-regex/-/filename-regex-2.0.1.tgz", - "integrity": "sha1-wcS5vuPglyXdsQa3XB4wH+LxiyY=" - }, - "fileset": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/fileset/-/fileset-2.0.3.tgz", - "integrity": "sha1-jnVIqW08wjJ+5eZ0FocjozO7oqA=", + "file-loader": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/file-loader/-/file-loader-4.3.0.tgz", + "integrity": "sha512-aKrYPYjF1yG3oX0kWRrqrSMfgftm7oJW5M+m4owoldH5C51C0RkIwB++JbRvEW3IU6/ZG5n8UvEcdgwOt2UOWA==", + "dev": true, "requires": { - "glob": "^7.0.3", - "minimatch": "^3.0.3" + "loader-utils": "^1.2.3", + "schema-utils": "^2.5.0" + }, + "dependencies": { + "ajv-keywords": { + "version": "3.4.1", + "resolved": 
"https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.4.1.tgz", + "integrity": "sha512-RO1ibKvd27e6FEShVFfPALuHI3WjSVNeK5FIsmme/LYRNxjKuNj+Dt7bucLa6NdSv3JcVTyMlm9kGR84z1XpaQ==", + "dev": true + }, + "schema-utils": { + "version": "2.6.5", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-2.6.5.tgz", + "integrity": "sha512-5KXuwKziQrTVHh8j/Uxz+QUbxkaLW9X/86NBlx/gnKgtsZA2GIVMUn17qWhRFwF8jdYb3Dig5hRO/W5mZqy6SQ==", + "dev": true, + "requires": { + "ajv": "^6.12.0", + "ajv-keywords": "^3.4.1" + } + } } }, + "file-uri-to-path": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz", + "integrity": "sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==", + "dev": true, + "optional": true + }, "filesize": { - "version": "3.5.11", - "resolved": "https://registry.npmjs.org/filesize/-/filesize-3.5.11.tgz", - "integrity": "sha512-ZH7loueKBoDb7yG9esn1U+fgq7BzlzW6NRi5/rMdxIZ05dj7GFD/Xc5rq2CDt5Yq86CyfSYVyx4242QQNZbx1g==" + "version": "3.6.1", + "resolved": "https://registry.npmjs.org/filesize/-/filesize-3.6.1.tgz", + "integrity": "sha512-7KjR1vv6qnicaPMi1iiTcI85CyYwRO/PSFCu6SvqL8jN2Wjt/NIYQTFtFs7fSDCYOstUkEWIQGFUg5YZQfjlcg==", + "dev": true }, "filewatcher": { "version": "3.0.1", @@ -5144,16 +8876,17 @@ } }, "finalhandler": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.1.1.tgz", - "integrity": "sha512-Y1GUDo39ez4aHAw7MysnUD5JzYX+WaIj8I57kO3aEPT1fFRL4sr7mjei97FgnwhAyyzRYmQZaTHb2+9uZ1dPtg==", + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.1.2.tgz", + "integrity": "sha512-aAWcW57uxVNrQZqFXjITpW3sIUQmHGG3qSb9mUah9MgMC4NeWhNOlNjXEYq3HjRAvL6arUviZGGJsBg6z0zsWA==", + "dev": true, "requires": { "debug": "2.6.9", "encodeurl": "~1.0.2", "escape-html": "~1.0.3", "on-finished": "~2.3.0", - "parseurl": "~1.3.2", - "statuses": "~1.4.0", + "parseurl": "~1.3.3", + "statuses": "~1.5.0", "unpipe": "~1.0.0" }, "dependencies": { @@ -5161,44 +8894,127 @@ "version": "2.6.9", "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dev": true, "requires": { "ms": "2.0.0" } + }, + "parseurl": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", + "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==", + "dev": true + }, + "statuses": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz", + "integrity": "sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow=", + "dev": true } } }, "find-cache-dir": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-1.0.0.tgz", - "integrity": "sha1-kojj6ePMN0hxfTnq3hfPcfww7m8=", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-2.1.0.tgz", + "integrity": "sha512-Tq6PixE0w/VMFfCgbONnkiQIVol/JJL7nRMi20fqzA4NRs9AfeqMGeRdPi3wIhYkxjeBaWh2rxwapn5Tu3IqOQ==", + "dev": true, "requires": { "commondir": "^1.0.1", - "make-dir": "^1.0.0", - "pkg-dir": "^2.0.0" + "make-dir": "^2.0.0", + "pkg-dir": "^3.0.0" + }, + "dependencies": { + "find-up": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "dev": 
true, + "requires": { + "locate-path": "^3.0.0" + } + }, + "locate-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "dev": true, + "requires": { + "p-locate": "^3.0.0", + "path-exists": "^3.0.0" + } + }, + "p-limit": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.2.2.tgz", + "integrity": "sha512-WGR+xHecKTr7EbUEhyLSh5Dube9JtdiG78ufaeLxTgpudf/20KqyMioIUZJAezlTIi6evxuoUs9YXc11cU+yzQ==", + "dev": true, + "requires": { + "p-try": "^2.0.0" + } + }, + "p-locate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "dev": true, + "requires": { + "p-limit": "^2.0.0" + } + }, + "p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "dev": true + }, + "pkg-dir": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-3.0.0.tgz", + "integrity": "sha512-/E57AYkoeQ25qkxMj5PBOVgF8Kiu/h7cYS30Z5+R7WaiCCBfLq58ZI/dSeaEKb9WVJV5n/03QwrN3IeWIFllvw==", + "dev": true, + "requires": { + "find-up": "^3.0.0" + } + } } }, - "find-index": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/find-index/-/find-index-0.1.1.tgz", - "integrity": "sha1-Z101iyyjiS15Whq0cjL4tuLg3eQ=" - }, "find-up": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/find-up/-/find-up-2.1.0.tgz", "integrity": "sha1-RdG35QbHF93UgndaK3eSCjwMV6c=", + "dev": true, "requires": { "locate-path": "^2.0.0" } }, + "flat-cache": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-2.0.1.tgz", + "integrity": "sha512-LoQe6yDuUMDzQAEH8sgmh4Md6oZnc/7PjtwjNFSzveXqSHt6ka9fPBuso7IGf9Rz4uqnSnWiFH2B/zj24a5ReA==", + "dev": true, + "requires": { + "flatted": "^2.0.0", + "rimraf": "2.6.3", + "write": "1.0.3" + } + }, + "flatted": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-2.0.2.tgz", + "integrity": "sha512-r5wGx7YeOwNWNlCA0wQ86zKyDLMQr+/RB8xy74M4hTphfmjlijTSSXGuH8rnvKZnfT9i+75zmd8jcKdMR4O6jA==", + "dev": true + }, "flatten": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/flatten/-/flatten-1.0.2.tgz", - "integrity": "sha1-2uRqnXj74lKSJYzB54CkHZXAN4I=" + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/flatten/-/flatten-1.0.3.tgz", + "integrity": "sha512-dVsPA/UwQ8+2uoFe5GHtiBMu48dWLTdsuEd7CKGlZlD78r1TTWBvDuFaFGKCo/ZfEr95Uk56vZoX86OsHkUeIg==", + "dev": true }, "flush-write-stream": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/flush-write-stream/-/flush-write-stream-1.0.3.tgz", "integrity": "sha512-calZMC10u0FMUqoiunI2AiGIIUtUIvifNwkHhNupZH4cbNnW1Itkoh/Nf5HFYmDrwWPjrUxpkZT0KhuCq0jmGw==", + "dev": true, "requires": { "inherits": "^2.0.1", "readable-stream": "^2.0.4" @@ -5212,15 +9028,6 @@ "debug": "=3.1.0" } }, - "for-each": { - "version": "0.3.3", - "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.3.tgz", - "integrity": "sha512-jqYfLp7mo9vIyQf8ykW2v7A+2N4QjeCeI5+Dz9XraiO1ign81wjiH7Fb9vSOWvQfNtmSa4H2RoQTrrXivdUZmw==", - "dev": true, - "requires": { - "is-callable": "^1.1.3" - } - }, "for-in": { "version": "1.0.2", "resolved": 
"https://registry.npmjs.org/for-in/-/for-in-1.0.2.tgz", @@ -5230,6 +9037,7 @@ "version": "0.1.5", "resolved": "https://registry.npmjs.org/for-own/-/for-own-0.1.5.tgz", "integrity": "sha1-UmXGgaTylNq78XyVCbZ2OqhFEM4=", + "dev": true, "requires": { "for-in": "^1.0.1" } @@ -5237,27 +9045,30 @@ "forever-agent": { "version": "0.6.1", "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", - "integrity": "sha1-+8cfDEGt6zf5bFd60e1C2P2sypE=" + "integrity": "sha1-+8cfDEGt6zf5bFd60e1C2P2sypE=", + "dev": true }, "fork-ts-checker-webpack-plugin": { - "version": "0.2.10", - "resolved": "https://registry.npmjs.org/fork-ts-checker-webpack-plugin/-/fork-ts-checker-webpack-plugin-0.2.10.tgz", - "integrity": "sha1-0KQIDnfp9dbjtDzc59JmWPnSUMY=", + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/fork-ts-checker-webpack-plugin/-/fork-ts-checker-webpack-plugin-3.1.1.tgz", + "integrity": "sha512-DuVkPNrM12jR41KM2e+N+styka0EgLkTnXmNcXdgOM37vtGeY+oCBK/Jx0hzSeEU6memFCtWb4htrHPMDfwwUQ==", + "dev": true, "requires": { "babel-code-frame": "^6.22.0", - "chalk": "^1.1.3", - "chokidar": "^1.7.0", - "lodash.endswith": "^4.2.1", - "lodash.isfunction": "^3.0.8", - "lodash.isstring": "^4.0.1", - "lodash.startswith": "^4.2.1", - "minimatch": "^3.0.4" + "chalk": "^2.4.1", + "chokidar": "^3.3.0", + "micromatch": "^3.1.10", + "minimatch": "^3.0.4", + "semver": "^5.6.0", + "tapable": "^1.0.0", + "worker-rpc": "^0.1.0" } }, "form-data": { "version": "2.3.3", "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz", "integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==", + "dev": true, "requires": { "asynckit": "^0.4.0", "combined-stream": "^1.0.6", @@ -5267,7 +9078,8 @@ "forwarded": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.1.2.tgz", - "integrity": "sha1-mMI9qxF1ZXuMBXPozszZGw/xjIQ=" + "integrity": "sha1-mMI9qxF1ZXuMBXPozszZGw/xjIQ=", + "dev": true }, "fragment-cache": { "version": "0.2.1", @@ -5278,38 +9090,59 @@ } }, "free-style": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/free-style/-/free-style-2.5.1.tgz", - "integrity": "sha512-X7dtUSTrlS1KRQBtiQ618NWIRDdRgD91IeajKCSh0fgTqArSixv+n3ea6F/OSvrvg14tPLR+yCq2s+O602+pRw==" + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/free-style/-/free-style-2.6.1.tgz", + "integrity": "sha512-uaVA8e57tvhrFKAl6x32SGIrGFBoeTAFtfHDzWxjPhiXQiUxOI6EEdEReRkjNO2H9XcdMJXXEnMHw8Q7iMYLbw==" }, "fresh": { "version": "0.5.2", "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", - "integrity": "sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac=" + "integrity": "sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac=", + "dev": true }, "from2": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/from2/-/from2-2.3.0.tgz", "integrity": "sha1-i/tVAr3kpNNs/e6gB/zKIdfjgq8=", + "dev": true, "requires": { "inherits": "^2.0.1", "readable-stream": "^2.0.0" } }, - "fs-extra": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-3.0.1.tgz", - "integrity": "sha1-N5TzeMWLNC6n27sjCVEJxLO2IpE=", + "fs-extra": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-8.1.0.tgz", + "integrity": "sha512-yhlQgA6mnOJUKOsRUFsgJdQCvkKhcz8tlZG5HBQfReYZy46OwLcY+Zia0mtdHsOo9y/hP+CxMN0TU9QxoOtG4g==", + "dev": true, + "requires": { + "graceful-fs": "^4.2.0", + "jsonfile": "^4.0.0", + "universalify": "^0.1.0" + }, + "dependencies": { + "graceful-fs": { + "version": "4.2.3", + "resolved": 
"https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.3.tgz", + "integrity": "sha512-a30VEBm4PEdx1dRB7MFK7BejejvCvBronbLjht+sHuGYj8PHs7M/5Z+rt5lw551vZ7yfTCj4Vuyy3mSJytDWRQ==", + "dev": true + } + } + }, + "fs-minipass": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz", + "integrity": "sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==", + "dev": true, "requires": { - "graceful-fs": "^4.1.2", - "jsonfile": "^3.0.0", - "universalify": "^0.1.0" + "minipass": "^3.0.0" } }, "fs-write-stream-atomic": { "version": "1.0.10", "resolved": "https://registry.npmjs.org/fs-write-stream-atomic/-/fs-write-stream-atomic-1.0.10.tgz", "integrity": "sha1-tH31NJPvkR33VzHnCp3tAYnbQMk=", + "dev": true, "requires": { "graceful-fs": "^4.1.2", "iferr": "^0.1.5", @@ -5320,35 +9153,43 @@ "fs.realpath": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", - "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=" + "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=", + "dev": true }, "fsevents": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-1.2.4.tgz", - "integrity": "sha512-z8H8/diyk76B7q5wg+Ud0+CqzcAF3mBBI/bA5ne5zrRUUIvNkJY//D3BqyH571KuAC4Nr7Rw7CjWX4r0y9DvNg==", + "version": "1.2.12", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-1.2.12.tgz", + "integrity": "sha512-Ggd/Ktt7E7I8pxZRbGIs7vwqAPscSESMrCSkx2FtWeqmheJgCo2R74fTsZFCifr0VTPwqRpPv17+6b8Zp7th0Q==", + "dev": true, "optional": true, "requires": { - "nan": "^2.9.2", - "node-pre-gyp": "^0.10.0" + "bindings": "^1.5.0", + "nan": "^2.12.1", + "node-pre-gyp": "*" }, "dependencies": { "abbrev": { "version": "1.1.1", "bundled": true, + "dev": true, "optional": true }, "ansi-regex": { "version": "2.1.1", - "bundled": true + "bundled": true, + "dev": true, + "optional": true }, "aproba": { "version": "1.2.0", "bundled": true, + "dev": true, "optional": true }, "are-we-there-yet": { - "version": "1.1.4", + "version": "1.1.5", "bundled": true, + "dev": true, "optional": true, "requires": { "delegates": "^1.0.0", @@ -5357,77 +9198,96 @@ }, "balanced-match": { "version": "1.0.0", - "bundled": true + "bundled": true, + "dev": true, + "optional": true }, "brace-expansion": { "version": "1.1.11", "bundled": true, + "dev": true, + "optional": true, "requires": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" } }, "chownr": { - "version": "1.0.1", + "version": "1.1.4", "bundled": true, + "dev": true, "optional": true }, "code-point-at": { "version": "1.1.0", - "bundled": true + "bundled": true, + "dev": true, + "optional": true }, "concat-map": { "version": "0.0.1", - "bundled": true + "bundled": true, + "dev": true, + "optional": true }, "console-control-strings": { "version": "1.1.0", - "bundled": true + "bundled": true, + "dev": true, + "optional": true }, "core-util-is": { "version": "1.0.2", "bundled": true, + "dev": true, "optional": true }, "debug": { - "version": "2.6.9", + "version": "3.2.6", "bundled": true, + "dev": true, "optional": true, "requires": { - "ms": "2.0.0" + "ms": "^2.1.1" } }, "deep-extend": { - "version": "0.5.1", + "version": "0.6.0", "bundled": true, + "dev": true, "optional": true }, "delegates": { "version": "1.0.0", "bundled": true, + "dev": true, "optional": true }, "detect-libc": { "version": "1.0.3", "bundled": true, + "dev": true, "optional": true }, "fs-minipass": { - "version": "1.2.5", + "version": "1.2.7", "bundled": true, + "dev": true, 
"optional": true, "requires": { - "minipass": "^2.2.1" + "minipass": "^2.6.0" } }, "fs.realpath": { "version": "1.0.0", "bundled": true, + "dev": true, "optional": true }, "gauge": { "version": "2.7.4", "bundled": true, + "dev": true, "optional": true, "requires": { "aproba": "^1.0.3", @@ -5441,8 +9301,9 @@ } }, "glob": { - "version": "7.1.2", + "version": "7.1.6", "bundled": true, + "dev": true, "optional": true, "requires": { "fs.realpath": "^1.0.0", @@ -5456,19 +9317,22 @@ "has-unicode": { "version": "2.0.1", "bundled": true, + "dev": true, "optional": true }, "iconv-lite": { - "version": "0.4.21", + "version": "0.4.24", "bundled": true, + "dev": true, "optional": true, "requires": { - "safer-buffer": "^2.1.0" + "safer-buffer": ">= 2.1.2 < 3" } }, "ignore-walk": { - "version": "3.0.1", + "version": "3.0.3", "bundled": true, + "dev": true, "optional": true, "requires": { "minimatch": "^3.0.4" @@ -5477,6 +9341,7 @@ "inflight": { "version": "1.0.6", "bundled": true, + "dev": true, "optional": true, "requires": { "once": "^1.3.0", @@ -5484,17 +9349,22 @@ } }, "inherits": { - "version": "2.0.3", - "bundled": true + "version": "2.0.4", + "bundled": true, + "dev": true, + "optional": true }, "ini": { "version": "1.3.5", "bundled": true, + "dev": true, "optional": true }, "is-fullwidth-code-point": { "version": "1.0.0", "bundled": true, + "dev": true, + "optional": true, "requires": { "number-is-nan": "^1.0.0" } @@ -5502,77 +9372,91 @@ "isarray": { "version": "1.0.0", "bundled": true, + "dev": true, "optional": true }, "minimatch": { "version": "3.0.4", "bundled": true, + "dev": true, + "optional": true, "requires": { "brace-expansion": "^1.1.7" } }, "minimist": { - "version": "0.0.8", - "bundled": true + "version": "1.2.5", + "bundled": true, + "dev": true, + "optional": true }, "minipass": { - "version": "2.2.4", + "version": "2.9.0", "bundled": true, + "dev": true, + "optional": true, "requires": { - "safe-buffer": "^5.1.1", + "safe-buffer": "^5.1.2", "yallist": "^3.0.0" } }, "minizlib": { - "version": "1.1.0", + "version": "1.3.3", "bundled": true, + "dev": true, "optional": true, "requires": { - "minipass": "^2.2.1" + "minipass": "^2.9.0" } }, "mkdirp": { - "version": "0.5.1", + "version": "0.5.3", "bundled": true, + "dev": true, + "optional": true, "requires": { - "minimist": "0.0.8" + "minimist": "^1.2.5" } }, "ms": { - "version": "2.0.0", + "version": "2.1.2", "bundled": true, + "dev": true, "optional": true }, "needle": { - "version": "2.2.0", + "version": "2.3.3", "bundled": true, + "dev": true, "optional": true, "requires": { - "debug": "^2.1.2", + "debug": "^3.2.6", "iconv-lite": "^0.4.4", "sax": "^1.2.4" } }, "node-pre-gyp": { - "version": "0.10.0", + "version": "0.14.0", "bundled": true, + "dev": true, "optional": true, "requires": { "detect-libc": "^1.0.2", "mkdirp": "^0.5.1", - "needle": "^2.2.0", + "needle": "^2.2.1", "nopt": "^4.0.1", "npm-packlist": "^1.1.6", "npmlog": "^4.0.2", - "rc": "^1.1.7", + "rc": "^1.2.7", "rimraf": "^2.6.1", "semver": "^5.3.0", - "tar": "^4" + "tar": "^4.4.2" } }, "nopt": { - "version": "4.0.1", + "version": "4.0.3", "bundled": true, + "dev": true, "optional": true, "requires": { "abbrev": "1", @@ -5580,22 +9464,35 @@ } }, "npm-bundled": { - "version": "1.0.3", + "version": "1.1.1", + "bundled": true, + "dev": true, + "optional": true, + "requires": { + "npm-normalize-package-bin": "^1.0.1" + } + }, + "npm-normalize-package-bin": { + "version": "1.0.1", "bundled": true, + "dev": true, "optional": true }, "npm-packlist": { - "version": "1.1.10", + 
"version": "1.4.8", "bundled": true, + "dev": true, "optional": true, "requires": { "ignore-walk": "^3.0.1", - "npm-bundled": "^1.0.1" + "npm-bundled": "^1.0.1", + "npm-normalize-package-bin": "^1.0.1" } }, "npmlog": { "version": "4.1.2", "bundled": true, + "dev": true, "optional": true, "requires": { "are-we-there-yet": "~1.1.2", @@ -5606,16 +9503,21 @@ }, "number-is-nan": { "version": "1.0.1", - "bundled": true + "bundled": true, + "dev": true, + "optional": true }, "object-assign": { "version": "4.1.1", "bundled": true, + "dev": true, "optional": true }, "once": { "version": "1.4.0", "bundled": true, + "dev": true, + "optional": true, "requires": { "wrappy": "1" } @@ -5623,16 +9525,19 @@ "os-homedir": { "version": "1.0.2", "bundled": true, + "dev": true, "optional": true }, "os-tmpdir": { "version": "1.0.2", "bundled": true, + "dev": true, "optional": true }, "osenv": { "version": "0.1.5", "bundled": true, + "dev": true, "optional": true, "requires": { "os-homedir": "^1.0.0", @@ -5642,34 +9547,31 @@ "path-is-absolute": { "version": "1.0.1", "bundled": true, + "dev": true, "optional": true }, "process-nextick-args": { - "version": "2.0.0", + "version": "2.0.1", "bundled": true, + "dev": true, "optional": true }, "rc": { - "version": "1.2.7", + "version": "1.2.8", "bundled": true, + "dev": true, "optional": true, "requires": { - "deep-extend": "^0.5.1", + "deep-extend": "^0.6.0", "ini": "~1.3.0", "minimist": "^1.2.0", "strip-json-comments": "~2.0.1" - }, - "dependencies": { - "minimist": { - "version": "1.2.0", - "bundled": true, - "optional": true - } } }, "readable-stream": { - "version": "2.3.6", + "version": "2.3.7", "bundled": true, + "dev": true, "optional": true, "requires": { "core-util-is": "~1.0.0", @@ -5682,45 +9584,55 @@ } }, "rimraf": { - "version": "2.6.2", + "version": "2.7.1", "bundled": true, + "dev": true, "optional": true, "requires": { - "glob": "^7.0.5" + "glob": "^7.1.3" } }, "safe-buffer": { - "version": "5.1.1", - "bundled": true + "version": "5.1.2", + "bundled": true, + "dev": true, + "optional": true }, "safer-buffer": { "version": "2.1.2", "bundled": true, + "dev": true, "optional": true }, "sax": { "version": "1.2.4", "bundled": true, + "dev": true, "optional": true }, "semver": { - "version": "5.5.0", + "version": "5.7.1", "bundled": true, + "dev": true, "optional": true }, "set-blocking": { "version": "2.0.0", "bundled": true, + "dev": true, "optional": true }, "signal-exit": { "version": "3.0.2", "bundled": true, + "dev": true, "optional": true }, "string-width": { "version": "1.0.2", "bundled": true, + "dev": true, + "optional": true, "requires": { "code-point-at": "^1.0.0", "is-fullwidth-code-point": "^1.0.0", @@ -5730,6 +9642,7 @@ "string_decoder": { "version": "1.1.1", "bundled": true, + "dev": true, "optional": true, "requires": { "safe-buffer": "~5.1.0" @@ -5738,6 +9651,8 @@ "strip-ansi": { "version": "3.0.1", "bundled": true, + "dev": true, + "optional": true, "requires": { "ansi-regex": "^2.0.0" } @@ -5745,75 +9660,320 @@ "strip-json-comments": { "version": "2.0.1", "bundled": true, + "dev": true, + "optional": true + }, + "tar": { + "version": "4.4.13", + "bundled": true, + "dev": true, + "optional": true, + "requires": { + "chownr": "^1.1.1", + "fs-minipass": "^1.2.5", + "minipass": "^2.8.6", + "minizlib": "^1.2.1", + "mkdirp": "^0.5.0", + "safe-buffer": "^5.1.2", + "yallist": "^3.0.3" + } + }, + "util-deprecate": { + "version": "1.0.2", + "bundled": true, + "dev": true, + "optional": true + }, + "wide-align": { + "version": "1.1.3", + "bundled": 
true, + "dev": true, + "optional": true, + "requires": { + "string-width": "^1.0.2 || 2" + } + }, + "wrappy": { + "version": "1.0.2", + "bundled": true, + "dev": true, + "optional": true + }, + "yallist": { + "version": "3.1.1", + "bundled": true, + "dev": true, "optional": true + } + } + }, + "function-bind": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", + "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==", + "dev": true + }, + "function.prototype.name": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.1.tgz", + "integrity": "sha512-e1NzkiJuw6xqVH7YSdiW/qDHebcmMhPNe6w+4ZYYEg0VA+LaLzx37RimbPLuonHhYGFGPx1ME2nSi74JiaCr/Q==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "function-bind": "^1.1.1", + "functions-have-names": "^1.1.1", + "is-callable": "^1.1.4" + } + }, + "functional-red-black-tree": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz", + "integrity": "sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc=", + "dev": true + }, + "functions-have-names": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/functions-have-names/-/functions-have-names-1.1.1.tgz", + "integrity": "sha512-U0kNHUoxwPNPWOJaMG7Z00d4a/qZVrFtzWJRaK8V9goaVOCXBSQSJpt3MYGNtkScKEBKovxLjnNdC9MlXwo5Pw==", + "dev": true + }, + "gaxios": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-2.1.0.tgz", + "integrity": "sha512-Gtpb5sdQmb82sgVkT2GnS2n+Kx4dlFwbeMYcDlD395aEvsLCSQXJJcHt7oJ2LrGxDEAeiOkK79Zv2A8Pzt6CFg==", + "dev": true, + "requires": { + "abort-controller": "^3.0.0", + "extend": "^3.0.2", + "https-proxy-agent": "^3.0.0", + "is-stream": "^2.0.0", + "node-fetch": "^2.3.0" + }, + "dependencies": { + "agent-base": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-4.3.0.tgz", + "integrity": "sha512-salcGninV0nPrwpGNn4VTXBb1SOuXQBiqbrNXoeizJsHrsL6ERFM2Ne3JUSBWRE6aeNJI2ROP/WEEIDUiDe3cg==", + "dev": true, + "requires": { + "es6-promisify": "^5.0.0" + } + }, + "https-proxy-agent": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-3.0.1.tgz", + "integrity": "sha512-+ML2Rbh6DAuee7d07tYGEKOEi2voWPUGan+ExdPbPW6Z3svq+JCqr0v8WmKPOkz1vOVykPCBSuobe7G8GJUtVg==", + "dev": true, + "requires": { + "agent-base": "^4.3.0", + "debug": "^3.1.0" + } + }, + "is-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.0.tgz", + "integrity": "sha512-XCoy+WlUr7d1+Z8GgSuXmpuUFC9fOhRXglJMx+dwLKTkL44Cjd4W1Z5P+BQZpr+cR93aGP4S/s7Ftw6Nd/kiEw==", + "dev": true + }, + "node-fetch": { + "version": "2.6.0", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.0.tgz", + "integrity": "sha512-8dG4H5ujfvFiqDmVu9fQ5bOHUC15JMjMY/Zumv26oOvvVJjM67KF8koCWIabKQ1GJIa9r2mMZscBq/TbdOcmNA==", + "dev": true + } + } + }, + "gcp-metadata": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-3.2.2.tgz", + "integrity": "sha512-vR7kcJMCYJG/mYWp/a1OszdOqnLB/XW1GorWW1hc1lWVNL26L497zypWb9cG0CYDQ4Bl1Wk0+fSZFFjwJlTQgQ==", + "dev": true, + "requires": { + "gaxios": "^2.1.0", + "json-bigint": "^0.3.0" + } + }, + "gcs-resumable-upload": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/gcs-resumable-upload/-/gcs-resumable-upload-2.3.1.tgz", + "integrity": 
"sha512-zEO7L+jz99VznQsbsF7vFTnIFbSu+CjdJqt5htnjIrfsp5j+QCVBvbbKdqpaTfCPzpUPYj1Q9O9DhIh/8newfA==", + "dev": true, + "requires": { + "abort-controller": "^3.0.0", + "configstore": "^5.0.0", + "gaxios": "^2.0.0", + "google-auth-library": "^5.0.0", + "pumpify": "^2.0.0", + "stream-events": "^1.0.4" + }, + "dependencies": { + "configstore": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/configstore/-/configstore-5.0.0.tgz", + "integrity": "sha512-eE/hvMs7qw7DlcB5JPRnthmrITuHMmACUJAp89v6PT6iOqzoLS7HRWhBtuHMlhNHo2AhUSA/3Dh1bKNJHcublQ==", + "dev": true, + "requires": { + "dot-prop": "^5.1.0", + "graceful-fs": "^4.1.2", + "make-dir": "^3.0.0", + "unique-string": "^2.0.0", + "write-file-atomic": "^3.0.0", + "xdg-basedir": "^4.0.0" + } + }, + "crypto-random-string": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/crypto-random-string/-/crypto-random-string-2.0.0.tgz", + "integrity": "sha512-v1plID3y9r/lPhviJ1wrXpLeyUIGAZ2SHNYTEapm7/8A9nLPoyvVp3RK/EPFqn5kEznyWgYZNsRtYYIWbuG8KA==", + "dev": true + }, + "dot-prop": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-5.2.0.tgz", + "integrity": "sha512-uEUyaDKoSQ1M4Oq8l45hSE26SnTxL6snNnqvK/VWx5wJhmff5z0FUVJDKDanor/6w3kzE3i7XZOk+7wC0EXr1A==", + "dev": true, + "requires": { + "is-obj": "^2.0.0" + } + }, + "duplexify": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.1.tgz", + "integrity": "sha512-DY3xVEmVHTv1wSzKNbwoU6nVjzI369Y6sPoqfYr0/xlx3IdX2n94xIszTcjPO8W8ZIv0Wb0PXNcjuZyT4wiICA==", + "dev": true, + "requires": { + "end-of-stream": "^1.4.1", + "inherits": "^2.0.3", + "readable-stream": "^3.1.1", + "stream-shift": "^1.0.0" + } + }, + "is-obj": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-2.0.0.tgz", + "integrity": "sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w==", + "dev": true + }, + "make-dir": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.0.0.tgz", + "integrity": "sha512-grNJDhb8b1Jm1qeqW5R/O63wUo4UXo2v2HMic6YT9i/HBlF93S8jkMgH7yugvY9ABDShH4VZMn8I+U8+fCNegw==", + "dev": true, + "requires": { + "semver": "^6.0.0" + } + }, + "pump": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", + "integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==", + "dev": true, + "requires": { + "end-of-stream": "^1.1.0", + "once": "^1.3.1" + } + }, + "pumpify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/pumpify/-/pumpify-2.0.1.tgz", + "integrity": "sha512-m7KOje7jZxrmutanlkS1daj1dS6z6BgslzOXmcSEpIlCxM3VJH7lG5QLeck/6hgF6F4crFf01UtQmNsJfweTAw==", + "dev": true, + "requires": { + "duplexify": "^4.1.1", + "inherits": "^2.0.3", + "pump": "^3.0.0" + } }, - "tar": { - "version": "4.4.1", - "bundled": true, - "optional": true, + "readable-stream": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.4.0.tgz", + "integrity": "sha512-jItXPLmrSR8jmTRmRWJXCnGJsfy85mB3Wd/uINMXA65yrnFo0cPClFIUWzo2najVNSl+mx7/4W8ttlLWJe99pQ==", + "dev": true, "requires": { - "chownr": "^1.0.1", - "fs-minipass": "^1.2.5", - "minipass": "^2.2.4", - "minizlib": "^1.1.0", - "mkdirp": "^0.5.0", - "safe-buffer": "^5.1.1", - "yallist": "^3.0.2" + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" } }, - "util-deprecate": { - "version": "1.0.2", - "bundled": true, - "optional": true + 
"semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "dev": true }, - "wide-align": { - "version": "1.1.2", - "bundled": true, - "optional": true, + "unique-string": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/unique-string/-/unique-string-2.0.0.tgz", + "integrity": "sha512-uNaeirEPvpZWSgzwsPGtU2zVSTrn/8L5q/IexZmH0eH6SA73CmAA5U4GwORTxQAZs95TAXLNqeLoPPNO5gZfWg==", + "dev": true, "requires": { - "string-width": "^1.0.2" + "crypto-random-string": "^2.0.0" } }, - "wrappy": { - "version": "1.0.2", - "bundled": true + "write-file-atomic": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-3.0.1.tgz", + "integrity": "sha512-JPStrIyyVJ6oCSz/691fAjFtefZ6q+fP6tm+OS4Qw6o+TGQxNp1ziY2PgS+X/m0V8OWhZiO/m4xSj+Pr4RrZvw==", + "dev": true, + "requires": { + "imurmurhash": "^0.1.4", + "is-typedarray": "^1.0.0", + "signal-exit": "^3.0.2", + "typedarray-to-buffer": "^3.1.5" + } }, - "yallist": { - "version": "3.0.2", - "bundled": true + "xdg-basedir": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/xdg-basedir/-/xdg-basedir-4.0.0.tgz", + "integrity": "sha512-PSNhEJDejZYV7h50BohL09Er9VaIefr2LMAf3OEmpCkjOi34eYyQYAXUTjEQtZJTKcF0E2UKTh+osDLsgNim9Q==", + "dev": true } } }, - "function-bind": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", - "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==" - }, - "function.prototype.name": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.0.tgz", - "integrity": "sha512-Bs0VRrTz4ghD8pTmbJQD1mZ8A/mN0ur/jGz+A6FBxPDUPkm1tNfF6bhTYPA7i7aF4lZJVr+OXTNNrnnIl58Wfg==", - "dev": true, - "requires": { - "define-properties": "^1.1.2", - "function-bind": "^1.1.1", - "is-callable": "^1.1.3" - } + "gensync": { + "version": "1.0.0-beta.1", + "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.1.tgz", + "integrity": "sha512-r8EC6NO1sngH/zdD9fiRDLdcgnbayXah+mLgManTaIZJqEC1MZstmnox8KpnI2/fxQwrp5OpCOYWLp4rBl4Jcg==", + "dev": true }, "get-caller-file": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-1.0.3.tgz", - "integrity": "sha512-3t6rVToeoZfYSGd8YoLFR2DJkiQrIiUrGcjvFX2mDw3bn6k2OtwHN0TNCLbBO+w8qTvimhDkv+LSscbJY1vE6w==" + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "dev": true + }, + "get-own-enumerable-property-symbols": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/get-own-enumerable-property-symbols/-/get-own-enumerable-property-symbols-3.0.2.tgz", + "integrity": "sha512-I0UBV/XOz1XkIJHEUDMZAbzCThU/H8DxmSfmdGcKPnVhu2VfFqr34jr9777IyaTYvxjedWhqVIilEDsCdP5G6g==", + "dev": true }, "get-stdin": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/get-stdin/-/get-stdin-4.0.1.tgz", - "integrity": "sha1-uWjGsKBDhDJJAui/Gl3zJXmkUP4=" + "integrity": "sha1-uWjGsKBDhDJJAui/Gl3zJXmkUP4=", + "dev": true }, "get-stream": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-3.0.0.tgz", - "integrity": "sha1-jpQ9E1jcN1VQVOy+LtsFqhdO3hQ=" + "version": "4.1.0", + 
"resolved": "https://registry.npmjs.org/get-stream/-/get-stream-4.1.0.tgz", + "integrity": "sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w==", + "dev": true, + "requires": { + "pump": "^3.0.0" + }, + "dependencies": { + "pump": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", + "integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==", + "dev": true, + "requires": { + "end-of-stream": "^1.1.0", + "once": "^1.3.1" + } + } + } }, "get-value": { "version": "2.0.6", @@ -5824,6 +9984,7 @@ "version": "0.1.7", "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz", "integrity": "sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo=", + "dev": true, "requires": { "assert-plus": "^1.0.0" } @@ -5832,6 +9993,7 @@ "version": "7.1.3", "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.3.tgz", "integrity": "sha512-vcfuiIxogLV4DlGBHIUOwI0IbrJ8HWPc4MU7HzviGeNho/UJDfi6B5p3sHeWIQ0KGIU0Jpxi5ZHxemQfLkkAwQ==", + "dev": true, "requires": { "fs.realpath": "^1.0.0", "inflight": "^1.0.4", @@ -5841,60 +10003,31 @@ "path-is-absolute": "^1.0.0" } }, - "glob-base": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/glob-base/-/glob-base-0.3.0.tgz", - "integrity": "sha1-27Fk9iIbHAscz4Kuoyi0l98Oo8Q=", - "requires": { - "glob-parent": "^2.0.0", - "is-glob": "^2.0.0" - }, - "dependencies": { - "is-extglob": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-1.0.0.tgz", - "integrity": "sha1-rEaBd8SUNAWgkvyPKXYMb/xiBsA=" - }, - "is-glob": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-2.0.1.tgz", - "integrity": "sha1-0Jb5JqPe1WAPP9/ZEZjLCIjC2GM=", - "requires": { - "is-extglob": "^1.0.0" - } - } - } - }, "glob-parent": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-2.0.0.tgz", - "integrity": "sha1-gTg9ctsFT8zPUzbaqQLxgvbtuyg=", + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.1.tgz", + "integrity": "sha512-FnI+VGOpnlGHWZxthPGR+QhR78fuiK0sNLkHQv+bL9fQi57lNNdquIbna/WrfROrolq8GK5Ek6BiMwqL/voRYQ==", + "dev": true, "requires": { - "is-glob": "^2.0.0" + "is-glob": "^4.0.1" }, "dependencies": { - "is-extglob": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-1.0.0.tgz", - "integrity": "sha1-rEaBd8SUNAWgkvyPKXYMb/xiBsA=" - }, "is-glob": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-2.0.1.tgz", - "integrity": "sha1-0Jb5JqPe1WAPP9/ZEZjLCIjC2GM=", + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.1.tgz", + "integrity": "sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg==", + "dev": true, "requires": { - "is-extglob": "^1.0.0" + "is-extglob": "^2.1.1" } } } }, - "glob2base": { - "version": "0.0.12", - "resolved": "https://registry.npmjs.org/glob2base/-/glob2base-0.0.12.tgz", - "integrity": "sha1-nUGbPijxLoOjYhZKJ3BVkiycDVY=", - "requires": { - "find-index": "^0.1.1" - } + "glob-to-regexp": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/glob-to-regexp/-/glob-to-regexp-0.3.0.tgz", + "integrity": "sha1-jFoUlNIGbFcMw7/kSWF1rMTVAqs=", + "dev": true }, "global": { "version": "4.3.2", @@ -5912,76 +10045,133 @@ } } }, - "global-dirs": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/global-dirs/-/global-dirs-0.1.1.tgz", - "integrity": 
"sha1-sxnA3UYH81PzvpzKTHL8FIxJ9EU=", - "requires": { - "ini": "^1.3.4" - } - }, "global-modules": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/global-modules/-/global-modules-1.0.0.tgz", - "integrity": "sha512-sKzpEkf11GpOFuw0Zzjzmt4B4UZwjOcG757PPvrfhxcLFbq0wpsgpOqxpxtxFiCG4DtG93M6XRVbF2oGdev7bg==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/global-modules/-/global-modules-2.0.0.tgz", + "integrity": "sha512-NGbfmJBp9x8IxyJSd1P+otYK8vonoJactOogrVfFRIAEY1ukil8RSKDz2Yo7wh1oihl51l/r6W4epkeKJHqL8A==", + "dev": true, "requires": { - "global-prefix": "^1.0.1", - "is-windows": "^1.0.1", - "resolve-dir": "^1.0.0" + "global-prefix": "^3.0.0" } }, "global-prefix": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/global-prefix/-/global-prefix-1.0.2.tgz", - "integrity": "sha1-2/dDxsFJklk8ZVVoy2btMsASLr4=", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/global-prefix/-/global-prefix-3.0.0.tgz", + "integrity": "sha512-awConJSVCHVGND6x3tmMaKcQvwXLhjdkmomy2W+Goaui8YPgYgXJZewhg3fWC+DlfqqQuWg8AwqjGTD2nAPVWg==", + "dev": true, "requires": { - "expand-tilde": "^2.0.2", - "homedir-polyfill": "^1.0.1", - "ini": "^1.3.4", - "is-windows": "^1.0.1", - "which": "^1.2.14" + "ini": "^1.3.5", + "kind-of": "^6.0.2", + "which": "^1.3.1" } }, "globals": { - "version": "9.18.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-9.18.0.tgz", - "integrity": "sha512-S0nG3CLEQiY/ILxqtztTWH/3iRRdyBLw6KMDxnKMchrtbj2OFmehVh0WUCfW3DUrIgx/qFrJPICrq4Z4sTR9UQ==" + "version": "11.12.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", + "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", + "dev": true }, "globby": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/globby/-/globby-5.0.0.tgz", - "integrity": "sha1-69hGZ8oNuzMLmbz8aOrCvFQ3Dg0=", + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/globby/-/globby-8.0.2.tgz", + "integrity": "sha512-yTzMmKygLp8RUpG1Ymu2VXPSJQZjNAZPD4ywgYEaG7e4tBJeUQBO8OpXrf1RCNcEs5alsoJYPAMiIHP0cmeC7w==", + "dev": true, "requires": { "array-union": "^1.0.1", - "arrify": "^1.0.0", - "glob": "^7.0.3", - "object-assign": "^4.0.1", - "pify": "^2.0.0", - "pinkie-promise": "^2.0.0" + "dir-glob": "2.0.0", + "fast-glob": "^2.0.2", + "glob": "^7.1.2", + "ignore": "^3.3.5", + "pify": "^3.0.0", + "slash": "^1.0.0" + }, + "dependencies": { + "ignore": { + "version": "3.3.10", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-3.3.10.tgz", + "integrity": "sha512-Pgs951kaMm5GXP7MOvxERINe3gsaVjUWFm+UZPSq9xYriQAksyhg0csnS0KXSNRD5NmNdapXEpjxG49+AKh/ug==", + "dev": true + }, + "pify": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", + "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=", + "dev": true + }, + "slash": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-1.0.0.tgz", + "integrity": "sha1-xB8vbDn8FtHNF61LXYlhFK5HDVU=", + "dev": true + } } }, - "got": { - "version": "6.7.1", - "resolved": "http://registry.npmjs.org/got/-/got-6.7.1.tgz", - "integrity": "sha1-JAzQV4WpoY5WHcG0S0HHY+8ejbA=", + "google-auth-library": { + "version": "5.5.1", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-5.5.1.tgz", + "integrity": "sha512-zCtjQccWS/EHYyFdXRbfeSGM/gW+d7uMAcVnvXRnjBXON5ijo6s0nsObP0ifqileIDSbZjTlLtgo+UoN8IFJcg==", + "dev": true, "requires": { - "create-error-class": "^3.0.0", - "duplexer3": "^0.1.4", - "get-stream": 
"^3.0.0", - "is-redirect": "^1.0.0", - "is-retry-allowed": "^1.0.0", - "is-stream": "^1.0.0", - "lowercase-keys": "^1.0.0", - "safe-buffer": "^5.0.1", - "timed-out": "^4.0.0", - "unzip-response": "^2.0.1", - "url-parse-lax": "^1.0.0" + "arrify": "^2.0.0", + "base64-js": "^1.3.0", + "fast-text-encoding": "^1.0.0", + "gaxios": "^2.1.0", + "gcp-metadata": "^3.2.0", + "gtoken": "^4.1.0", + "jws": "^3.1.5", + "lru-cache": "^5.0.0" + }, + "dependencies": { + "arrify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/arrify/-/arrify-2.0.1.tgz", + "integrity": "sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug==", + "dev": true + }, + "lru-cache": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", + "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", + "dev": true, + "requires": { + "yallist": "^3.0.2" + } + }, + "yallist": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", + "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==", + "dev": true + } + } + }, + "google-p12-pem": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-2.0.3.tgz", + "integrity": "sha512-Tq2kBCANxYYPxaBpTgCpRfdoPs9+/lNzc/Iaee4kuMVW5ascD+HwhpBsTLwH85C9Ev4qfB8KKHmpPQYyD2vg2w==", + "dev": true, + "requires": { + "node-forge": "^0.9.0" + }, + "dependencies": { + "node-forge": { + "version": "0.9.1", + "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.9.1.tgz", + "integrity": "sha512-G6RlQt5Sb4GMBzXvhfkeFmbqR6MzhtnT7VTHuLadjkii3rdYHNdw0m8zA4BTxVIh68FicCQ2NSUANpsqkr9jvQ==", + "dev": true + } } }, + "google-protobuf": { + "version": "3.10.0", + "resolved": "https://registry.npmjs.org/google-protobuf/-/google-protobuf-3.10.0.tgz", + "integrity": "sha512-d0cMO8TJ6xtB/WrVHCv5U81L2ulX+aCD58IljyAN6mHwdHHJ2jbcauX5glvivi3s3hx7EYEo7eUA9WftzamMnw==" + }, "graceful-fs": { "version": "4.1.15", "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.1.15.tgz", - "integrity": "sha512-6uHUhOPEBgQ24HM+r6b/QwWfZq+yiFcipKFrOFiBEnWdy5sdzYoi+pJeQaPI5qOLRFqWmAXUPQNsielzdLoecA==" + "integrity": "sha512-6uHUhOPEBgQ24HM+r6b/QwWfZq+yiFcipKFrOFiBEnWdy5sdzYoi+pJeQaPI5qOLRFqWmAXUPQNsielzdLoecA==", + "dev": true }, "graphlib": { "version": "2.1.7", @@ -6000,48 +10190,90 @@ "growly": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/growly/-/growly-1.3.0.tgz", - "integrity": "sha1-8QdIy+dq+WS3yWyTxrzCivEgwIE=" + "integrity": "sha1-8QdIy+dq+WS3yWyTxrzCivEgwIE=", + "dev": true + }, + "grpc-web": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/grpc-web/-/grpc-web-1.0.7.tgz", + "integrity": "sha512-Fkbz1nyvvt6GC6ODcxh9Fen6LLB3OTCgGHzHwM2Eni44SUhzqPz1UQgFp9sfBEfInOhx3yBdwo9ZLjZAmJ+TtA==" + }, + "gtoken": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-4.1.3.tgz", + "integrity": "sha512-ofW+FiXjswyKdkjMcDbe6E4K7cDDdE82dGDhZIc++kUECqaE7MSErf6arJPAjcnYn1qxE1/Ti06qQuqgVusovQ==", + "dev": true, + "requires": { + "gaxios": "^2.1.0", + "google-p12-pem": "^2.0.0", + "jws": "^3.1.5", + "mime": "^2.2.0" + }, + "dependencies": { + "mime": { + "version": "2.4.4", + "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.4.tgz", + "integrity": "sha512-LRxmNwziLPT828z+4YkNzloCFC2YM4wrB99k+AV5ZbEyfGNWfG8SO1FUXLmLDBSo89NrJZ4DIWeLjy1CHGhMGA==", + "dev": true + } + } }, 
"gzip-size": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/gzip-size/-/gzip-size-3.0.0.tgz", - "integrity": "sha1-VGGI6b3DN/Zzdy+BZgRks4nc5SA=", + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/gzip-size/-/gzip-size-5.1.1.tgz", + "integrity": "sha512-FNHi6mmoHvs1mxZAds4PpdCS6QG8B4C1krxJsMutgxl5t3+GlRTzzI3NEkifXx2pVsOvJdOGSmIgDhQ55FwdPA==", + "dev": true, "requires": { - "duplexer": "^0.1.1" + "duplexer": "^0.1.1", + "pify": "^4.0.1" + }, + "dependencies": { + "pify": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz", + "integrity": "sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==", + "dev": true + } } }, "handle-thing": { - "version": "1.2.5", - "resolved": "https://registry.npmjs.org/handle-thing/-/handle-thing-1.2.5.tgz", - "integrity": "sha1-/Xqtcmvxpf0W38KbL3pmAdJxOcQ=" + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/handle-thing/-/handle-thing-2.0.1.tgz", + "integrity": "sha512-9Qn4yBxelxoh2Ow62nP+Ka/kMnOXRi8BXnRaUwezLNhqelnN49xKz4F/dPP8OYLxLxq6JDtZb2i9XznUQbNPTg==", + "dev": true }, "handlebars": { - "version": "4.0.12", - "resolved": "https://registry.npmjs.org/handlebars/-/handlebars-4.0.12.tgz", - "integrity": "sha512-RhmTekP+FZL+XNhwS1Wf+bTTZpdLougwt5pcgA1tuz6Jcx0fpH/7z0qd71RKnZHBCxIRBHfBOnio4gViPemNzA==", + "version": "4.7.6", + "resolved": "https://registry.npmjs.org/handlebars/-/handlebars-4.7.6.tgz", + "integrity": "sha512-1f2BACcBfiwAfStCKZNrUCgqNZkGsAT7UM3kkYtXuLo0KnaVfjKOyf7PRzB6++aK9STyT1Pd2ZCPe3EGOXleXA==", + "dev": true, "requires": { - "async": "^2.5.0", - "optimist": "^0.6.1", + "minimist": "^1.2.5", + "neo-async": "^2.6.0", "source-map": "^0.6.1", - "uglify-js": "^3.1.4" + "uglify-js": "^3.1.4", + "wordwrap": "^1.0.0" }, "dependencies": { "source-map": { "version": "0.6.1", "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==" + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true } } }, "har-schema": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz", - "integrity": "sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI=" + "integrity": "sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI=", + "dev": true }, "har-validator": { "version": "5.1.3", "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.1.3.tgz", "integrity": "sha512-sNvOCzEQNr/qrvJgc3UG/kD4QtlHycrzwS+6mfTrrSq97BvaYcPZZI1ZSqGSPR73Cxn4LKTD4PttRwfU7jWq5g==", + "dev": true, "requires": { "ajv": "^6.5.5", "har-schema": "^2.0.0" @@ -6051,6 +10283,7 @@ "version": "6.6.2", "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.6.2.tgz", "integrity": "sha512-FBHEW6Jf5TB9MGBgUUA9XHkTbjXYfAUjY43ACMfmdMRHniyoMHjHjzD50OK8LGDWQwp4rWEsIq5kEqq7rvIM1g==", + "dev": true, "requires": { "fast-deep-equal": "^2.0.1", "fast-json-stable-stringify": "^2.0.0", @@ -6061,19 +10294,28 @@ "fast-deep-equal": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-2.0.1.tgz", - "integrity": "sha1-ewUhjd+WZ79/Nwv3/bLLFf3Qqkk=" + "integrity": "sha1-ewUhjd+WZ79/Nwv3/bLLFf3Qqkk=", + "dev": true }, "json-schema-traverse": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", - "integrity": 
"sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==" + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true } } }, + "harmony-reflect": { + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/harmony-reflect/-/harmony-reflect-1.6.1.tgz", + "integrity": "sha512-WJTeyp0JzGtHcuMsi7rw2VwtkvLa+JyfEKJCFyfcS0+CDkjQ5lHPu7zEhFZP+PDSRrEgXa5Ah0l1MbgbE41XjA==", + "dev": true + }, "has": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", + "dev": true, "requires": { "function-bind": "^1.1.1" } @@ -6082,19 +10324,30 @@ "version": "2.0.0", "resolved": "https://registry.npmjs.org/has-ansi/-/has-ansi-2.0.0.tgz", "integrity": "sha1-NPUEnOHs3ysGSa8+8k5F7TVBbZE=", + "dev": true, "requires": { "ansi-regex": "^2.0.0" + }, + "dependencies": { + "ansi-regex": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", + "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=", + "dev": true + } } }, "has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", - "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=" + "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=", + "dev": true }, "has-symbols": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.0.tgz", - "integrity": "sha1-uhqPGvKg/DllD1yFA2dwQSIGO0Q=" + "integrity": "sha1-uhqPGvKg/DllD1yFA2dwQSIGO0Q=", + "dev": true }, "has-value": { "version": "1.0.0", @@ -6129,34 +10382,36 @@ "version": "3.0.4", "resolved": "https://registry.npmjs.org/hash-base/-/hash-base-3.0.4.tgz", "integrity": "sha1-X8hoaEfs1zSZQDMZprCj8/auSRg=", + "dev": true, "requires": { "inherits": "^2.0.1", "safe-buffer": "^5.0.1" } }, + "hash-stream-validation": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/hash-stream-validation/-/hash-stream-validation-0.2.2.tgz", + "integrity": "sha512-cMlva5CxWZOrlS/cY0C+9qAzesn5srhFA8IT1VPiHc9bWWBLkJfEUIZr7MWoi89oOOGmpg8ymchaOjiArsGu5A==", + "dev": true, + "requires": { + "through2": "^2.0.0" + } + }, "hash.js": { "version": "1.1.7", "resolved": "https://registry.npmjs.org/hash.js/-/hash.js-1.1.7.tgz", "integrity": "sha512-taOaskGt4z4SOANNseOviYDvjEJinIkRgmp7LbKP2YTTmVxWBl87s/uzK9r+44BclBSp2X7K1hqeNfz9JbBeXA==", + "dev": true, "requires": { "inherits": "^2.0.3", "minimalistic-assert": "^1.0.1" } }, - "hasha": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/hasha/-/hasha-2.2.0.tgz", - "integrity": "sha1-eNfL/B5tZjA/55g3NlmEUXsvbuE=", - "dev": true, - "requires": { - "is-stream": "^1.0.1", - "pinkie-promise": "^2.0.0" - } - }, "he": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz", - "integrity": "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==" + "integrity": "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==", + "dev": true }, "header-case": { "version": "1.0.1", @@ -6168,6 +10423,12 @@ "upper-case": "^1.1.3" } }, + "hex-color-regex": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/hex-color-regex/-/hex-color-regex-1.1.0.tgz", + "integrity": "sha512-l9sfDFsuqtOqKDsQdqrMRk0U85RZc0RtOR9yPI7mRVOa4FsR/BVnZ0shmQRM96Ji99kYZP/7hn1cedc1+ApsTQ==", + "dev": true + }, "history": { "version": "4.7.2", 
"resolved": "https://registry.npmjs.org/history/-/history-4.7.2.tgz", @@ -6194,6 +10455,7 @@ "version": "1.0.1", "resolved": "https://registry.npmjs.org/hmac-drbg/-/hmac-drbg-1.0.1.tgz", "integrity": "sha1-0nRXAQJabHdabFRXk+1QL8DGSaE=", + "dev": true, "requires": { "hash.js": "^1.0.3", "minimalistic-assert": "^1.0.0", @@ -6213,23 +10475,6 @@ "react-is": "^16.3.2" } }, - "home-or-tmp": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/home-or-tmp/-/home-or-tmp-2.0.0.tgz", - "integrity": "sha1-42w/LSyufXRqhX440Y1fMqeILbg=", - "requires": { - "os-homedir": "^1.0.0", - "os-tmpdir": "^1.0.1" - } - }, - "homedir-polyfill": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/homedir-polyfill/-/homedir-polyfill-1.0.1.tgz", - "integrity": "sha1-TCu8inWJmP7r9e1oWA921GdotLw=", - "requires": { - "parse-passwd": "^1.0.0" - } - }, "hoopy": { "version": "0.1.4", "resolved": "https://registry.npmjs.org/hoopy/-/hoopy-0.1.4.tgz", @@ -6239,12 +10484,14 @@ "hosted-git-info": { "version": "2.7.1", "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.7.1.tgz", - "integrity": "sha512-7T/BxH19zbcCTa8XkMlbK5lTo1WtgkFi3GvdWEyNuc4Vex7/9Dqbnpsf4JMydcfj9HCg4zUWFTL3Za6lapg5/w==" + "integrity": "sha512-7T/BxH19zbcCTa8XkMlbK5lTo1WtgkFi3GvdWEyNuc4Vex7/9Dqbnpsf4JMydcfj9HCg4zUWFTL3Za6lapg5/w==", + "dev": true }, "hpack.js": { "version": "2.1.6", "resolved": "https://registry.npmjs.org/hpack.js/-/hpack.js-2.1.6.tgz", "integrity": "sha1-h3dMCUnlE/QuhFdbPEVoH63ioLI=", + "dev": true, "requires": { "inherits": "^2.0.1", "obuf": "^1.0.0", @@ -6252,15 +10499,46 @@ "wbuf": "^1.1.0" } }, + "hsl-regex": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/hsl-regex/-/hsl-regex-1.0.0.tgz", + "integrity": "sha1-1JMwx4ntgZ4nakwNJy3/owsY/m4=", + "dev": true + }, + "hsla-regex": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/hsla-regex/-/hsla-regex-1.0.0.tgz", + "integrity": "sha1-wc56MWjIxmFAM6S194d/OyJfnDg=", + "dev": true + }, "html-comment-regex": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/html-comment-regex/-/html-comment-regex-1.1.2.tgz", - "integrity": "sha512-P+M65QY2JQ5Y0G9KKdlDpo0zK+/OHptU5AaBwUfAIDJZk1MYf32Frm84EcOytfJE0t5JvkAnKlmjsXDnWzCJmQ==" + "integrity": "sha512-P+M65QY2JQ5Y0G9KKdlDpo0zK+/OHptU5AaBwUfAIDJZk1MYf32Frm84EcOytfJE0t5JvkAnKlmjsXDnWzCJmQ==", + "dev": true + }, + "html-element-map": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/html-element-map/-/html-element-map-1.1.0.tgz", + "integrity": "sha512-iqiG3dTZmy+uUaTmHarTL+3/A2VW9ox/9uasKEZC+R/wAtUrTcRlXPSaPqsnWPfIu8wqn09jQNwMRqzL54jSYA==", + "dev": true, + "requires": { + "array-filter": "^1.0.0" + }, + "dependencies": { + "array-filter": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/array-filter/-/array-filter-1.0.0.tgz", + "integrity": "sha1-uveeYubvTCpMC4MSMtr/7CUfnYM=", + "dev": true + } + } }, "html-encoding-sniffer": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/html-encoding-sniffer/-/html-encoding-sniffer-1.0.2.tgz", "integrity": "sha512-71lZziiDnsuabfdYiUeWdCVyKuqwWi23L8YeIgV9jSSZHCtb6wB1BKWooH7L3tn4/FuZJMVWyNaIDr4RGmaSYw==", + "dev": true, "requires": { "whatwg-encoding": "^1.0.1" } @@ -6268,116 +10546,155 @@ "html-entities": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/html-entities/-/html-entities-1.2.1.tgz", - "integrity": "sha1-DfKTUfByEWNRXfueVUPl9u7VFi8=" + "integrity": "sha1-DfKTUfByEWNRXfueVUPl9u7VFi8=", + "dev": true }, - "html-minifier": { - "version": "3.5.21", - 
"resolved": "https://registry.npmjs.org/html-minifier/-/html-minifier-3.5.21.tgz", - "integrity": "sha512-LKUKwuJDhxNa3uf/LPR/KVjm/l3rBqtYeCOAekvG8F1vItxMUpueGd94i/asDDr8/1u7InxzFA5EeGjhhG5mMA==", - "requires": { - "camel-case": "3.0.x", - "clean-css": "4.2.x", - "commander": "2.17.x", - "he": "1.2.x", - "param-case": "2.1.x", - "relateurl": "0.2.x", - "uglify-js": "3.4.x" - }, - "dependencies": { - "commander": { - "version": "2.17.1", - "resolved": "https://registry.npmjs.org/commander/-/commander-2.17.1.tgz", - "integrity": "sha512-wPMUt6FnH2yzG95SA6mzjQOEKUU3aLaDEmzs1ti+1E9h+CsrZghRlqEM/EJ4KscsQVG8uNN4uVreUeT8+drlgg==" - } - } + "html-escaper": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz", + "integrity": "sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==", + "dev": true }, - "html-webpack-plugin": { - "version": "2.29.0", - "resolved": "https://registry.npmjs.org/html-webpack-plugin/-/html-webpack-plugin-2.29.0.tgz", - "integrity": "sha1-6Yf0IYU9O2k4yMTIFxhC5f0XryM=", + "html-minifier-terser": { + "version": "5.0.5", + "resolved": "https://registry.npmjs.org/html-minifier-terser/-/html-minifier-terser-5.0.5.tgz", + "integrity": "sha512-cBSFFghQh/uHcfSiL42KxxIRMF7A144+3E44xdlctIjxEmkEfCvouxNyFH2wysXk1fCGBPwtcr3hDWlGTfkDew==", + "dev": true, "requires": { - "bluebird": "^3.4.7", - "html-minifier": "^3.2.3", - "loader-utils": "^0.2.16", - "lodash": "^4.17.3", - "pretty-error": "^2.0.2", - "toposort": "^1.0.0" + "camel-case": "^4.1.1", + "clean-css": "^4.2.3", + "commander": "^4.1.1", + "he": "^1.2.0", + "param-case": "^3.0.3", + "relateurl": "^0.2.7", + "terser": "^4.6.3" }, "dependencies": { - "big.js": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/big.js/-/big.js-3.2.0.tgz", - "integrity": "sha512-+hN/Zh2D08Mx65pZ/4g5bsmNiZUuChDiQfTUQ7qJr4/kuopCr88xZsAXv6mBoZEsUI4OuGHlX59qE94K2mMW8Q==" + "camel-case": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/camel-case/-/camel-case-4.1.1.tgz", + "integrity": "sha512-7fa2WcG4fYFkclIvEmxBbTvmibwF2/agfEBc6q3lOpVu0A13ltLsA+Hr/8Hp6kp5f+G7hKi6t8lys6XxP+1K6Q==", + "dev": true, + "requires": { + "pascal-case": "^3.1.1", + "tslib": "^1.10.0" + } }, - "json5": { - "version": "0.5.1", - "resolved": "https://registry.npmjs.org/json5/-/json5-0.5.1.tgz", - "integrity": "sha1-Hq3nrMASA0rYTiOWdn6tn6VJWCE=" + "commander": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-4.1.1.tgz", + "integrity": "sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==", + "dev": true }, - "loader-utils": { - "version": "0.2.17", - "resolved": "https://registry.npmjs.org/loader-utils/-/loader-utils-0.2.17.tgz", - "integrity": "sha1-+G5jdNQyBabmxg6RlvF8Apm/s0g=", + "dot-case": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/dot-case/-/dot-case-3.0.3.tgz", + "integrity": "sha512-7hwEmg6RiSQfm/GwPL4AAWXKy3YNNZA3oFv2Pdiey0mwkRCPZ9x6SZbkLcn8Ma5PYeVokzoD4Twv2n7LKp5WeA==", + "dev": true, + "requires": { + "no-case": "^3.0.3", + "tslib": "^1.10.0" + } + }, + "lower-case": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/lower-case/-/lower-case-2.0.1.tgz", + "integrity": "sha512-LiWgfDLLb1dwbFQZsSglpRj+1ctGnayXz3Uv0/WO8n558JycT5fg6zkNcnW0G68Nn0aEldTFeEfmjCfmqry/rQ==", + "dev": true, + "requires": { + "tslib": "^1.10.0" + } + }, + "no-case": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/no-case/-/no-case-3.0.3.tgz", 
+ "integrity": "sha512-ehY/mVQCf9BL0gKfsJBvFJen+1V//U+0HQMPrWct40ixE4jnv0bfvxDbWtAHL9EcaPEOJHVVYKoQn1TlZUB8Tw==", + "dev": true, + "requires": { + "lower-case": "^2.0.1", + "tslib": "^1.10.0" + } + }, + "param-case": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/param-case/-/param-case-3.0.3.tgz", + "integrity": "sha512-VWBVyimc1+QrzappRs7waeN2YmoZFCGXWASRYX1/rGHtXqEcrGEIDm+jqIwFa2fRXNgQEwrxaYuIrX0WcAguTA==", + "dev": true, + "requires": { + "dot-case": "^3.0.3", + "tslib": "^1.10.0" + } + }, + "pascal-case": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/pascal-case/-/pascal-case-3.1.1.tgz", + "integrity": "sha512-XIeHKqIrsquVTQL2crjq3NfJUxmdLasn3TYOU0VBM+UX2a6ztAWBlJQBePLGY7VHW8+2dRadeIPK5+KImwTxQA==", + "dev": true, "requires": { - "big.js": "^3.1.3", - "emojis-list": "^2.0.0", - "json5": "^0.5.0", - "object-assign": "^4.0.1" + "no-case": "^3.0.3", + "tslib": "^1.10.0" } + }, + "tslib": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.11.1.tgz", + "integrity": "sha512-aZW88SY8kQbU7gpV19lN24LtXh/yD4ZZg6qieAJDDg+YBsJcSmLGK9QpnUjAKVG/xefmvJGd1WUmfpT/g6AJGA==", + "dev": true } } }, + "html-webpack-plugin": { + "version": "4.0.0-beta.11", + "resolved": "https://registry.npmjs.org/html-webpack-plugin/-/html-webpack-plugin-4.0.0-beta.11.tgz", + "integrity": "sha512-4Xzepf0qWxf8CGg7/WQM5qBB2Lc/NFI7MhU59eUDTkuQp3skZczH4UA1d6oQyDEIoMDgERVhRyTdtUPZ5s5HBg==", + "dev": true, + "requires": { + "html-minifier-terser": "^5.0.1", + "loader-utils": "^1.2.3", + "lodash": "^4.17.15", + "pretty-error": "^2.1.1", + "tapable": "^1.1.3", + "util.promisify": "1.0.0" + } + }, "htmlparser2": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/htmlparser2/-/htmlparser2-3.3.0.tgz", - "integrity": "sha1-zHDQWln2VC5D8OaFyYLhTJJKnv4=", + "version": "3.10.1", + "resolved": "https://registry.npmjs.org/htmlparser2/-/htmlparser2-3.10.1.tgz", + "integrity": "sha512-IgieNijUMbkDovyoKObU1DUhm1iwNYE/fuifEoEHfd1oZKZDaONBSkal7Y01shxsM49R4XaMdGez3WnF9UfiCQ==", + "dev": true, "requires": { - "domelementtype": "1", - "domhandler": "2.1", - "domutils": "1.1", - "readable-stream": "1.0" + "domelementtype": "^1.3.1", + "domhandler": "^2.3.0", + "domutils": "^1.5.1", + "entities": "^1.1.1", + "inherits": "^2.0.1", + "readable-stream": "^3.1.1" }, "dependencies": { - "domutils": { - "version": "1.1.6", - "resolved": "https://registry.npmjs.org/domutils/-/domutils-1.1.6.tgz", - "integrity": "sha1-vdw94Jm5ou+sxRxiPyj0FuzFdIU=", - "requires": { - "domelementtype": "1" - } - }, - "isarray": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", - "integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=" - }, "readable-stream": { - "version": "1.0.34", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.34.tgz", - "integrity": "sha1-Elgg40vIQtLyqq+v5MKRbuMsFXw=", + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "dev": true, "requires": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.1", - "isarray": "0.0.1", - "string_decoder": "~0.10.x" + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" } - }, - "string_decoder": { - "version": "0.10.31", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", - "integrity": "sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ=" } } }, 
"http-deceiver": { "version": "1.2.7", "resolved": "https://registry.npmjs.org/http-deceiver/-/http-deceiver-1.2.7.tgz", - "integrity": "sha1-+nFolEq5pRnTN8sL7HKE3D5yPYc=" + "integrity": "sha1-+nFolEq5pRnTN8sL7HKE3D5yPYc=", + "dev": true }, "http-errors": { "version": "1.6.3", "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.6.3.tgz", "integrity": "sha1-i1VoC7S+KDoLW/TqLjhYC+HZMg0=", + "dev": true, "requires": { "depd": "~1.1.2", "inherits": "2.0.3", @@ -6386,9 +10703,10 @@ } }, "http-parser-js": { - "version": "0.5.0", - "resolved": "https://registry.npmjs.org/http-parser-js/-/http-parser-js-0.5.0.tgz", - "integrity": "sha512-cZdEF7r4gfRIq7ezX9J0T+kQmJNOub71dWbgAXVHDct80TKP4MCETtZQ31xyv38UwgzkWPYF/Xc0ge55dW9Z9w==" + "version": "0.4.10", + "resolved": "https://registry.npmjs.org/http-parser-js/-/http-parser-js-0.4.10.tgz", + "integrity": "sha1-ksnBN0w1CF912zWexWzCV8u5P6Q=", + "dev": true }, "http-proxy": { "version": "1.17.0", @@ -6400,6 +10718,16 @@ "requires-port": "^1.0.0" } }, + "http-proxy-agent": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-2.1.0.tgz", + "integrity": "sha512-qwHbBLV7WviBl0rQsOzH6o5lwyOIvwp/BdFnvVxXORldu5TmjFfjzBcWUWS5kWAZhmv+JtiDhSuQCp4sBfbIgg==", + "dev": true, + "requires": { + "agent-base": "4", + "debug": "3.1.0" + } + }, "http-proxy-middleware": { "version": "0.19.1", "resolved": "https://registry.npmjs.org/http-proxy-middleware/-/http-proxy-middleware-0.19.1.tgz", @@ -6415,6 +10743,7 @@ "version": "1.2.0", "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz", "integrity": "sha1-muzZJRFHcvPZW2WmCruPfBj7rOE=", + "dev": true, "requires": { "assert-plus": "^1.0.0", "jsprim": "^1.2.2", @@ -6424,17 +10753,8 @@ "https-browserify": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/https-browserify/-/https-browserify-1.0.0.tgz", - "integrity": "sha1-7AbBDgo0wPL68Zn3/X/Hj//QPHM=" - }, - "https-proxy-agent": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-2.2.1.tgz", - "integrity": "sha512-HPCTS1LW51bcyMYbxUIOO4HEOlQ1/1qRaFWcyxvwaqUS9TY88aoEuHUY33kuAh1YhVVaDQhLZsnPd+XNARWZlQ==", - "dev": true, - "requires": { - "agent-base": "^4.1.0", - "debug": "^3.1.0" - } + "integrity": "sha1-7AbBDgo0wPL68Zn3/X/Hj//QPHM=", + "dev": true }, "hyphenate-style-name": { "version": "1.0.2", @@ -6449,17 +10769,22 @@ "safer-buffer": ">= 2.1.2 < 3" } }, - "icss-replace-symbols": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/icss-replace-symbols/-/icss-replace-symbols-1.1.0.tgz", - "integrity": "sha1-Bupvg2ead0njhs/h/oEq5dsiPe0=" - }, "icss-utils": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/icss-utils/-/icss-utils-2.1.0.tgz", - "integrity": "sha1-g/Cg7DeL8yRheLbCrZE28TWxyWI=", + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/icss-utils/-/icss-utils-4.1.1.tgz", + "integrity": "sha512-4aFq7wvWyMHKgxsH8QQtGpvbASCf+eM3wPRLI6R+MgAnTCZ6STYsRvttLvRWK0Nfif5piF394St3HeJDaljGPA==", + "dev": true, + "requires": { + "postcss": "^7.0.14" + } + }, + "identity-obj-proxy": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/identity-obj-proxy/-/identity-obj-proxy-3.0.0.tgz", + "integrity": "sha1-lNK9qWCERT7zb7xarsN+D3nx/BQ=", + "dev": true, "requires": { - "postcss": "^6.0.1" + "harmony-reflect": "^1.4.6" } }, "ieee754": { @@ -6470,31 +10795,117 @@ "iferr": { "version": "0.1.5", "resolved": "https://registry.npmjs.org/iferr/-/iferr-0.1.5.tgz", - "integrity": 
"sha1-xg7taebY/bazEEofy8ocGS3FtQE=" + "integrity": "sha1-xg7taebY/bazEEofy8ocGS3FtQE=", + "dev": true + }, + "ignore": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-4.0.6.tgz", + "integrity": "sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg==", + "dev": true }, "immer": { "version": "1.10.0", "resolved": "https://registry.npmjs.org/immer/-/immer-1.10.0.tgz", "integrity": "sha512-O3sR1/opvCDGLEVcvrGTMtLac8GJ5IwZC4puPrLuRj3l7ICKvkmA0vGuU9OW8mV9WIBRnaxp5GJh9IEAaNOoYg==" }, - "import-lazy": { + "import-cwd": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/import-cwd/-/import-cwd-2.1.0.tgz", + "integrity": "sha1-qmzzbnInYShcs3HsZRn1PiQ1sKk=", + "dev": true, + "requires": { + "import-from": "^2.1.0" + } + }, + "import-fresh": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-2.0.0.tgz", + "integrity": "sha1-2BNVwVYS04bGH53dOSLUMEgipUY=", + "dev": true, + "requires": { + "caller-path": "^2.0.0", + "resolve-from": "^3.0.0" + } + }, + "import-from": { "version": "2.1.0", - "resolved": "https://registry.npmjs.org/import-lazy/-/import-lazy-2.1.0.tgz", - "integrity": "sha1-BWmOPUXIjo1+nZLLBYTnfwlvPkM=" + "resolved": "https://registry.npmjs.org/import-from/-/import-from-2.1.0.tgz", + "integrity": "sha1-M1238qev/VOqpHHUuAId7ja387E=", + "dev": true, + "requires": { + "resolve-from": "^3.0.0" + } }, "import-local": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/import-local/-/import-local-1.0.0.tgz", - "integrity": "sha512-vAaZHieK9qjGo58agRBg+bhHX3hoTZU/Oa3GESWLz7t1U62fk63aHuDJJEteXoDeTCcPmUT+z38gkHPZkkmpmQ==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/import-local/-/import-local-2.0.0.tgz", + "integrity": "sha512-b6s04m3O+s3CGSbqDIyP4R6aAwAeYlVq9+WUWep6iHa8ETRf9yei1U48C5MmfJmV9AiLYYBKPMq/W+/WRpQmCQ==", + "dev": true, "requires": { - "pkg-dir": "^2.0.0", + "pkg-dir": "^3.0.0", "resolve-cwd": "^2.0.0" + }, + "dependencies": { + "find-up": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "dev": true, + "requires": { + "locate-path": "^3.0.0" + } + }, + "locate-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "dev": true, + "requires": { + "p-locate": "^3.0.0", + "path-exists": "^3.0.0" + } + }, + "p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "requires": { + "p-try": "^2.0.0" + } + }, + "p-locate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "dev": true, + "requires": { + "p-limit": "^2.0.0" + } + }, + "p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "dev": true + }, + "pkg-dir": { + "version": "3.0.0", + "resolved": 
"https://registry.npmjs.org/pkg-dir/-/pkg-dir-3.0.0.tgz", + "integrity": "sha512-/E57AYkoeQ25qkxMj5PBOVgF8Kiu/h7cYS30Z5+R7WaiCCBfLq58ZI/dSeaEKb9WVJV5n/03QwrN3IeWIFllvw==", + "dev": true, + "requires": { + "find-up": "^3.0.0" + } + } } }, "imurmurhash": { "version": "0.1.4", "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", - "integrity": "sha1-khi5srkoojixPcT7a21XbyMUU+o=" + "integrity": "sha1-khi5srkoojixPcT7a21XbyMUU+o=", + "dev": true }, "indefinite-observable": { "version": "1.0.2", @@ -6508,6 +10919,7 @@ "version": "2.1.0", "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-2.1.0.tgz", "integrity": "sha1-ji1INIdCEhtKghi3oTfppSBJ3IA=", + "dev": true, "requires": { "repeating": "^2.0.0" } @@ -6515,17 +10927,20 @@ "indexes-of": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/indexes-of/-/indexes-of-1.0.1.tgz", - "integrity": "sha1-8w9xbI4r00bHtn0985FVZqfAVgc=" + "integrity": "sha1-8w9xbI4r00bHtn0985FVZqfAVgc=", + "dev": true }, - "indexof": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/indexof/-/indexof-0.0.1.tgz", - "integrity": "sha1-gtwzbSMrkGIXnQWrMpOmYFn9Q10=" + "infer-owner": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/infer-owner/-/infer-owner-1.0.4.tgz", + "integrity": "sha512-IClj+Xz94+d7irH5qRyfJonOdfTzuDaifE6ZPWfx0N0+/ATZCbuTPq2prFl526urkQd90WyUKIh1DfBQ2hMz9A==", + "dev": true }, "inflight": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", + "dev": true, "requires": { "once": "^1.3.0", "wrappy": "1" @@ -6534,71 +10949,219 @@ "inherits": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", - "integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=" + "integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=", + "dev": true }, "ini": { "version": "1.3.5", "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.5.tgz", - "integrity": "sha512-RZY5huIKCMRWDUqZlEi72f/lmXKMvuszcMBduliQ3nnWbx9X/ZBQO7DijMEYS9EhHBb2qacRUMtC7svLwe0lcw==" + "integrity": "sha512-RZY5huIKCMRWDUqZlEi72f/lmXKMvuszcMBduliQ3nnWbx9X/ZBQO7DijMEYS9EhHBb2qacRUMtC7svLwe0lcw==", + "dev": true }, "inquirer": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-3.3.0.tgz", - "integrity": "sha512-h+xtnyk4EwKvFWHrUYsWErEVR+igKtLdchu+o0Z1RL7VU/jVMFbYir2bp6bAj8efFNxWqHX0dIss6fJQ+/+qeQ==", + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-7.1.0.tgz", + "integrity": "sha512-5fJMWEmikSYu0nv/flMc475MhGbB7TSPd/2IpFV4I4rMklboCH2rQjYY5kKiYGHqUF9gvaambupcJFFG9dvReg==", + "dev": true, "requires": { - "ansi-escapes": "^3.0.0", - "chalk": "^2.0.0", - "cli-cursor": "^2.1.0", + "ansi-escapes": "^4.2.1", + "chalk": "^3.0.0", + "cli-cursor": "^3.1.0", "cli-width": "^2.0.0", - "external-editor": "^2.0.4", - "figures": "^2.0.0", - "lodash": "^4.3.0", - "mute-stream": "0.0.7", - "run-async": "^2.2.0", - "rx-lite": "^4.0.8", - "rx-lite-aggregates": "^4.0.8", - "string-width": "^2.1.0", - "strip-ansi": "^4.0.0", + "external-editor": "^3.0.3", + "figures": "^3.0.0", + "lodash": "^4.17.15", + "mute-stream": "0.0.8", + "run-async": "^2.4.0", + "rxjs": "^6.5.3", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0", "through": "^2.3.6" }, "dependencies": { - "ansi-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", - "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=" + "ansi-escapes": { + "version": "4.3.1", + 
"resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.1.tgz", + "integrity": "sha512-JWF7ocqNrp8u9oqpgV+wH5ftbt+cfvv+PTjOvKLT3AdYly/LmORARfEVT1iyjwN+4MqE5UmVKoAdIBqeoCHgLA==", + "dev": true, + "requires": { + "type-fest": "^0.11.0" + } + }, + "ansi-styles": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.2.1.tgz", + "integrity": "sha512-9VGjrMsG1vePxcSweQsN20KY/c4zN0h9fLjqAbwbPfahM3t+NL+M9HC8xeXG2I8pX5NoamTGNuomEUFI7fcUjA==", + "dev": true, + "requires": { + "@types/color-name": "^1.1.1", + "color-convert": "^2.0.1" + } }, "chalk": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.1.tgz", - "integrity": "sha512-ObN6h1v2fTJSmUXoS3nMQ92LbDK9be4TV+6G+omQlGJFdcUX5heKi1LZ1YnRMIgwTLEj3E24bT6tYni50rlCfQ==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz", + "integrity": "sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==", + "dev": true, "requires": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" } }, - "strip-ansi": { + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "has-flag": { "version": "4.0.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", - "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "strip-ansi": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", + "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", + "dev": true, "requires": { - "ansi-regex": "^3.0.0" + "ansi-regex": "^5.0.0" + } + }, + "supports-color": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.1.0.tgz", + "integrity": "sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" } + }, + "type-fest": { + "version": "0.11.0", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.11.0.tgz", + "integrity": "sha512-OdjXJxnCN1AvyLSzeKIgXTXxV+99ZuXl3Hpo9XpJAv9MBcHrrJOQ5kV7ypXOuQie+AmWG25hLbiKdwYTifzcfQ==", + "dev": true } } }, "internal-ip": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/internal-ip/-/internal-ip-1.2.0.tgz", - "integrity": "sha1-rp+/k7mEh4eF1QqN4bNWlWBYz1w=", + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/internal-ip/-/internal-ip-4.3.0.tgz", + "integrity": "sha512-S1zBo1D6zcsyuC6PMmY5+55YMILQ9av8lotMx447Bq6SAgo/sDK6y6uUKmuYhW7eacnIhFfsPmCNYdDzsnnDCg==", + "dev": true, "requires": { - "meow": "^3.3.0" + "default-gateway": "^4.2.0", + "ipaddr.js": "^1.9.0" } }, - "interpret": { - "version": "1.2.0", - 
"resolved": "https://registry.npmjs.org/interpret/-/interpret-1.2.0.tgz", - "integrity": "sha512-mT34yGKMNceBQUoVn7iCDKDntA7SC6gycMAWzGx1z/CMCTV7b2AAtXlo3nRyHZ1FelRkQbQjprHSYGwzLtkVbw==" + "internal-slot": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.2.tgz", + "integrity": "sha512-2cQNfwhAfJIkU4KZPkDI+Gj5yNNnbqi40W9Gge6dfnk4TocEVm00B3bdiL+JINrbGJil2TeHvM4rETGzk/f/0g==", + "dev": true, + "requires": { + "es-abstract": "^1.17.0-next.1", + "has": "^1.0.3", + "side-channel": "^1.0.2" + }, + "dependencies": { + "es-abstract": { + "version": "1.17.5", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.17.5.tgz", + "integrity": "sha512-BR9auzDbySxOcfog0tLECW8l28eRGpDpU3Dm3Hp4q/N+VtLTmyj4EUN088XZWQDW/hzj6sYRDXeOFsaAODKvpg==", + "dev": true, + "requires": { + "es-to-primitive": "^1.2.1", + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.1", + "is-callable": "^1.1.5", + "is-regex": "^1.0.5", + "object-inspect": "^1.7.0", + "object-keys": "^1.1.1", + "object.assign": "^4.1.0", + "string.prototype.trimleft": "^2.1.1", + "string.prototype.trimright": "^2.1.1" + } + }, + "es-to-primitive": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz", + "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==", + "dev": true, + "requires": { + "is-callable": "^1.1.4", + "is-date-object": "^1.0.1", + "is-symbol": "^1.0.2" + } + }, + "has-symbols": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.1.tgz", + "integrity": "sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg==", + "dev": true + }, + "is-callable": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.1.5.tgz", + "integrity": "sha512-ESKv5sMCJB2jnHTWZ3O5itG+O128Hsus4K4Qh1h2/cgn2vbgnLSVqfV46AeJA9D5EeeLa9w81KUXMtn34zhX+Q==", + "dev": true + }, + "is-regex": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.0.5.tgz", + "integrity": "sha512-vlKW17SNq44owv5AQR3Cq0bQPEb8+kF3UKZ2fiZNOWtztYE5i0CzCZxFDwO58qAOWtxdBRVO/V5Qin1wjCqFYQ==", + "dev": true, + "requires": { + "has": "^1.0.3" + } + }, + "object-inspect": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.7.0.tgz", + "integrity": "sha512-a7pEHdh1xKIAgTySUGgLMx/xwDZskN1Ud6egYYN3EdRW4ZMPNEDUTF+hwy2LUC+Bl+SyLXANnwz/jyh/qutKUw==", + "dev": true + }, + "object-keys": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", + "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==", + "dev": true + }, + "string.prototype.trimleft": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/string.prototype.trimleft/-/string.prototype.trimleft-2.1.2.tgz", + "integrity": "sha512-gCA0tza1JBvqr3bfAIFJGqfdRTyPae82+KTnm3coDXkZN9wnuW3HjGgN386D7hfv5CHQYCI022/rJPVlqXyHSw==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.5", + "string.prototype.trimstart": "^1.0.0" + } + }, + "string.prototype.trimright": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/string.prototype.trimright/-/string.prototype.trimright-2.1.2.tgz", + "integrity": "sha512-ZNRQ7sY3KroTaYjRS6EbNiiHrOkjihL9aQE/8gfQ4DtAC/aEBRHFJa44OmoWxGGqXuJlfKkZW4WcXErGr+9ZFg==", + "dev": 
true, + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.5", + "string.prototype.trimend": "^1.0.0" + } + } + } }, "invariant": { "version": "2.2.4", @@ -6609,30 +11172,34 @@ } }, "invert-kv": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/invert-kv/-/invert-kv-1.0.0.tgz", - "integrity": "sha1-EEqOSqym09jNFXqO+L+rLXo//bY=" + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/invert-kv/-/invert-kv-2.0.0.tgz", + "integrity": "sha512-wPVv/y/QQ/Uiirj/vh3oP+1Ww+AWehmi1g5fFWGPF6IpCBCDVrhgHRMvrLfdYcwDh3QJbGXDW4JAuzxElLSqKA==", + "dev": true }, "ip": { "version": "1.1.5", "resolved": "https://registry.npmjs.org/ip/-/ip-1.1.5.tgz", - "integrity": "sha1-vd7XARQpCCjAoDnnLvJfWq7ENUo=" + "integrity": "sha1-vd7XARQpCCjAoDnnLvJfWq7ENUo=", + "dev": true }, "ip-regex": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/ip-regex/-/ip-regex-1.0.3.tgz", - "integrity": "sha1-3FiQdvZZ9BnCIgOaMzFvHHOH7/0=", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/ip-regex/-/ip-regex-2.1.0.tgz", + "integrity": "sha1-+ni/XS5pE8kRzp+BnuUUa7bYROk=", "dev": true }, "ipaddr.js": { - "version": "1.8.0", - "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.8.0.tgz", - "integrity": "sha1-6qM9bd16zo9/b+DJygRA5wZzix4=" + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", + "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==", + "dev": true }, "is-absolute-url": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/is-absolute-url/-/is-absolute-url-2.1.0.tgz", - "integrity": "sha1-UFMN+4T8yap9vnhS6Do3uTufKqY=" + "integrity": "sha1-UFMN+4T8yap9vnhS6Do3uTufKqY=", + "dev": true }, "is-accessor-descriptor": { "version": "0.1.6", @@ -6655,12 +11222,14 @@ "is-arrayish": { "version": "0.2.1", "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", - "integrity": "sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=" + "integrity": "sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=", + "dev": true }, "is-binary-path": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-1.0.1.tgz", "integrity": "sha1-dfFmQrSA8YenEcgUFh/TpKdlWJg=", + "dev": true, "requires": { "binary-extensions": "^1.0.0" } @@ -6680,6 +11249,7 @@ "version": "1.0.0", "resolved": "https://registry.npmjs.org/is-builtin-module/-/is-builtin-module-1.0.0.tgz", "integrity": "sha1-VAVy0096wxGfj3bDDLwbHgN6/74=", + "dev": true, "requires": { "builtin-modules": "^1.0.0" } @@ -6687,14 +11257,30 @@ "is-callable": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.1.4.tgz", - "integrity": "sha512-r5p9sxJjYnArLjObpjA4xu5EKI3CuKHkJXMhT7kwbpUyIFD1n5PMAsoPvWnvtZiNz7LjkYDRZhd7FlI0eMijEA==" + "integrity": "sha512-r5p9sxJjYnArLjObpjA4xu5EKI3CuKHkJXMhT7kwbpUyIFD1n5PMAsoPvWnvtZiNz7LjkYDRZhd7FlI0eMijEA==", + "dev": true + }, + "is-ci": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-ci/-/is-ci-2.0.0.tgz", + "integrity": "sha512-YfJT7rkpQB0updsdHLGWrvhBJfcfzNNawYDNIyQXJz0IViGf75O8EBPKSdvw2rF+LGCsX4FZ8tcr3b19LcZq4w==", + "dev": true, + "requires": { + "ci-info": "^2.0.0" + } }, - "is-ci": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/is-ci/-/is-ci-1.2.1.tgz", - "integrity": "sha512-s6tfsaQaQi3JNciBH6shVqEDvhGut0SUXr31ag8Pd8BBbVVlcGfWhpPmEOoM6RJ5TFhbypvf5yyRw/VXW1IiWg==", + "is-color-stop": { + "version": "1.1.0", + "resolved": 
"https://registry.npmjs.org/is-color-stop/-/is-color-stop-1.1.0.tgz", + "integrity": "sha1-z/9HGu5N1cnhWFmPvhKWe1za00U=", + "dev": true, "requires": { - "ci-info": "^1.5.0" + "css-color-names": "^0.0.4", + "hex-color-regex": "^1.1.0", + "hsl-regex": "^1.0.0", + "hsla-regex": "^1.0.0", + "rgb-regex": "^1.0.1", + "rgba-regex": "^1.0.0" } }, "is-data-descriptor": { @@ -6718,7 +11304,8 @@ "is-date-object": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.1.tgz", - "integrity": "sha1-mqIOtq7rv/d/vTPnTKAbM1gdOhY=" + "integrity": "sha1-mqIOtq7rv/d/vTPnTKAbM1gdOhY=", + "dev": true }, "is-descriptor": { "version": "0.1.6", @@ -6740,20 +11327,14 @@ "is-directory": { "version": "0.3.1", "resolved": "https://registry.npmjs.org/is-directory/-/is-directory-0.3.1.tgz", - "integrity": "sha1-YTObbyR1/Hcv2cnYP1yFddwVSuE=" - }, - "is-dotfile": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/is-dotfile/-/is-dotfile-1.0.3.tgz", - "integrity": "sha1-pqLzL/0t+wT1yiXs0Pa4PPeYoeE=" + "integrity": "sha1-YTObbyR1/Hcv2cnYP1yFddwVSuE=", + "dev": true }, - "is-equal-shallow": { - "version": "0.1.3", - "resolved": "https://registry.npmjs.org/is-equal-shallow/-/is-equal-shallow-0.1.3.tgz", - "integrity": "sha1-IjgJj8Ih3gvPpdnqxMRdY4qhxTQ=", - "requires": { - "is-primitive": "^2.0.0" - } + "is-docker": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-docker/-/is-docker-2.0.0.tgz", + "integrity": "sha512-pJEdRugimx4fBMra5z2/5iRdZ63OhYV0vr0Dwm5+xtW4D1FvRkB8hamMIhnWfyJeDdyr/aa7BDyNbtG38VxgoQ==", + "dev": true }, "is-extendable": { "version": "0.1.1", @@ -6769,25 +11350,22 @@ "version": "1.0.2", "resolved": "https://registry.npmjs.org/is-finite/-/is-finite-1.0.2.tgz", "integrity": "sha1-zGZ3aVYCvlUO8R6LSqYwU0K20Ko=", + "dev": true, "requires": { "number-is-nan": "^1.0.0" } }, "is-fullwidth-code-point": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", - "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=" - }, - "is-function": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-function/-/is-function-1.0.1.tgz", - "integrity": "sha1-Es+5i2W1fdPRk6MSH19uL0N2ArU=", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", "dev": true }, "is-generator-fn": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-generator-fn/-/is-generator-fn-1.0.0.tgz", - "integrity": "sha1-lp1J4bszKfa7fwkIm+JleLLd1Go=" + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-generator-fn/-/is-generator-fn-2.1.0.tgz", + "integrity": "sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ==", + "dev": true }, "is-glob": { "version": "4.0.0", @@ -6802,15 +11380,6 @@ "resolved": "https://registry.npmjs.org/is-in-browser/-/is-in-browser-1.1.3.tgz", "integrity": "sha1-Vv9NtoOgeMYILrldrX3GLh0E+DU=" }, - "is-installed-globally": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/is-installed-globally/-/is-installed-globally-0.1.0.tgz", - "integrity": "sha1-Df2Y9akRFxbdU13aZJL2e/PSWoA=", - "requires": { - "global-dirs": "^0.1.0", - "is-path-inside": "^1.0.0" - } - }, "is-lower-case": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/is-lower-case/-/is-lower-case-1.1.3.tgz", @@ -6820,11 +11389,6 @@ "lower-case": 
"^1.1.0" } }, - "is-npm": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-npm/-/is-npm-1.0.0.tgz", - "integrity": "sha1-8vtjpl5JBbQGyGBydloaTceTufQ=" - }, "is-number": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz", @@ -6850,35 +11414,40 @@ "dev": true }, "is-obj": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-1.0.1.tgz", - "integrity": "sha1-PkcprB9f3gJc19g6iW2rn09n2w8=" + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-2.0.0.tgz", + "integrity": "sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w==", + "dev": true }, "is-path-cwd": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-path-cwd/-/is-path-cwd-1.0.0.tgz", - "integrity": "sha1-0iXsIxMuie3Tj9p2dHLmLmXxEG0=" + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/is-path-cwd/-/is-path-cwd-2.2.0.tgz", + "integrity": "sha512-w942bTcih8fdJPJmQHFzkS76NEP8Kzzvmw92cXsazb8intwLqPibPPdXf4ANdKV3rYMuuQYGIWtvz9JilB3NFQ==", + "dev": true }, "is-path-in-cwd": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-path-in-cwd/-/is-path-in-cwd-1.0.1.tgz", - "integrity": "sha512-FjV1RTW48E7CWM7eE/J2NJvAEEVektecDBVBE5Hh3nM1Jd0kvhHtX68Pr3xsDf857xt3Y4AkwVULK1Vku62aaQ==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-path-in-cwd/-/is-path-in-cwd-2.1.0.tgz", + "integrity": "sha512-rNocXHgipO+rvnP6dk3zI20RpOtrAM/kzbB258Uw5BWr3TpXi861yzjo16Dn4hUox07iw5AyeMLHWsujkjzvRQ==", + "dev": true, "requires": { - "is-path-inside": "^1.0.0" + "is-path-inside": "^2.1.0" } }, "is-path-inside": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-1.0.1.tgz", - "integrity": "sha1-jvW33lBDej/cprToZe96pVy0gDY=", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-2.1.0.tgz", + "integrity": "sha512-wiyhTzfDWsvwAW53OBWF5zuvaOGlZ6PwYxAbPVDhpm+gM09xKQGjBq/8uYN12aDvMxnAnq3dxTyoSoRNmg5YFg==", + "dev": true, "requires": { - "path-is-inside": "^1.0.1" + "path-is-inside": "^1.0.2" } }, "is-plain-obj": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-1.1.0.tgz", - "integrity": "sha1-caUMhCnfync8kqOQpKA7OfzVHT4=" + "integrity": "sha1-caUMhCnfync8kqOQpKA7OfzVHT4=", + "dev": true }, "is-plain-object": { "version": "2.0.4", @@ -6888,43 +11457,38 @@ "isobject": "^3.0.1" } }, - "is-posix-bracket": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/is-posix-bracket/-/is-posix-bracket-0.1.1.tgz", - "integrity": "sha1-MzTceXdDaOkvAW5vvAqI9c1ua8Q=" - }, - "is-primitive": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/is-primitive/-/is-primitive-2.0.0.tgz", - "integrity": "sha1-IHurkWOEmcB7Kt8kCkGochADRXU=" - }, "is-promise": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/is-promise/-/is-promise-2.1.0.tgz", - "integrity": "sha1-eaKp7OfwlugPNtKy87wWwf9L8/o=" - }, - "is-redirect": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-redirect/-/is-redirect-1.0.0.tgz", - "integrity": "sha1-HQPd7VO9jbDzDCbk+V02/HyH3CQ=" + "integrity": "sha1-eaKp7OfwlugPNtKy87wWwf9L8/o=", + "dev": true }, "is-regex": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.0.4.tgz", "integrity": "sha1-VRdIm1RwkbCTDglWVM7SXul+lJE=", + "dev": true, "requires": { "has": "^1.0.1" } }, - "is-retry-allowed": { + "is-regexp": { + "version": "1.0.0", + "resolved": 
"https://registry.npmjs.org/is-regexp/-/is-regexp-1.0.0.tgz", + "integrity": "sha1-/S2INUXEa6xaYz57mgnof6LLUGk=", + "dev": true + }, + "is-resolvable": { "version": "1.1.0", - "resolved": "https://registry.npmjs.org/is-retry-allowed/-/is-retry-allowed-1.1.0.tgz", - "integrity": "sha1-EaBgVotnM5REAz0BJaYaINVk+zQ=" + "resolved": "https://registry.npmjs.org/is-resolvable/-/is-resolvable-1.1.0.tgz", + "integrity": "sha512-qgDYXFSR5WvEfuS5dMj6oTMEbrrSaM0CrFk2Yiq/gXnBvD9pMa2jGXxyhGLfvhZpuMZe18CJpFxAt3CRs42NMg==", + "dev": true }, "is-root": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-root/-/is-root-1.0.0.tgz", - "integrity": "sha1-B7bCM7w5TNnQK6FclmvWZg1jQtU=" + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-root/-/is-root-2.1.0.tgz", + "integrity": "sha512-AGOriNp96vNBd3HtU+RzFEc75FfR5ymiYv8E553I71SCeXBiMsVDUtdio1OEFvrPyLIQ9tVR5RxXIFe5PUFjMg==", + "dev": true }, "is-stream": { "version": "1.1.0", @@ -6944,9 +11508,10 @@ "dev": true }, "is-svg": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/is-svg/-/is-svg-2.1.0.tgz", - "integrity": "sha1-z2EJDaDZ77yrhyLeum8DIgjbsOk=", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-svg/-/is-svg-3.0.0.tgz", + "integrity": "sha512-gi4iHK53LR2ujhLVVj+37Ykh9GLqYHX6JOVXbLAucaG/Cqw9xwdFOjDM2qeifLs1sF1npXXFvDu0r5HNgCMrzQ==", + "dev": true, "requires": { "html-comment-regex": "^1.1.0" } @@ -6955,6 +11520,7 @@ "version": "1.0.2", "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.2.tgz", "integrity": "sha512-HS8bZ9ox60yCJLH9snBpIwv9pYUAkcuLhSA1oero1UB5y9aiQpRA8y2ex945AOtCZL1lJDeIk3G5LthswI46Lw==", + "dev": true, "requires": { "has-symbols": "^1.0.0" } @@ -6962,7 +11528,8 @@ "is-typedarray": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", - "integrity": "sha1-5HnICFjfDBsR3dppQPlgEfzaSpo=" + "integrity": "sha1-5HnICFjfDBsR3dppQPlgEfzaSpo=", + "dev": true }, "is-upper-case": { "version": "1.1.2", @@ -6976,7 +11543,8 @@ "is-utf8": { "version": "0.2.1", "resolved": "https://registry.npmjs.org/is-utf8/-/is-utf8-0.2.1.tgz", - "integrity": "sha1-Sw2hRCEE0bM2NA6AeX6GXPOffXI=" + "integrity": "sha1-Sw2hRCEE0bM2NA6AeX6GXPOffXI=", + "dev": true }, "is-windows": { "version": "1.0.2", @@ -6986,7 +11554,8 @@ "is-wsl": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/is-wsl/-/is-wsl-1.1.0.tgz", - "integrity": "sha1-HxbkqiKwTRM2tmGIpmrzxgDDpm0=" + "integrity": "sha1-HxbkqiKwTRM2tmGIpmrzxgDDpm0=", + "dev": true }, "isarray": { "version": "1.0.0", @@ -6996,7 +11565,8 @@ "isexe": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=" + "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=", + "dev": true }, "isobject": { "version": "3.0.1", @@ -7015,933 +11585,692 @@ "isstream": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", - "integrity": "sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo=" - }, - "istanbul-api": { - "version": "1.3.7", - "resolved": "https://registry.npmjs.org/istanbul-api/-/istanbul-api-1.3.7.tgz", - "integrity": "sha512-4/ApBnMVeEPG3EkSzcw25wDe4N66wxwn+KKn6b47vyek8Xb3NBAcg4xfuQbS7BqcZuTX4wxfD5lVagdggR3gyA==", - "requires": { - "async": "^2.1.4", - "fileset": "^2.0.2", - "istanbul-lib-coverage": "^1.2.1", - "istanbul-lib-hook": "^1.2.2", - "istanbul-lib-instrument": "^1.10.2", - "istanbul-lib-report": "^1.1.5", - "istanbul-lib-source-maps": "^1.2.6", - "istanbul-reports": "^1.5.1", - "js-yaml": 
"^3.7.0", - "mkdirp": "^0.5.1", - "once": "^1.4.0" - } + "integrity": "sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo=", + "dev": true }, "istanbul-lib-coverage": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-1.2.1.tgz", - "integrity": "sha512-PzITeunAgyGbtY1ibVIUiV679EFChHjoMNRibEIobvmrCRaIgwLxNucOSimtNWUhEib/oO7QY2imD75JVgCJWQ==" - }, - "istanbul-lib-hook": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/istanbul-lib-hook/-/istanbul-lib-hook-1.2.2.tgz", - "integrity": "sha512-/Jmq7Y1VeHnZEQ3TL10VHyb564mn6VrQXHchON9Jf/AEcmQ3ZIiyD1BVzNOKTZf/G3gE+kiGK6SmpF9y3qGPLw==", - "requires": { - "append-transform": "^0.4.0" - } + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.5.tgz", + "integrity": "sha512-8aXznuEPCJvGnMSRft4udDRDtb1V3pkQkMMI5LI+6HuQz5oQ4J2UFn1H82raA3qJtyOLkkwVqICBQkjnGtn5mA==", + "dev": true }, "istanbul-lib-instrument": { - "version": "1.10.2", - "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-1.10.2.tgz", - "integrity": "sha512-aWHxfxDqvh/ZlxR8BBaEPVSWDPUkGD63VjGQn3jcw8jCp7sHEMKcrj4xfJn/ABzdMEHiQNyvDQhqm5o8+SQg7A==", + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-3.3.0.tgz", + "integrity": "sha512-5nnIN4vo5xQZHdXno/YDXJ0G+I3dAm4XgzfSVTPLQpj/zAV2dV6Juy0yaf10/zrJOJeHoN3fraFe+XRq2bFVZA==", + "dev": true, "requires": { - "babel-generator": "^6.18.0", - "babel-template": "^6.16.0", - "babel-traverse": "^6.18.0", - "babel-types": "^6.18.0", - "babylon": "^6.18.0", - "istanbul-lib-coverage": "^1.2.1", - "semver": "^5.3.0" + "@babel/generator": "^7.4.0", + "@babel/parser": "^7.4.3", + "@babel/template": "^7.4.0", + "@babel/traverse": "^7.4.3", + "@babel/types": "^7.4.0", + "istanbul-lib-coverage": "^2.0.5", + "semver": "^6.0.0" + }, + "dependencies": { + "semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "dev": true + } } }, "istanbul-lib-report": { - "version": "1.1.5", - "resolved": "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-1.1.5.tgz", - "integrity": "sha512-UsYfRMoi6QO/doUshYNqcKJqVmFe9w51GZz8BS3WB0lYxAllQYklka2wP9+dGZeHYaWIdcXUx8JGdbqaoXRXzw==", + "version": "2.0.8", + "resolved": "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-2.0.8.tgz", + "integrity": "sha512-fHBeG573EIihhAblwgxrSenp0Dby6tJMFR/HvlerBsrCTD5bkUuoNtn3gVh29ZCS824cGGBPn7Sg7cNk+2xUsQ==", + "dev": true, "requires": { - "istanbul-lib-coverage": "^1.2.1", - "mkdirp": "^0.5.1", - "path-parse": "^1.0.5", - "supports-color": "^3.1.2" + "istanbul-lib-coverage": "^2.0.5", + "make-dir": "^2.1.0", + "supports-color": "^6.1.0" }, "dependencies": { - "has-flag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-1.0.0.tgz", - "integrity": "sha1-nZ55MWXOAXoA8AQYxD+UKnsdEfo=" - }, "supports-color": { - "version": "3.2.3", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-3.2.3.tgz", - "integrity": "sha1-ZawFBLOVQXHYpklGsq48u4pfVPY=", + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-6.1.0.tgz", + "integrity": "sha512-qe1jfm1Mg7Nq/NSh6XE24gPXROEVsWHxC1LIx//XNlD9iw7YZQGjZNjYN7xGaEG6iKdA8EtNFW6R0gjnVXp+wQ==", + "dev": true, "requires": { - "has-flag": "^1.0.0" + "has-flag": 
"^3.0.0" } } } }, "istanbul-lib-source-maps": { - "version": "1.2.6", - "resolved": "https://registry.npmjs.org/istanbul-lib-source-maps/-/istanbul-lib-source-maps-1.2.6.tgz", - "integrity": "sha512-TtbsY5GIHgbMsMiRw35YBHGpZ1DVFEO19vxxeiDMYaeOFOCzfnYVxvl6pOUIZR4dtPhAGpSMup8OyF8ubsaqEg==", + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/istanbul-lib-source-maps/-/istanbul-lib-source-maps-3.0.6.tgz", + "integrity": "sha512-R47KzMtDJH6X4/YW9XTx+jrLnZnscW4VpNN+1PViSYTejLVPWv7oov+Duf8YQSPyVRUvueQqz1TcsC6mooZTXw==", + "dev": true, "requires": { - "debug": "^3.1.0", - "istanbul-lib-coverage": "^1.2.1", - "mkdirp": "^0.5.1", - "rimraf": "^2.6.1", - "source-map": "^0.5.3" + "debug": "^4.1.1", + "istanbul-lib-coverage": "^2.0.5", + "make-dir": "^2.1.0", + "rimraf": "^2.6.3", + "source-map": "^0.6.1" + }, + "dependencies": { + "debug": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", + "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", + "dev": true, + "requires": { + "ms": "^2.1.1" + } + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true + } } }, "istanbul-reports": { - "version": "1.5.1", - "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-1.5.1.tgz", - "integrity": "sha512-+cfoZ0UXzWjhAdzosCPP3AN8vvef8XDkWtTfgaN+7L3YTpNYITnCaEkceo5SEYy644VkHka/P1FvkWvrG/rrJw==", + "version": "2.2.7", + "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-2.2.7.tgz", + "integrity": "sha512-uu1F/L1o5Y6LzPVSVZXNOoD/KXpJue9aeLRd0sM9uMXfZvzomB0WxVamWb5ue8kA2vVWEmW7EG+A5n3f1kqHKg==", + "dev": true, "requires": { - "handlebars": "^4.0.3" + "html-escaper": "^2.0.0" } }, "jest": { - "version": "22.4.2", - "resolved": "http://registry.npmjs.org/jest/-/jest-22.4.2.tgz", - "integrity": "sha512-wD7dXWtfaQAgbNVsjFqzmuhg6nzwGsTRVea3FpSJ7GURhG+J536fw4mdoLB01DgiEozDDeF1ZMR/UlUszTsCrg==", + "version": "24.9.0", + "resolved": "https://registry.npmjs.org/jest/-/jest-24.9.0.tgz", + "integrity": "sha512-YvkBL1Zm7d2B1+h5fHEOdyjCG+sGMz4f8D86/0HiqJ6MB4MnDc8FgP5vdWsGnemOQro7lnYo8UakZ3+5A0jxGw==", + "dev": true, "requires": { - "import-local": "^1.0.0", - "jest-cli": "^22.4.2" + "import-local": "^2.0.0", + "jest-cli": "^24.9.0" }, "dependencies": { - "ansi-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", - "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=" - }, - "arr-diff": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/arr-diff/-/arr-diff-2.0.0.tgz", - "integrity": "sha1-jzuCf5Vai9ZpaX5KQlasPOrjVs8=", - "requires": { - "arr-flatten": "^1.0.1" - } - }, - "array-unique": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/array-unique/-/array-unique-0.2.1.tgz", - "integrity": "sha1-odl8yvy8JiXMcPrc6zalDFiwGlM=" - }, - "braces": { - "version": "1.8.5", - "resolved": "https://registry.npmjs.org/braces/-/braces-1.8.5.tgz", - "integrity": "sha1-uneWLhLf+WnWt2cR6RS3N4V79qc=", - "requires": { - "expand-range": "^1.8.1", - "preserve": "^0.2.0", - "repeat-element": "^1.1.2" - 
} - }, - "chalk": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.1.tgz", - "integrity": "sha512-ObN6h1v2fTJSmUXoS3nMQ92LbDK9be4TV+6G+omQlGJFdcUX5heKi1LZ1YnRMIgwTLEj3E24bT6tYni50rlCfQ==", - "requires": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" - } - }, - "expand-brackets": { - "version": "0.1.5", - "resolved": "https://registry.npmjs.org/expand-brackets/-/expand-brackets-0.1.5.tgz", - "integrity": "sha1-3wcoTjQqgHzXM6xa9yQR5YHRF3s=", - "requires": { - "is-posix-bracket": "^0.1.0" - } - }, - "extglob": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/extglob/-/extglob-0.3.2.tgz", - "integrity": "sha1-Lhj/PS9JqydlzskCPwEdqo2DSaE=", - "requires": { - "is-extglob": "^1.0.0" - } - }, - "is-extglob": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-1.0.0.tgz", - "integrity": "sha1-rEaBd8SUNAWgkvyPKXYMb/xiBsA=" - }, - "is-glob": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-2.0.1.tgz", - "integrity": "sha1-0Jb5JqPe1WAPP9/ZEZjLCIjC2GM=", - "requires": { - "is-extglob": "^1.0.0" - } - }, "jest-cli": { - "version": "22.4.4", - "resolved": "https://registry.npmjs.org/jest-cli/-/jest-cli-22.4.4.tgz", - "integrity": "sha512-I9dsgkeyjVEEZj9wrGrqlH+8OlNob9Iptyl+6L5+ToOLJmHm4JwOPatin1b2Bzp5R5YRQJ+oiedx7o1H7wJzhA==", + "version": "24.9.0", + "resolved": "https://registry.npmjs.org/jest-cli/-/jest-cli-24.9.0.tgz", + "integrity": "sha512-+VLRKyitT3BWoMeSUIHRxV/2g8y9gw91Jh5z2UmXZzkZKpbC08CSehVxgHUwTpy+HwGcns/tqafQDJW7imYvGg==", + "dev": true, "requires": { - "ansi-escapes": "^3.0.0", + "@jest/core": "^24.9.0", + "@jest/test-result": "^24.9.0", + "@jest/types": "^24.9.0", "chalk": "^2.0.1", "exit": "^0.1.2", - "glob": "^7.1.2", - "graceful-fs": "^4.1.11", - "import-local": "^1.0.0", - "is-ci": "^1.0.10", - "istanbul-api": "^1.1.14", - "istanbul-lib-coverage": "^1.1.1", - "istanbul-lib-instrument": "^1.8.0", - "istanbul-lib-source-maps": "^1.2.1", - "jest-changed-files": "^22.2.0", - "jest-config": "^22.4.4", - "jest-environment-jsdom": "^22.4.1", - "jest-get-type": "^22.1.0", - "jest-haste-map": "^22.4.2", - "jest-message-util": "^22.4.0", - "jest-regex-util": "^22.1.0", - "jest-resolve-dependencies": "^22.1.0", - "jest-runner": "^22.4.4", - "jest-runtime": "^22.4.4", - "jest-snapshot": "^22.4.0", - "jest-util": "^22.4.1", - "jest-validate": "^22.4.4", - "jest-worker": "^22.2.2", - "micromatch": "^2.3.11", - "node-notifier": "^5.2.1", - "realpath-native": "^1.0.0", - "rimraf": "^2.5.4", - "slash": "^1.0.0", - "string-length": "^2.0.0", - "strip-ansi": "^4.0.0", - "which": "^1.2.12", - "yargs": "^10.0.3" - } - }, - "kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", - "requires": { - "is-buffer": "^1.1.5" - } - }, - "micromatch": { - "version": "2.3.11", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-2.3.11.tgz", - "integrity": "sha1-hmd8l9FyCzY0MdBNDRUpO9OMFWU=", - "requires": { - "arr-diff": "^2.0.0", - "array-unique": "^0.2.1", - "braces": "^1.8.2", - "expand-brackets": "^0.1.4", - "extglob": "^0.3.1", - "filename-regex": "^2.0.0", - "is-extglob": "^1.0.0", - "is-glob": "^2.0.1", - "kind-of": "^3.0.2", - "normalize-path": "^2.0.1", - "object.omit": "^2.0.0", - "parse-glob": "^3.0.4", - "regex-cache": "^0.4.2" - } - }, - "strip-ansi": { - "version": "4.0.0", - "resolved": 
"https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", - "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", - "requires": { - "ansi-regex": "^3.0.0" + "import-local": "^2.0.0", + "is-ci": "^2.0.0", + "jest-config": "^24.9.0", + "jest-util": "^24.9.0", + "jest-validate": "^24.9.0", + "prompts": "^2.0.1", + "realpath-native": "^1.1.0", + "yargs": "^13.3.0" } } } }, "jest-changed-files": { - "version": "22.4.3", - "resolved": "https://registry.npmjs.org/jest-changed-files/-/jest-changed-files-22.4.3.tgz", - "integrity": "sha512-83Dh0w1aSkUNFhy5d2dvqWxi/y6weDwVVLU6vmK0cV9VpRxPzhTeGimbsbRDSnEoszhF937M4sDLLeS7Cu/Tmw==", + "version": "24.9.0", + "resolved": "https://registry.npmjs.org/jest-changed-files/-/jest-changed-files-24.9.0.tgz", + "integrity": "sha512-6aTWpe2mHF0DhL28WjdkO8LyGjs3zItPET4bMSeXU6T3ub4FPMw+mcOcbdGXQOAfmLcxofD23/5Bl9Z4AkFwqg==", + "dev": true, "requires": { + "@jest/types": "^24.9.0", + "execa": "^1.0.0", "throat": "^4.0.0" } }, "jest-config": { - "version": "22.4.4", - "resolved": "https://registry.npmjs.org/jest-config/-/jest-config-22.4.4.tgz", - "integrity": "sha512-9CKfo1GC4zrXSoMLcNeDvQBfgtqGTB1uP8iDIZ97oB26RCUb886KkKWhVcpyxVDOUxbhN+uzcBCeFe7w+Iem4A==", + "version": "24.9.0", + "resolved": "https://registry.npmjs.org/jest-config/-/jest-config-24.9.0.tgz", + "integrity": "sha512-RATtQJtVYQrp7fvWg6f5y3pEFj9I+H8sWw4aKxnDZ96mob5i5SD6ZEGWgMLXQ4LE8UurrjbdlLWdUeo+28QpfQ==", + "dev": true, "requires": { + "@babel/core": "^7.1.0", + "@jest/test-sequencer": "^24.9.0", + "@jest/types": "^24.9.0", + "babel-jest": "^24.9.0", "chalk": "^2.0.1", "glob": "^7.1.1", - "jest-environment-jsdom": "^22.4.1", - "jest-environment-node": "^22.4.1", - "jest-get-type": "^22.1.0", - "jest-jasmine2": "^22.4.4", - "jest-regex-util": "^22.1.0", - "jest-resolve": "^22.4.2", - "jest-util": "^22.4.1", - "jest-validate": "^22.4.4", - "pretty-format": "^22.4.0" - }, - "dependencies": { - "chalk": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.1.tgz", - "integrity": "sha512-ObN6h1v2fTJSmUXoS3nMQ92LbDK9be4TV+6G+omQlGJFdcUX5heKi1LZ1YnRMIgwTLEj3E24bT6tYni50rlCfQ==", - "requires": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" - } - } + "jest-environment-jsdom": "^24.9.0", + "jest-environment-node": "^24.9.0", + "jest-get-type": "^24.9.0", + "jest-jasmine2": "^24.9.0", + "jest-regex-util": "^24.3.0", + "jest-resolve": "^24.9.0", + "jest-util": "^24.9.0", + "jest-validate": "^24.9.0", + "micromatch": "^3.1.10", + "pretty-format": "^24.9.0", + "realpath-native": "^1.1.0" } }, "jest-diff": { - "version": "22.4.3", - "resolved": "http://registry.npmjs.org/jest-diff/-/jest-diff-22.4.3.tgz", - "integrity": "sha512-/QqGvCDP5oZOF6PebDuLwrB2BMD8ffJv6TAGAdEVuDx1+uEgrHpSFrfrOiMRx2eJ1hgNjlQrOQEHetVwij90KA==", + "version": "24.9.0", + "resolved": "https://registry.npmjs.org/jest-diff/-/jest-diff-24.9.0.tgz", + "integrity": "sha512-qMfrTs8AdJE2iqrTp0hzh7kTd2PQWrsFyj9tORoKmu32xjPjeE4NyjVRDz8ybYwqS2ik8N4hsIpiVTyFeo2lBQ==", + "dev": true, "requires": { "chalk": "^2.0.1", - "diff": "^3.2.0", - "jest-get-type": "^22.4.3", - "pretty-format": "^22.4.3" - }, - "dependencies": { - "chalk": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.1.tgz", - "integrity": "sha512-ObN6h1v2fTJSmUXoS3nMQ92LbDK9be4TV+6G+omQlGJFdcUX5heKi1LZ1YnRMIgwTLEj3E24bT6tYni50rlCfQ==", - "requires": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" - } - } + "diff-sequences": "^24.9.0", + 
"jest-get-type": "^24.9.0", + "pretty-format": "^24.9.0" } }, "jest-docblock": { - "version": "22.4.3", - "resolved": "https://registry.npmjs.org/jest-docblock/-/jest-docblock-22.4.3.tgz", - "integrity": "sha512-uPKBEAw7YrEMcXueMKZXn/rbMxBiSv48fSqy3uEnmgOlQhSX+lthBqHb1fKWNVmFqAp9E/RsSdBfiV31LbzaOg==", + "version": "24.9.0", + "resolved": "https://registry.npmjs.org/jest-docblock/-/jest-docblock-24.9.0.tgz", + "integrity": "sha512-F1DjdpDMJMA1cN6He0FNYNZlo3yYmOtRUnktrT9Q37njYzC5WEaDdmbynIgy0L/IvXvvgsG8OsqhLPXTpfmZAA==", + "dev": true, "requires": { "detect-newline": "^2.1.0" } }, - "jest-environment-jsdom": { - "version": "22.4.3", - "resolved": "https://registry.npmjs.org/jest-environment-jsdom/-/jest-environment-jsdom-22.4.3.tgz", - "integrity": "sha512-FviwfR+VyT3Datf13+ULjIMO5CSeajlayhhYQwpzgunswoaLIPutdbrnfUHEMyJCwvqQFaVtTmn9+Y8WCt6n1w==", + "jest-each": { + "version": "24.9.0", + "resolved": "https://registry.npmjs.org/jest-each/-/jest-each-24.9.0.tgz", + "integrity": "sha512-ONi0R4BvW45cw8s2Lrx8YgbeXL1oCQ/wIDwmsM3CqM/nlblNCPmnC3IPQlMbRFZu3wKdQ2U8BqM6lh3LJ5Bsog==", + "dev": true, "requires": { - "jest-mock": "^22.4.3", - "jest-util": "^22.4.3", - "jsdom": "^11.5.1" + "@jest/types": "^24.9.0", + "chalk": "^2.0.1", + "jest-get-type": "^24.9.0", + "jest-util": "^24.9.0", + "pretty-format": "^24.9.0" } }, - "jest-environment-node": { - "version": "22.4.3", - "resolved": "https://registry.npmjs.org/jest-environment-node/-/jest-environment-node-22.4.3.tgz", - "integrity": "sha512-reZl8XF6t/lMEuPWwo9OLfttyC26A5AMgDyEQ6DBgZuyfyeNUzYT8BFo6uxCCP/Av/b7eb9fTi3sIHFPBzmlRA==", + "jest-environment-jsdom": { + "version": "24.9.0", + "resolved": "https://registry.npmjs.org/jest-environment-jsdom/-/jest-environment-jsdom-24.9.0.tgz", + "integrity": "sha512-Zv9FV9NBRzLuALXjvRijO2351DRQeLYXtpD4xNvfoVFw21IOKNhZAEUKcbiEtjTkm2GsJ3boMVgkaR7rN8qetA==", + "dev": true, "requires": { - "jest-mock": "^22.4.3", - "jest-util": "^22.4.3" + "@jest/environment": "^24.9.0", + "@jest/fake-timers": "^24.9.0", + "@jest/types": "^24.9.0", + "jest-mock": "^24.9.0", + "jest-util": "^24.9.0", + "jsdom": "^11.5.1" } }, - "jest-get-type": { - "version": "22.4.3", - "resolved": "https://registry.npmjs.org/jest-get-type/-/jest-get-type-22.4.3.tgz", - "integrity": "sha512-/jsz0Y+V29w1chdXVygEKSz2nBoHoYqNShPe+QgxSNjAuP1i8+k4LbQNrfoliKej0P45sivkSCh7yiD6ubHS3w==" - }, - "jest-haste-map": { - "version": "22.4.3", - "resolved": "http://registry.npmjs.org/jest-haste-map/-/jest-haste-map-22.4.3.tgz", - "integrity": "sha512-4Q9fjzuPVwnaqGKDpIsCSoTSnG3cteyk2oNVjBX12HHOaF1oxql+uUiqZb5Ndu7g/vTZfdNwwy4WwYogLh29DQ==", + "jest-environment-jsdom-fourteen": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/jest-environment-jsdom-fourteen/-/jest-environment-jsdom-fourteen-1.0.1.tgz", + "integrity": "sha512-DojMX1sY+at5Ep+O9yME34CdidZnO3/zfPh8UW+918C5fIZET5vCjfkegixmsi7AtdYfkr4bPlIzmWnlvQkP7Q==", + "dev": true, "requires": { - "fb-watchman": "^2.0.0", - "graceful-fs": "^4.1.11", - "jest-docblock": "^22.4.3", - "jest-serializer": "^22.4.3", - "jest-worker": "^22.4.3", - "micromatch": "^2.3.11", - "sane": "^2.0.0" + "@jest/environment": "^24.3.0", + "@jest/fake-timers": "^24.3.0", + "@jest/types": "^24.3.0", + "jest-mock": "^24.0.0", + "jest-util": "^24.0.0", + "jsdom": "^14.1.0" }, "dependencies": { - "arr-diff": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/arr-diff/-/arr-diff-2.0.0.tgz", - "integrity": "sha1-jzuCf5Vai9ZpaX5KQlasPOrjVs8=", - "requires": { - "arr-flatten": "^1.0.1" - } - }, - "array-unique": { - 
"version": "0.2.1", - "resolved": "https://registry.npmjs.org/array-unique/-/array-unique-0.2.1.tgz", - "integrity": "sha1-odl8yvy8JiXMcPrc6zalDFiwGlM=" - }, - "braces": { - "version": "1.8.5", - "resolved": "https://registry.npmjs.org/braces/-/braces-1.8.5.tgz", - "integrity": "sha1-uneWLhLf+WnWt2cR6RS3N4V79qc=", - "requires": { - "expand-range": "^1.8.1", - "preserve": "^0.2.0", - "repeat-element": "^1.1.2" - } - }, - "expand-brackets": { - "version": "0.1.5", - "resolved": "https://registry.npmjs.org/expand-brackets/-/expand-brackets-0.1.5.tgz", - "integrity": "sha1-3wcoTjQqgHzXM6xa9yQR5YHRF3s=", - "requires": { - "is-posix-bracket": "^0.1.0" - } + "acorn": { + "version": "6.4.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-6.4.1.tgz", + "integrity": "sha512-ZVA9k326Nwrj3Cj9jlh3wGFutC2ZornPNARZwsNYqQYgN0EsV2d53w5RN/co65Ohn4sUAUtb1rSUAOD6XN9idA==", + "dev": true }, - "extglob": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/extglob/-/extglob-0.3.2.tgz", - "integrity": "sha1-Lhj/PS9JqydlzskCPwEdqo2DSaE=", + "jsdom": { + "version": "14.1.0", + "resolved": "https://registry.npmjs.org/jsdom/-/jsdom-14.1.0.tgz", + "integrity": "sha512-O901mfJSuTdwU2w3Sn+74T+RnDVP+FuV5fH8tcPWyqrseRAb0s5xOtPgCFiPOtLcyK7CLIJwPyD83ZqQWvA5ng==", + "dev": true, "requires": { - "is-extglob": "^1.0.0" + "abab": "^2.0.0", + "acorn": "^6.0.4", + "acorn-globals": "^4.3.0", + "array-equal": "^1.0.0", + "cssom": "^0.3.4", + "cssstyle": "^1.1.1", + "data-urls": "^1.1.0", + "domexception": "^1.0.1", + "escodegen": "^1.11.0", + "html-encoding-sniffer": "^1.0.2", + "nwsapi": "^2.1.3", + "parse5": "5.1.0", + "pn": "^1.1.0", + "request": "^2.88.0", + "request-promise-native": "^1.0.5", + "saxes": "^3.1.9", + "symbol-tree": "^3.2.2", + "tough-cookie": "^2.5.0", + "w3c-hr-time": "^1.0.1", + "w3c-xmlserializer": "^1.1.2", + "webidl-conversions": "^4.0.2", + "whatwg-encoding": "^1.0.5", + "whatwg-mimetype": "^2.3.0", + "whatwg-url": "^7.0.0", + "ws": "^6.1.2", + "xml-name-validator": "^3.0.0" } }, - "is-extglob": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-1.0.0.tgz", - "integrity": "sha1-rEaBd8SUNAWgkvyPKXYMb/xiBsA=" - }, - "is-glob": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-2.0.1.tgz", - "integrity": "sha1-0Jb5JqPe1WAPP9/ZEZjLCIjC2GM=", - "requires": { - "is-extglob": "^1.0.0" - } + "parse5": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/parse5/-/parse5-5.1.0.tgz", + "integrity": "sha512-fxNG2sQjHvlVAYmzBZS9YlDp6PTSSDwa98vkD4QgVDDCAo84z5X1t5XyJQ62ImdLXx5NdIIfihey6xpum9/gRQ==", + "dev": true }, - "kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", + "whatwg-url": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-7.1.0.tgz", + "integrity": "sha512-WUu7Rg1DroM7oQvGWfOiAK21n74Gg+T4elXEQYkOhtyLeWiJFoOGLXPKI/9gzIie9CtwVLm8wtw6YJdKyxSjeg==", + "dev": true, "requires": { - "is-buffer": "^1.1.5" + "lodash.sortby": "^4.7.0", + "tr46": "^1.0.1", + "webidl-conversions": "^4.0.2" } }, - "micromatch": { - "version": "2.3.11", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-2.3.11.tgz", - "integrity": "sha1-hmd8l9FyCzY0MdBNDRUpO9OMFWU=", + "ws": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/ws/-/ws-6.2.1.tgz", + "integrity": "sha512-GIyAXC2cB7LjvpgMt9EKS2ldqr0MTrORaleiOno6TweZ6r3TKtoFQWay/2PceJ3RuBasOHzXNn5Lrw1X0bEjqA==", + 
"dev": true, "requires": { - "arr-diff": "^2.0.0", - "array-unique": "^0.2.1", - "braces": "^1.8.2", - "expand-brackets": "^0.1.4", - "extglob": "^0.3.1", - "filename-regex": "^2.0.0", - "is-extglob": "^1.0.0", - "is-glob": "^2.0.1", - "kind-of": "^3.0.2", - "normalize-path": "^2.0.1", - "object.omit": "^2.0.0", - "parse-glob": "^3.0.4", - "regex-cache": "^0.4.2" + "async-limiter": "~1.0.0" } } } }, + "jest-environment-node": { + "version": "24.9.0", + "resolved": "https://registry.npmjs.org/jest-environment-node/-/jest-environment-node-24.9.0.tgz", + "integrity": "sha512-6d4V2f4nxzIzwendo27Tr0aFm+IXWa0XEUnaH6nU0FMaozxovt+sfRvh4J47wL1OvF83I3SSTu0XK+i4Bqe7uA==", + "dev": true, + "requires": { + "@jest/environment": "^24.9.0", + "@jest/fake-timers": "^24.9.0", + "@jest/types": "^24.9.0", + "jest-mock": "^24.9.0", + "jest-util": "^24.9.0" + } + }, + "jest-get-type": { + "version": "24.9.0", + "resolved": "https://registry.npmjs.org/jest-get-type/-/jest-get-type-24.9.0.tgz", + "integrity": "sha512-lUseMzAley4LhIcpSP9Jf+fTrQ4a1yHQwLNeeVa2cEmbCGeoZAtYPOIv8JaxLD/sUpKxetKGP+gsHl8f8TSj8Q==", + "dev": true + }, + "jest-haste-map": { + "version": "24.9.0", + "resolved": "https://registry.npmjs.org/jest-haste-map/-/jest-haste-map-24.9.0.tgz", + "integrity": "sha512-kfVFmsuWui2Sj1Rp1AJ4D9HqJwE4uwTlS/vO+eRUaMmd54BFpli2XhMQnPC2k4cHFVbB2Q2C+jtI1AGLgEnCjQ==", + "dev": true, + "requires": { + "@jest/types": "^24.9.0", + "anymatch": "^2.0.0", + "fb-watchman": "^2.0.0", + "fsevents": "^1.2.7", + "graceful-fs": "^4.1.15", + "invariant": "^2.2.4", + "jest-serializer": "^24.9.0", + "jest-util": "^24.9.0", + "jest-worker": "^24.9.0", + "micromatch": "^3.1.10", + "sane": "^4.0.3", + "walker": "^1.0.7" + } + }, "jest-jasmine2": { - "version": "22.4.4", - "resolved": "https://registry.npmjs.org/jest-jasmine2/-/jest-jasmine2-22.4.4.tgz", - "integrity": "sha512-nK3vdUl50MuH7vj/8at7EQVjPGWCi3d5+6aCi7Gxy/XMWdOdbH1qtO/LjKbqD8+8dUAEH+BVVh7HkjpCWC1CSw==", + "version": "24.9.0", + "resolved": "https://registry.npmjs.org/jest-jasmine2/-/jest-jasmine2-24.9.0.tgz", + "integrity": "sha512-Cq7vkAgaYKp+PsX+2/JbTarrk0DmNhsEtqBXNwUHkdlbrTBLtMJINADf2mf5FkowNsq8evbPc07/qFO0AdKTzw==", + "dev": true, "requires": { + "@babel/traverse": "^7.1.0", + "@jest/environment": "^24.9.0", + "@jest/test-result": "^24.9.0", + "@jest/types": "^24.9.0", "chalk": "^2.0.1", "co": "^4.6.0", - "expect": "^22.4.0", - "graceful-fs": "^4.1.11", - "is-generator-fn": "^1.0.0", - "jest-diff": "^22.4.0", - "jest-matcher-utils": "^22.4.0", - "jest-message-util": "^22.4.0", - "jest-snapshot": "^22.4.0", - "jest-util": "^22.4.1", - "source-map-support": "^0.5.0" - }, - "dependencies": { - "chalk": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.1.tgz", - "integrity": "sha512-ObN6h1v2fTJSmUXoS3nMQ92LbDK9be4TV+6G+omQlGJFdcUX5heKi1LZ1YnRMIgwTLEj3E24bT6tYni50rlCfQ==", - "requires": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" - } - } + "expect": "^24.9.0", + "is-generator-fn": "^2.0.0", + "jest-each": "^24.9.0", + "jest-matcher-utils": "^24.9.0", + "jest-message-util": "^24.9.0", + "jest-runtime": "^24.9.0", + "jest-snapshot": "^24.9.0", + "jest-util": "^24.9.0", + "pretty-format": "^24.9.0", + "throat": "^4.0.0" } }, "jest-leak-detector": { - "version": "22.4.3", - "resolved": "http://registry.npmjs.org/jest-leak-detector/-/jest-leak-detector-22.4.3.tgz", - "integrity": "sha512-NZpR/Ls7+ndO57LuXROdgCGz2RmUdC541tTImL9bdUtU3WadgFGm0yV+Ok4Fuia/1rLAn5KaJ+i76L6e3zGJYQ==", + "version": 
"24.9.0", + "resolved": "https://registry.npmjs.org/jest-leak-detector/-/jest-leak-detector-24.9.0.tgz", + "integrity": "sha512-tYkFIDsiKTGwb2FG1w8hX9V0aUb2ot8zY/2nFg087dUageonw1zrLMP4W6zsRO59dPkTSKie+D4rhMuP9nRmrA==", + "dev": true, "requires": { - "pretty-format": "^22.4.3" + "jest-get-type": "^24.9.0", + "pretty-format": "^24.9.0" } }, "jest-matcher-utils": { - "version": "22.4.3", - "resolved": "http://registry.npmjs.org/jest-matcher-utils/-/jest-matcher-utils-22.4.3.tgz", - "integrity": "sha512-lsEHVaTnKzdAPR5t4B6OcxXo9Vy4K+kRRbG5gtddY8lBEC+Mlpvm1CJcsMESRjzUhzkz568exMV1hTB76nAKbA==", + "version": "24.9.0", + "resolved": "https://registry.npmjs.org/jest-matcher-utils/-/jest-matcher-utils-24.9.0.tgz", + "integrity": "sha512-OZz2IXsu6eaiMAwe67c1T+5tUAtQyQx27/EMEkbFAGiw52tB9em+uGbzpcgYVpA8wl0hlxKPZxrly4CXU/GjHA==", + "dev": true, "requires": { "chalk": "^2.0.1", - "jest-get-type": "^22.4.3", - "pretty-format": "^22.4.3" - }, - "dependencies": { - "chalk": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.1.tgz", - "integrity": "sha512-ObN6h1v2fTJSmUXoS3nMQ92LbDK9be4TV+6G+omQlGJFdcUX5heKi1LZ1YnRMIgwTLEj3E24bT6tYni50rlCfQ==", - "requires": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" - } - } + "jest-diff": "^24.9.0", + "jest-get-type": "^24.9.0", + "pretty-format": "^24.9.0" } }, "jest-message-util": { - "version": "22.4.3", - "resolved": "https://registry.npmjs.org/jest-message-util/-/jest-message-util-22.4.3.tgz", - "integrity": "sha512-iAMeKxhB3Se5xkSjU0NndLLCHtP4n+GtCqV0bISKA5dmOXQfEbdEmYiu2qpnWBDCQdEafNDDU6Q+l6oBMd/+BA==", + "version": "24.9.0", + "resolved": "https://registry.npmjs.org/jest-message-util/-/jest-message-util-24.9.0.tgz", + "integrity": "sha512-oCj8FiZ3U0hTP4aSui87P4L4jC37BtQwUMqk+zk/b11FR19BJDeZsZAvIHutWnmtw7r85UmR3CEWZ0HWU2mAlw==", + "dev": true, "requires": { - "@babel/code-frame": "^7.0.0-beta.35", + "@babel/code-frame": "^7.0.0", + "@jest/test-result": "^24.9.0", + "@jest/types": "^24.9.0", + "@types/stack-utils": "^1.0.1", "chalk": "^2.0.1", - "micromatch": "^2.3.11", - "slash": "^1.0.0", + "micromatch": "^3.1.10", + "slash": "^2.0.0", "stack-utils": "^1.0.1" - }, - "dependencies": { - "arr-diff": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/arr-diff/-/arr-diff-2.0.0.tgz", - "integrity": "sha1-jzuCf5Vai9ZpaX5KQlasPOrjVs8=", - "requires": { - "arr-flatten": "^1.0.1" - } - }, - "array-unique": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/array-unique/-/array-unique-0.2.1.tgz", - "integrity": "sha1-odl8yvy8JiXMcPrc6zalDFiwGlM=" - }, - "braces": { - "version": "1.8.5", - "resolved": "https://registry.npmjs.org/braces/-/braces-1.8.5.tgz", - "integrity": "sha1-uneWLhLf+WnWt2cR6RS3N4V79qc=", - "requires": { - "expand-range": "^1.8.1", - "preserve": "^0.2.0", - "repeat-element": "^1.1.2" - } - }, - "chalk": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.1.tgz", - "integrity": "sha512-ObN6h1v2fTJSmUXoS3nMQ92LbDK9be4TV+6G+omQlGJFdcUX5heKi1LZ1YnRMIgwTLEj3E24bT6tYni50rlCfQ==", - "requires": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" - } - }, - "expand-brackets": { - "version": "0.1.5", - "resolved": "https://registry.npmjs.org/expand-brackets/-/expand-brackets-0.1.5.tgz", - "integrity": "sha1-3wcoTjQqgHzXM6xa9yQR5YHRF3s=", - "requires": { - "is-posix-bracket": "^0.1.0" - } - }, - "extglob": { - "version": "0.3.2", - "resolved": 
"https://registry.npmjs.org/extglob/-/extglob-0.3.2.tgz", - "integrity": "sha1-Lhj/PS9JqydlzskCPwEdqo2DSaE=", - "requires": { - "is-extglob": "^1.0.0" - } - }, - "is-extglob": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-1.0.0.tgz", - "integrity": "sha1-rEaBd8SUNAWgkvyPKXYMb/xiBsA=" - }, - "is-glob": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-2.0.1.tgz", - "integrity": "sha1-0Jb5JqPe1WAPP9/ZEZjLCIjC2GM=", - "requires": { - "is-extglob": "^1.0.0" - } - }, - "kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", - "requires": { - "is-buffer": "^1.1.5" - } - }, - "micromatch": { - "version": "2.3.11", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-2.3.11.tgz", - "integrity": "sha1-hmd8l9FyCzY0MdBNDRUpO9OMFWU=", - "requires": { - "arr-diff": "^2.0.0", - "array-unique": "^0.2.1", - "braces": "^1.8.2", - "expand-brackets": "^0.1.4", - "extglob": "^0.3.1", - "filename-regex": "^2.0.0", - "is-extglob": "^1.0.0", - "is-glob": "^2.0.1", - "kind-of": "^3.0.2", - "normalize-path": "^2.0.1", - "object.omit": "^2.0.0", - "parse-glob": "^3.0.4", - "regex-cache": "^0.4.2" - } - } } }, "jest-mock": { - "version": "22.4.3", - "resolved": "https://registry.npmjs.org/jest-mock/-/jest-mock-22.4.3.tgz", - "integrity": "sha512-+4R6mH5M1G4NK16CKg9N1DtCaFmuxhcIqF4lQK/Q1CIotqMs/XBemfpDPeVZBFow6iyUNu6EBT9ugdNOTT5o5Q==" + "version": "24.9.0", + "resolved": "https://registry.npmjs.org/jest-mock/-/jest-mock-24.9.0.tgz", + "integrity": "sha512-3BEYN5WbSq9wd+SyLDES7AHnjH9A/ROBwmz7l2y+ol+NtSFO8DYiEBzoO1CeFc9a8DYy10EO4dDFVv/wN3zl1w==", + "dev": true, + "requires": { + "@jest/types": "^24.9.0" + } + }, + "jest-pnp-resolver": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/jest-pnp-resolver/-/jest-pnp-resolver-1.2.1.tgz", + "integrity": "sha512-pgFw2tm54fzgYvc/OHrnysABEObZCUNFnhjoRjaVOCN8NYc032/gVjPaHD4Aq6ApkSieWtfKAFQtmDKAmhupnQ==", + "dev": true }, "jest-regex-util": { - "version": "22.4.3", - "resolved": "https://registry.npmjs.org/jest-regex-util/-/jest-regex-util-22.4.3.tgz", - "integrity": "sha512-LFg1gWr3QinIjb8j833bq7jtQopiwdAs67OGfkPrvy7uNUbVMfTXXcOKXJaeY5GgjobELkKvKENqq1xrUectWg==" + "version": "24.9.0", + "resolved": "https://registry.npmjs.org/jest-regex-util/-/jest-regex-util-24.9.0.tgz", + "integrity": "sha512-05Cmb6CuxaA+Ys6fjr3PhvV3bGQmO+2p2La4hFbU+W5uOc479f7FdLXUWXw4pYMAhhSZIuKHwSXSu6CsSBAXQA==", + "dev": true }, "jest-resolve": { - "version": "22.4.3", - "resolved": "http://registry.npmjs.org/jest-resolve/-/jest-resolve-22.4.3.tgz", - "integrity": "sha512-u3BkD/MQBmwrOJDzDIaxpyqTxYH+XqAXzVJP51gt29H8jpj3QgKof5GGO2uPGKGeA1yTMlpbMs1gIQ6U4vcRhw==", + "version": "24.9.0", + "resolved": "https://registry.npmjs.org/jest-resolve/-/jest-resolve-24.9.0.tgz", + "integrity": "sha512-TaLeLVL1l08YFZAt3zaPtjiVvyy4oSA6CRe+0AFPPVX3Q/VI0giIWWoAvoS5L96vj9Dqxj4fB5p2qrHCmTU/MQ==", + "dev": true, "requires": { - "browser-resolve": "^1.11.2", - "chalk": "^2.0.1" - }, - "dependencies": { - "chalk": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.1.tgz", - "integrity": "sha512-ObN6h1v2fTJSmUXoS3nMQ92LbDK9be4TV+6G+omQlGJFdcUX5heKi1LZ1YnRMIgwTLEj3E24bT6tYni50rlCfQ==", - "requires": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" - } - } + "@jest/types": "^24.9.0", + "browser-resolve": "^1.11.3", + "chalk": "^2.0.1", + 
"jest-pnp-resolver": "^1.2.1", + "realpath-native": "^1.1.0" } }, "jest-resolve-dependencies": { - "version": "22.4.3", - "resolved": "http://registry.npmjs.org/jest-resolve-dependencies/-/jest-resolve-dependencies-22.4.3.tgz", - "integrity": "sha512-06czCMVToSN8F2U4EvgSB1Bv/56gc7MpCftZ9z9fBgUQM7dzHGCMBsyfVA6dZTx8v0FDcnALf7hupeQxaBCvpA==", + "version": "24.9.0", + "resolved": "https://registry.npmjs.org/jest-resolve-dependencies/-/jest-resolve-dependencies-24.9.0.tgz", + "integrity": "sha512-Fm7b6AlWnYhT0BXy4hXpactHIqER7erNgIsIozDXWl5dVm+k8XdGVe1oTg1JyaFnOxarMEbax3wyRJqGP2Pq+g==", + "dev": true, "requires": { - "jest-regex-util": "^22.4.3" + "@jest/types": "^24.9.0", + "jest-regex-util": "^24.3.0", + "jest-snapshot": "^24.9.0" } }, "jest-runner": { - "version": "22.4.4", - "resolved": "https://registry.npmjs.org/jest-runner/-/jest-runner-22.4.4.tgz", - "integrity": "sha512-5S/OpB51igQW9xnkM5Tgd/7ZjiAuIoiJAVtvVTBcEBiXBIFzWM3BAMPBM19FX68gRV0KWyFuGKj0EY3M3aceeQ==", + "version": "24.9.0", + "resolved": "https://registry.npmjs.org/jest-runner/-/jest-runner-24.9.0.tgz", + "integrity": "sha512-KksJQyI3/0mhcfspnxxEOBueGrd5E4vV7ADQLT9ESaCzz02WnbdbKWIf5Mkaucoaj7obQckYPVX6JJhgUcoWWg==", + "dev": true, "requires": { + "@jest/console": "^24.7.1", + "@jest/environment": "^24.9.0", + "@jest/test-result": "^24.9.0", + "@jest/types": "^24.9.0", + "chalk": "^2.4.2", "exit": "^0.1.2", - "jest-config": "^22.4.4", - "jest-docblock": "^22.4.0", - "jest-haste-map": "^22.4.2", - "jest-jasmine2": "^22.4.4", - "jest-leak-detector": "^22.4.0", - "jest-message-util": "^22.4.0", - "jest-runtime": "^22.4.4", - "jest-util": "^22.4.1", - "jest-worker": "^22.2.2", + "graceful-fs": "^4.1.15", + "jest-config": "^24.9.0", + "jest-docblock": "^24.3.0", + "jest-haste-map": "^24.9.0", + "jest-jasmine2": "^24.9.0", + "jest-leak-detector": "^24.9.0", + "jest-message-util": "^24.9.0", + "jest-resolve": "^24.9.0", + "jest-runtime": "^24.9.0", + "jest-util": "^24.9.0", + "jest-worker": "^24.6.0", + "source-map-support": "^0.5.6", "throat": "^4.0.0" } }, "jest-runtime": { - "version": "22.4.4", - "resolved": "https://registry.npmjs.org/jest-runtime/-/jest-runtime-22.4.4.tgz", - "integrity": "sha512-WRTj9m///npte1YjuphCYX7GRY/c2YvJImU9t7qOwFcqHr4YMzmX6evP/3Sehz5DKW2Vi8ONYPCFWe36JVXxfw==", + "version": "24.9.0", + "resolved": "https://registry.npmjs.org/jest-runtime/-/jest-runtime-24.9.0.tgz", + "integrity": "sha512-8oNqgnmF3v2J6PVRM2Jfuj8oX3syKmaynlDMMKQ4iyzbQzIG6th5ub/lM2bCMTmoTKM3ykcUYI2Pw9xwNtjMnw==", + "dev": true, "requires": { - "babel-core": "^6.0.0", - "babel-jest": "^22.4.4", - "babel-plugin-istanbul": "^4.1.5", + "@jest/console": "^24.7.1", + "@jest/environment": "^24.9.0", + "@jest/source-map": "^24.3.0", + "@jest/transform": "^24.9.0", + "@jest/types": "^24.9.0", + "@types/yargs": "^13.0.0", "chalk": "^2.0.1", - "convert-source-map": "^1.4.0", "exit": "^0.1.2", - "graceful-fs": "^4.1.11", - "jest-config": "^22.4.4", - "jest-haste-map": "^22.4.2", - "jest-regex-util": "^22.1.0", - "jest-resolve": "^22.4.2", - "jest-util": "^22.4.1", - "jest-validate": "^22.4.4", - "json-stable-stringify": "^1.0.1", - "micromatch": "^2.3.11", - "realpath-native": "^1.0.0", - "slash": "^1.0.0", - "strip-bom": "3.0.0", - "write-file-atomic": "^2.1.0", - "yargs": "^10.0.3" + "glob": "^7.1.3", + "graceful-fs": "^4.1.15", + "jest-config": "^24.9.0", + "jest-haste-map": "^24.9.0", + "jest-message-util": "^24.9.0", + "jest-mock": "^24.9.0", + "jest-regex-util": "^24.3.0", + "jest-resolve": "^24.9.0", + "jest-snapshot": "^24.9.0", + "jest-util": 
"^24.9.0", + "jest-validate": "^24.9.0", + "realpath-native": "^1.1.0", + "slash": "^2.0.0", + "strip-bom": "^3.0.0", + "yargs": "^13.3.0" }, "dependencies": { - "arr-diff": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/arr-diff/-/arr-diff-2.0.0.tgz", - "integrity": "sha1-jzuCf5Vai9ZpaX5KQlasPOrjVs8=", - "requires": { - "arr-flatten": "^1.0.1" - } - }, - "array-unique": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/array-unique/-/array-unique-0.2.1.tgz", - "integrity": "sha1-odl8yvy8JiXMcPrc6zalDFiwGlM=" - }, - "braces": { - "version": "1.8.5", - "resolved": "https://registry.npmjs.org/braces/-/braces-1.8.5.tgz", - "integrity": "sha1-uneWLhLf+WnWt2cR6RS3N4V79qc=", - "requires": { - "expand-range": "^1.8.1", - "preserve": "^0.2.0", - "repeat-element": "^1.1.2" - } - }, - "chalk": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.1.tgz", - "integrity": "sha512-ObN6h1v2fTJSmUXoS3nMQ92LbDK9be4TV+6G+omQlGJFdcUX5heKi1LZ1YnRMIgwTLEj3E24bT6tYni50rlCfQ==", - "requires": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" - } - }, - "expand-brackets": { - "version": "0.1.5", - "resolved": "https://registry.npmjs.org/expand-brackets/-/expand-brackets-0.1.5.tgz", - "integrity": "sha1-3wcoTjQqgHzXM6xa9yQR5YHRF3s=", - "requires": { - "is-posix-bracket": "^0.1.0" - } - }, - "extglob": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/extglob/-/extglob-0.3.2.tgz", - "integrity": "sha1-Lhj/PS9JqydlzskCPwEdqo2DSaE=", - "requires": { - "is-extglob": "^1.0.0" - } - }, - "is-extglob": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-1.0.0.tgz", - "integrity": "sha1-rEaBd8SUNAWgkvyPKXYMb/xiBsA=" - }, - "is-glob": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-2.0.1.tgz", - "integrity": "sha1-0Jb5JqPe1WAPP9/ZEZjLCIjC2GM=", - "requires": { - "is-extglob": "^1.0.0" - } - }, - "kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", - "requires": { - "is-buffer": "^1.1.5" - } - }, - "micromatch": { - "version": "2.3.11", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-2.3.11.tgz", - "integrity": "sha1-hmd8l9FyCzY0MdBNDRUpO9OMFWU=", - "requires": { - "arr-diff": "^2.0.0", - "array-unique": "^0.2.1", - "braces": "^1.8.2", - "expand-brackets": "^0.1.4", - "extglob": "^0.3.1", - "filename-regex": "^2.0.0", - "is-extglob": "^1.0.0", - "is-glob": "^2.0.1", - "kind-of": "^3.0.2", - "normalize-path": "^2.0.1", - "object.omit": "^2.0.0", - "parse-glob": "^3.0.4", - "regex-cache": "^0.4.2" - } - }, "strip-bom": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", - "integrity": "sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM=" + "integrity": "sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM=", + "dev": true } } }, "jest-serializer": { - "version": "22.4.3", - "resolved": "https://registry.npmjs.org/jest-serializer/-/jest-serializer-22.4.3.tgz", - "integrity": "sha512-uPaUAppx4VUfJ0QDerpNdF43F68eqKWCzzhUlKNDsUPhjOon7ZehR4C809GCqh765FoMRtTVUVnGvIoskkYHiw==" + "version": "24.9.0", + "resolved": "https://registry.npmjs.org/jest-serializer/-/jest-serializer-24.9.0.tgz", + "integrity": "sha512-DxYipDr8OvfrKH3Kel6NdED3OXxjvxXZ1uIY2I9OFbGg+vUkkg7AGvi65qbhbWNPvDckXmzMPbK3u3HaDO49bQ==", + "dev": true }, "jest-snapshot": { - "version": "22.4.3", - "resolved": 
"http://registry.npmjs.org/jest-snapshot/-/jest-snapshot-22.4.3.tgz", - "integrity": "sha512-JXA0gVs5YL0HtLDCGa9YxcmmV2LZbwJ+0MfyXBBc5qpgkEYITQFJP7XNhcHFbUvRiniRpRbGVfJrOoYhhGE0RQ==", + "version": "24.9.0", + "resolved": "https://registry.npmjs.org/jest-snapshot/-/jest-snapshot-24.9.0.tgz", + "integrity": "sha512-uI/rszGSs73xCM0l+up7O7a40o90cnrk429LOiK3aeTvfC0HHmldbd81/B7Ix81KSFe1lwkbl7GnBGG4UfuDew==", + "dev": true, "requires": { + "@babel/types": "^7.0.0", + "@jest/types": "^24.9.0", "chalk": "^2.0.1", - "jest-diff": "^22.4.3", - "jest-matcher-utils": "^22.4.3", + "expect": "^24.9.0", + "jest-diff": "^24.9.0", + "jest-get-type": "^24.9.0", + "jest-matcher-utils": "^24.9.0", + "jest-message-util": "^24.9.0", + "jest-resolve": "^24.9.0", "mkdirp": "^0.5.1", "natural-compare": "^1.4.0", - "pretty-format": "^22.4.3" + "pretty-format": "^24.9.0", + "semver": "^6.2.0" }, "dependencies": { - "chalk": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.1.tgz", - "integrity": "sha512-ObN6h1v2fTJSmUXoS3nMQ92LbDK9be4TV+6G+omQlGJFdcUX5heKi1LZ1YnRMIgwTLEj3E24bT6tYni50rlCfQ==", - "requires": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" - } + "semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "dev": true } } }, "jest-util": { - "version": "22.4.3", - "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-22.4.3.tgz", - "integrity": "sha512-rfDfG8wyC5pDPNdcnAlZgwKnzHvZDu8Td2NJI/jAGKEGxJPYiE4F0ss/gSAkG4778Y23Hvbz+0GMrDJTeo7RjQ==", + "version": "24.9.0", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-24.9.0.tgz", + "integrity": "sha512-x+cZU8VRmOJxbA1K5oDBdxQmdq0OIdADarLxk0Mq+3XS4jgvhG/oKGWcIDCtPG0HgjxOYvF+ilPJQsAyXfbNOg==", + "dev": true, "requires": { - "callsites": "^2.0.0", + "@jest/console": "^24.9.0", + "@jest/fake-timers": "^24.9.0", + "@jest/source-map": "^24.9.0", + "@jest/test-result": "^24.9.0", + "@jest/types": "^24.9.0", + "callsites": "^3.0.0", "chalk": "^2.0.1", - "graceful-fs": "^4.1.11", - "is-ci": "^1.0.10", - "jest-message-util": "^22.4.3", + "graceful-fs": "^4.1.15", + "is-ci": "^2.0.0", "mkdirp": "^0.5.1", + "slash": "^2.0.0", "source-map": "^0.6.0" }, "dependencies": { - "chalk": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.1.tgz", - "integrity": "sha512-ObN6h1v2fTJSmUXoS3nMQ92LbDK9be4TV+6G+omQlGJFdcUX5heKi1LZ1YnRMIgwTLEj3E24bT6tYni50rlCfQ==", - "requires": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" - } + "callsites": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", + "dev": true }, "source-map": { "version": "0.6.1", "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==" + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true } } }, "jest-validate": { - "version": "22.4.4", - "resolved": "https://registry.npmjs.org/jest-validate/-/jest-validate-22.4.4.tgz", - "integrity": 
"sha512-dmlf4CIZRGvkaVg3fa0uetepcua44DHtktHm6rcoNVtYlpwe6fEJRkMFsaUVcFHLzbuBJ2cPw9Gl9TKfnzMVwg==", + "version": "24.9.0", + "resolved": "https://registry.npmjs.org/jest-validate/-/jest-validate-24.9.0.tgz", + "integrity": "sha512-HPIt6C5ACwiqSiwi+OfSSHbK8sG7akG8eATl+IPKaeIjtPOeBUd/g3J7DghugzxrGjI93qS/+RPKe1H6PqvhRQ==", + "dev": true, "requires": { + "@jest/types": "^24.9.0", + "camelcase": "^5.3.1", "chalk": "^2.0.1", - "jest-config": "^22.4.4", - "jest-get-type": "^22.1.0", - "leven": "^2.1.0", - "pretty-format": "^22.4.0" + "jest-get-type": "^24.9.0", + "leven": "^3.1.0", + "pretty-format": "^24.9.0" + } + }, + "jest-watch-typeahead": { + "version": "0.4.2", + "resolved": "https://registry.npmjs.org/jest-watch-typeahead/-/jest-watch-typeahead-0.4.2.tgz", + "integrity": "sha512-f7VpLebTdaXs81rg/oj4Vg/ObZy2QtGzAmGLNsqUS5G5KtSN68tFcIsbvNODfNyQxU78g7D8x77o3bgfBTR+2Q==", + "dev": true, + "requires": { + "ansi-escapes": "^4.2.1", + "chalk": "^2.4.1", + "jest-regex-util": "^24.9.0", + "jest-watcher": "^24.3.0", + "slash": "^3.0.0", + "string-length": "^3.1.0", + "strip-ansi": "^5.0.0" }, "dependencies": { - "chalk": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.1.tgz", - "integrity": "sha512-ObN6h1v2fTJSmUXoS3nMQ92LbDK9be4TV+6G+omQlGJFdcUX5heKi1LZ1YnRMIgwTLEj3E24bT6tYni50rlCfQ==", + "ansi-escapes": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.1.tgz", + "integrity": "sha512-JWF7ocqNrp8u9oqpgV+wH5ftbt+cfvv+PTjOvKLT3AdYly/LmORARfEVT1iyjwN+4MqE5UmVKoAdIBqeoCHgLA==", + "dev": true, "requires": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" + "type-fest": "^0.11.0" + } + }, + "slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true + }, + "string-length": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/string-length/-/string-length-3.1.0.tgz", + "integrity": "sha512-Ttp5YvkGm5v9Ijagtaz1BnN+k9ObpvS0eIBblPMp2YWL8FBmi9qblQ9fexc2k/CXFgrTIteU3jAw3payCnwSTA==", + "dev": true, + "requires": { + "astral-regex": "^1.0.0", + "strip-ansi": "^5.2.0" + } + }, + "type-fest": { + "version": "0.11.0", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.11.0.tgz", + "integrity": "sha512-OdjXJxnCN1AvyLSzeKIgXTXxV+99ZuXl3Hpo9XpJAv9MBcHrrJOQ5kV7ypXOuQie+AmWG25hLbiKdwYTifzcfQ==", + "dev": true + } + } + }, + "jest-watcher": { + "version": "24.9.0", + "resolved": "https://registry.npmjs.org/jest-watcher/-/jest-watcher-24.9.0.tgz", + "integrity": "sha512-+/fLOfKPXXYJDYlks62/4R4GoT+GU1tYZed99JSCOsmzkkF7727RqKrjNAxtfO4YpGv11wybgRvCjR73lK2GZw==", + "dev": true, + "requires": { + "@jest/test-result": "^24.9.0", + "@jest/types": "^24.9.0", + "@types/yargs": "^13.0.0", + "ansi-escapes": "^3.0.0", + "chalk": "^2.0.1", + "jest-util": "^24.9.0", + "string-length": "^2.0.0" + } + }, + "jest-worker": { + "version": "24.9.0", + "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-24.9.0.tgz", + "integrity": "sha512-51PE4haMSXcHohnSMdM42anbvZANYTqMrr52tVKPqqsPJMzoP6FYYDVqahX/HrAoKEKz3uUPzSvKs9A3qR4iVw==", + "dev": true, + "requires": { + "merge-stream": "^2.0.0", + "supports-color": "^6.1.0" + }, + "dependencies": { + "supports-color": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-6.1.0.tgz", + "integrity": 
"sha512-qe1jfm1Mg7Nq/NSh6XE24gPXROEVsWHxC1LIx//XNlD9iw7YZQGjZNjYN7xGaEG6iKdA8EtNFW6R0gjnVXp+wQ==", + "dev": true, + "requires": { + "has-flag": "^3.0.0" } } } }, - "jest-worker": { - "version": "22.4.3", - "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-22.4.3.tgz", - "integrity": "sha512-B1ucW4fI8qVAuZmicFxI1R3kr2fNeYJyvIQ1rKcuLYnenFV5K5aMbxFj6J0i00Ju83S8jP2d7Dz14+AvbIHRYQ==", - "requires": { - "merge-stream": "^1.0.1" - } - }, - "jimp": { - "version": "0.2.28", - "resolved": "https://registry.npmjs.org/jimp/-/jimp-0.2.28.tgz", - "integrity": "sha1-3VKak3GQ9ClXp5N9Gsw6d2KZbqI=", - "dev": true, - "requires": { - "bignumber.js": "^2.1.0", - "bmp-js": "0.0.3", - "es6-promise": "^3.0.2", - "exif-parser": "^0.1.9", - "file-type": "^3.1.0", - "jpeg-js": "^0.2.0", - "load-bmfont": "^1.2.3", - "mime": "^1.3.4", - "mkdirp": "0.5.1", - "pixelmatch": "^4.0.0", - "pngjs": "^3.0.0", - "read-chunk": "^1.0.1", - "request": "^2.65.0", - "stream-to-buffer": "^0.1.0", - "tinycolor2": "^1.1.2", - "url-regex": "^3.0.0" - }, - "dependencies": { - "es6-promise": { - "version": "3.3.1", - "resolved": "http://registry.npmjs.org/es6-promise/-/es6-promise-3.3.1.tgz", - "integrity": "sha1-oIzd6EzNvzTQJ6FFG8kdS80ophM=", - "dev": true - } - } - }, - "jpeg-js": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/jpeg-js/-/jpeg-js-0.2.0.tgz", - "integrity": "sha1-U+RI7J0mPmgyZkZ+lELSxaLvVII=", - "dev": true - }, - "js-base64": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/js-base64/-/js-base64-2.5.0.tgz", - "integrity": "sha512-wlEBIZ5LP8usDylWbDNhKPEFVFdI5hCHpnVoT/Ysvoi/PRhJENm/Rlh9TvjYB38HFfKZN7OzEbRjmjvLkFw11g==" - }, "js-tokens": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==" }, "js-yaml": { - "version": "3.12.0", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.12.0.tgz", - "integrity": "sha512-PIt2cnwmPfL4hKNwqeiuz4bKfnzHTBv6HyVgjahA6mPLwPDzjDWrplJBMjHUFxku/N3FlmrbyPclad+I+4mJ3A==", + "version": "3.13.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.13.1.tgz", + "integrity": "sha512-YfbcO7jXDdyj0DGxYVSlSeQNHbD7XPWvrVWeVUujrQEoZzWJIRrCPoyk6kL6IAjAG2IolMK4T0hNUe0HOUs5Jw==", "requires": { "argparse": "^1.0.7", "esprima": "^4.0.0" @@ -7950,12 +12279,14 @@ "jsbn": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz", - "integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM=" + "integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM=", + "dev": true }, "jsdom": { "version": "11.12.0", "resolved": "https://registry.npmjs.org/jsdom/-/jsdom-11.12.0.tgz", "integrity": "sha512-y8Px43oyiBM13Zc1z780FrfNLJCXTL40EWlty/LXUtcjykRBNgLlCjWXpfSPBl2iv+N7koQN+dvqszHZgT/Fjw==", + "dev": true, "requires": { "abab": "^2.0.0", "acorn": "^5.5.3", @@ -7983,58 +12314,98 @@ "whatwg-url": "^6.4.1", "ws": "^5.2.0", "xml-name-validator": "^3.0.0" + }, + "dependencies": { + "acorn": { + "version": "5.7.4", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-5.7.4.tgz", + "integrity": "sha512-1D++VG7BhrtvQpNbBzovKNc1FLGGEE/oGe7b9xJm/RFHMBeUaUGpluV9RLjZa47YFdPcDAenEYuq9pQPcMdLJg==", + "dev": true + } } }, "jsesc": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-1.3.0.tgz", - "integrity": "sha1-RsP+yMGJKxKwgz25vHYiF226s0s=" + "version": "2.5.2", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz", + "integrity": 
"sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==", + "dev": true }, - "json-loader": { - "version": "0.5.7", - "resolved": "https://registry.npmjs.org/json-loader/-/json-loader-0.5.7.tgz", - "integrity": "sha512-QLPs8Dj7lnf3e3QYS1zkCo+4ZwqOiF9d/nZnYozTISxXWCfNs9yuky5rJw4/W34s7POaNlbZmQGaB5NiXCbP4w==" + "json-bigint": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-0.3.0.tgz", + "integrity": "sha1-DM2RLEuCcNBfBW+9E4FLU9OCWx4=", + "dev": true, + "requires": { + "bignumber.js": "^7.0.0" + }, + "dependencies": { + "bignumber.js": { + "version": "7.2.1", + "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-7.2.1.tgz", + "integrity": "sha512-S4XzBk5sMB+Rcb/LNcpzXr57VRTxgAvaAEDAl1AwRx27j00hT84O6OkteE7u8UB3NuaaygCRrEpqox4uDOrbdQ==", + "dev": true + } + } + }, + "json-parse-better-errors": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz", + "integrity": "sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw==", + "dev": true }, "json-schema": { "version": "0.2.3", "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz", - "integrity": "sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM=" + "integrity": "sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM=", + "dev": true }, "json-schema-traverse": { - "version": "0.3.1", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.3.1.tgz", - "integrity": "sha1-NJptRMU6Ud6JtAgFxdXlm0F9M0A=" + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true }, "json-stable-stringify": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/json-stable-stringify/-/json-stable-stringify-1.0.1.tgz", "integrity": "sha1-mnWdOcXy/1A/1TAGRu1EX4jE+a8=", + "dev": true, "requires": { "jsonify": "~0.0.0" } }, + "json-stable-stringify-without-jsonify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", + "integrity": "sha1-nbe1lJatPzz+8wp1FC0tkwrXJlE=", + "dev": true + }, "json-stringify-safe": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", - "integrity": "sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus=" + "integrity": "sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus=", + "dev": true }, "json3": { - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/json3/-/json3-3.3.2.tgz", - "integrity": "sha1-PAQ0dD35Pi9cQq7nsZvLSDV19OE=" + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/json3/-/json3-3.3.3.tgz", + "integrity": "sha512-c7/8mbUsKigAbLkD5B010BK4D9LZm7A1pNItkEwiUZRpIN66exu/e7YQWysGun+TRKaJp8MhemM+VkfWv42aCA==", + "dev": true }, "json5": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/json5/-/json5-1.0.1.tgz", "integrity": "sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow==", + "dev": true, "requires": { "minimist": "^1.2.0" } }, "jsonfile": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-3.0.1.tgz", - "integrity": "sha1-pezG9l9T9mLEQVx2daAzHQmS7GY=", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-4.0.0.tgz", + "integrity": 
"sha1-h3Gq4HmbZAdrdmQPygWPnBDjPss=", + "dev": true, "requires": { "graceful-fs": "^4.1.6" } @@ -8042,12 +12413,14 @@ "jsonify": { "version": "0.0.0", "resolved": "https://registry.npmjs.org/jsonify/-/jsonify-0.0.0.tgz", - "integrity": "sha1-LHS27kHZPKUbe1qu6PUDYx0lKnM=" + "integrity": "sha1-LHS27kHZPKUbe1qu6PUDYx0lKnM=", + "dev": true }, "jsprim": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.1.tgz", "integrity": "sha1-MT5mvB5cwG5Di8G3SZwuXFastqI=", + "dev": true, "requires": { "assert-plus": "1.0.0", "extsprintf": "1.3.0", @@ -8124,37 +12497,36 @@ "css-vendor": "^0.3.8" } }, - "jump.js": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/jump.js/-/jump.js-1.0.2.tgz", - "integrity": "sha1-4GQbR/QKOPITnCX9oFAL8o5DAVo=", - "dev": true + "jsx-ast-utils": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/jsx-ast-utils/-/jsx-ast-utils-2.2.3.tgz", + "integrity": "sha512-EdIHFMm+1BPynpKOpdPqiOsvnIrInRGJD7bzPZdPkjitQEqpdpUuFpq4T0npZFKTiB3RhWFdGN+oqOJIdhDhQA==", + "dev": true, + "requires": { + "array-includes": "^3.0.3", + "object.assign": "^4.1.0" + } }, - "junit-report-builder": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/junit-report-builder/-/junit-report-builder-1.3.1.tgz", - "integrity": "sha512-KTueBpPsmjfiyrAxxhKlEMwXb3aRmDHG5tRYwtRF3ujLQ7/e/5MH3b2p9ND2P84rU8z5dQq40vWJv6TtEdS16Q==", + "jwa": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/jwa/-/jwa-1.4.1.tgz", + "integrity": "sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA==", "dev": true, "requires": { - "date-format": "0.0.2", - "lodash": "^4.17.10", - "mkdirp": "^0.5.0", - "xmlbuilder": "^10.0.0" - }, - "dependencies": { - "xmlbuilder": { - "version": "10.1.1", - "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-10.1.1.tgz", - "integrity": "sha512-OyzrcFLL/nb6fMGHbiRDuPup9ljBycsdCypwuyg5AAHvyWzGfChJpCXMG88AGTIMFhGZ9RccFN1e6lhg3hkwKg==", - "dev": true - } + "buffer-equal-constant-time": "1.0.1", + "ecdsa-sig-formatter": "1.0.11", + "safe-buffer": "^5.0.1" } }, - "kew": { - "version": "0.7.0", - "resolved": "https://registry.npmjs.org/kew/-/kew-0.7.0.tgz", - "integrity": "sha1-edk9LTM2PW/dKXCzNdkUGtWR15s=", - "dev": true + "jws": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/jws/-/jws-3.2.2.tgz", + "integrity": "sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==", + "dev": true, + "requires": { + "jwa": "^1.4.1", + "safe-buffer": "^5.0.1" + } }, "keycode": { "version": "2.2.0", @@ -8164,40 +12536,43 @@ "killable": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/killable/-/killable-1.0.1.tgz", - "integrity": "sha512-LzqtLKlUwirEUyl/nicirVmNiPvYs7l5n8wOPP7fyJVpUPkvCnW/vuiXGpylGUlnPDnB7311rARzAt3Mhswpjg==" + "integrity": "sha512-LzqtLKlUwirEUyl/nicirVmNiPvYs7l5n8wOPP7fyJVpUPkvCnW/vuiXGpylGUlnPDnB7311rARzAt3Mhswpjg==", + "dev": true }, "kind-of": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.2.tgz", - "integrity": "sha512-s5kLOcnH0XqDO+FvuaLX8DDjZ18CGFk7VygH40QoKPUQhW4e2rvM0rwUq0t8IQDOwYSeLK01U90OjzBTme2QqA==" + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", + "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==" }, - "klaw": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/klaw/-/klaw-1.3.1.tgz", - "integrity": 
"sha1-QIhDO0azsbolnXh4XY6W9zugJDk=", - "requires": { - "graceful-fs": "^4.1.9" - } + "kleur": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/kleur/-/kleur-3.0.3.tgz", + "integrity": "sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==", + "dev": true }, - "latest-version": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/latest-version/-/latest-version-3.1.0.tgz", - "integrity": "sha1-ogU4P+oyKzO1rjsYq+4NwvNW7hU=", + "last-call-webpack-plugin": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/last-call-webpack-plugin/-/last-call-webpack-plugin-3.0.0.tgz", + "integrity": "sha512-7KI2l2GIZa9p2spzPIVZBYyNKkN+e/SQPpnjlTiPhdbDW3F86tdKKELxKpzJ5sgU19wQWsACULZmpTPYHeWO5w==", + "dev": true, "requires": { - "package-json": "^4.0.0" + "lodash": "^4.17.5", + "webpack-sources": "^1.1.0" } }, "lazy-cache": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/lazy-cache/-/lazy-cache-1.0.4.tgz", - "integrity": "sha1-odePw6UEdMuAhF07O24dpJpEbo4=" + "integrity": "sha1-odePw6UEdMuAhF07O24dpJpEbo4=", + "dev": true }, "lcid": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/lcid/-/lcid-1.0.0.tgz", - "integrity": "sha1-MIrMr6C8SDo4Z7S28rlQYlHRuDU=", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/lcid/-/lcid-2.0.0.tgz", + "integrity": "sha512-avPEb8P8EGnwXKClwsNUgryVjllcRqtMYa49NTsbQagYuT1DcXnl1915oxWjoyGrXR6zH/Y0Zc96xWsPcoDKeA==", + "dev": true, "requires": { - "invert-kv": "^1.0.0" + "invert-kv": "^2.0.0" } }, "lcov-parse": { @@ -8209,71 +12584,45 @@ "left-pad": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/left-pad/-/left-pad-1.3.0.tgz", - "integrity": "sha512-XI5MPzVNApjAyhQzphX8BkmKsKUxD4LdyK24iZeQGinBN9yTQT3bFlCBy/aVx2HrNcqQGsdot8ghrjyrvMCoEA==" + "integrity": "sha512-XI5MPzVNApjAyhQzphX8BkmKsKUxD4LdyK24iZeQGinBN9yTQT3bFlCBy/aVx2HrNcqQGsdot8ghrjyrvMCoEA==", + "dev": true }, "leven": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/leven/-/leven-2.1.0.tgz", - "integrity": "sha1-wuep93IJTe6dNCAq6KzORoeHVYA=" + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/leven/-/leven-3.1.0.tgz", + "integrity": "sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==", + "dev": true + }, + "levenary": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/levenary/-/levenary-1.1.1.tgz", + "integrity": "sha512-mkAdOIt79FD6irqjYSs4rdbnlT5vRonMEvBVPVb3XmevfS8kgRXwfes0dhPdEtzTWD/1eNE/Bm/G1iRt6DcnQQ==", + "dev": true, + "requires": { + "leven": "^3.1.0" + } }, "levn": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/levn/-/levn-0.3.0.tgz", "integrity": "sha1-OwmSTt+fCDwEkP3UwLxEIeBHZO4=", + "dev": true, "requires": { "prelude-ls": "~1.1.2", "type-check": "~0.3.2" } }, - "lighthouse-logger": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/lighthouse-logger/-/lighthouse-logger-1.2.0.tgz", - "integrity": "sha512-wzUvdIeJZhRsG6gpZfmSCfysaxNEr43i+QT+Hie94wvHDKFLi4n7C2GqZ4sTC+PH5b5iktmXJvU87rWvhP3lHw==", - "dev": true, - "requires": { - "debug": "^2.6.8", - "marky": "^1.2.0" - }, - "dependencies": { - "debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "dev": true, - "requires": { - "ms": "2.0.0" - } - } - } - }, - "linkify-it": { - "version": "2.1.0", - "resolved": 
"https://registry.npmjs.org/linkify-it/-/linkify-it-2.1.0.tgz", - "integrity": "sha512-4REs8/062kV2DSHxNfq5183zrqXMl7WP0WzABH9IeJI+NLm429FgE1PDecltYfnOoFDFlZGh2T8PfZn0r+GTRg==", - "requires": { - "uc.micro": "^1.0.1" - } - }, - "load-bmfont": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/load-bmfont/-/load-bmfont-1.4.0.tgz", - "integrity": "sha512-kT63aTAlNhZARowaNYcY29Fn/QYkc52M3l6V1ifRcPewg2lvUZDAj7R6dXjOL9D0sict76op3T5+odumDSF81g==", - "dev": true, - "requires": { - "buffer-equal": "0.0.1", - "mime": "^1.3.4", - "parse-bmfont-ascii": "^1.0.3", - "parse-bmfont-binary": "^1.0.5", - "parse-bmfont-xml": "^1.1.4", - "phin": "^2.9.1", - "xhr": "^2.0.1", - "xtend": "^4.0.0" - } + "lines-and-columns": { + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.1.6.tgz", + "integrity": "sha1-HADHQ7QzzQpOgHWPe2SldEDZ/wA=", + "dev": true }, "load-json-file": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-1.1.0.tgz", "integrity": "sha1-lWkFcI1YtLq0wiYbBPWfMcmTdMA=", + "dev": true, "requires": { "graceful-fs": "^4.1.2", "parse-json": "^2.2.0", @@ -8282,15 +12631,40 @@ "strip-bom": "^2.0.0" } }, + "loader-fs-cache": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/loader-fs-cache/-/loader-fs-cache-1.0.3.tgz", + "integrity": "sha512-ldcgZpjNJj71n+2Mf6yetz+c9bM4xpKtNds4LbqXzU/PTdeAX0g3ytnU1AJMEcTk2Lex4Smpe3Q/eCTsvUBxbA==", + "dev": true, + "requires": { + "find-cache-dir": "^0.1.1", + "mkdirp": "^0.5.1" + }, + "dependencies": { + "find-cache-dir": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-0.1.1.tgz", + "integrity": "sha1-yN765XyKUqinhPnjHFfHQumToLk=", + "dev": true, + "requires": { + "commondir": "^1.0.1", + "mkdirp": "^0.5.1", + "pkg-dir": "^1.0.0" + } + } + } + }, "loader-runner": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/loader-runner/-/loader-runner-2.3.1.tgz", - "integrity": "sha512-By6ZFY7ETWOc9RFaAIb23IjJVcM4dvJC/N57nmdz9RSkMXvAXGI7SyVlAw3v8vjtDRlqThgVDVmTnr9fqMlxkw==" + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/loader-runner/-/loader-runner-2.4.0.tgz", + "integrity": "sha512-Jsmr89RcXGIwivFY21FcRrisYZfvLMTWx5kOLc+JTxtpBOG6xML0vzbc6SEQG2FO9/4Fc3wW4LVcB5DmGflaRw==", + "dev": true }, "loader-utils": { "version": "1.2.3", "resolved": "https://registry.npmjs.org/loader-utils/-/loader-utils-1.2.3.tgz", "integrity": "sha512-fkpz8ejdnEMG3s37wGL07iSBDg99O9D5yflE9RGNH3hRdx9SOwYfnGYdZOUIZitN8E+E2vkq3MUMYMvPYl5ZZA==", + "dev": true, "requires": { "big.js": "^5.2.2", "emojis-list": "^2.0.0", @@ -8301,15 +12675,16 @@ "version": "2.0.0", "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-2.0.0.tgz", "integrity": "sha1-K1aLJl7slExtnA3pw9u7ygNUzY4=", + "dev": true, "requires": { "p-locate": "^2.0.0", "path-exists": "^3.0.0" } }, "lodash": { - "version": "4.17.11", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.11.tgz", - "integrity": "sha512-cQKh8igo5QUhZ7lg38DYWAxMvjSAKG0A8wGSVimP07SIUEK2UO+arSRKbRZWtelMtN5V0Hkwh5ryOto/SshYIg==" + "version": "4.17.15", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz", + "integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==" }, "lodash._arraycopy": { "version": "3.0.0", @@ -8374,19 +12749,9 @@ "lodash._reinterpolate": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/lodash._reinterpolate/-/lodash._reinterpolate-3.0.0.tgz", - 
"integrity": "sha1-DM8tiRZq8Ds2Y8eWU4t1rG4RTZ0=" - }, - "lodash.assign": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/lodash.assign/-/lodash.assign-4.2.0.tgz", - "integrity": "sha1-DZnzzNem0mHRm9rrkkUAXShYCOc=", + "integrity": "sha1-DM8tiRZq8Ds2Y8eWU4t1rG4RTZ0=", "dev": true }, - "lodash.camelcase": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz", - "integrity": "sha1-soqmKIorn8ZRA1x3EfZathkDMaY=" - }, "lodash.clonedeep": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/lodash.clonedeep/-/lodash.clonedeep-3.0.2.tgz", @@ -8402,22 +12767,17 @@ "resolved": "https://registry.npmjs.org/lodash.debounce/-/lodash.debounce-4.0.8.tgz", "integrity": "sha1-gteb/zCmfEAF/9XiUVMArZyk168=" }, - "lodash.defaults": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/lodash.defaults/-/lodash.defaults-4.2.0.tgz", - "integrity": "sha1-0JF4cW/+pN3p5ft7N/bwgCJ0WAw=" - }, - "lodash.endswith": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/lodash.endswith/-/lodash.endswith-4.2.1.tgz", - "integrity": "sha1-/tWawXOO0+I27dcGTsRWRIs3vAk=" - }, "lodash.escape": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/lodash.escape/-/lodash.escape-4.0.1.tgz", "integrity": "sha1-yQRGkMIeBClL6qUXcS/e0fqI3pg=", "dev": true }, + "lodash.flatten": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/lodash.flatten/-/lodash.flatten-4.4.0.tgz", + "integrity": "sha1-8xwiIlqWMtK7+OSt2+8kCqdlph8=" + }, "lodash.flattendeep": { "version": "4.4.0", "resolved": "https://registry.npmjs.org/lodash.flattendeep/-/lodash.flattendeep-4.4.0.tgz", @@ -8429,6 +12789,11 @@ "resolved": "https://registry.npmjs.org/lodash.get/-/lodash.get-4.4.2.tgz", "integrity": "sha1-LRd/ZS+jHpObRDjVNBSZ36OCXpk=" }, + "lodash.groupby": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/lodash.groupby/-/lodash.groupby-4.6.0.tgz", + "integrity": "sha1-Cwih3PaDl8OXhVwyOXg4Mt90A9E=" + }, "lodash.isarguments": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/lodash.isarguments/-/lodash.isarguments-3.1.0.tgz", @@ -8451,11 +12816,6 @@ "resolved": "https://registry.npmjs.org/lodash.isfunction/-/lodash.isfunction-3.0.9.tgz", "integrity": "sha512-AirXNj15uRIMMPihnkInB4i3NHeb4iBtNg9WRWuK2o31S+ePwwNmDPaTL3o7dTJ+VXNZim7rFs4rxN4YU1oUJw==" }, - "lodash.isstring": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/lodash.isstring/-/lodash.isstring-4.0.1.tgz", - "integrity": "sha1-1SfftUVuynzJu5XV2ur4i6VKVFE=" - }, "lodash.keys": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/lodash.keys/-/lodash.keys-3.1.2.tgz", @@ -8470,33 +12830,32 @@ "lodash.memoize": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz", - "integrity": "sha1-vMbEmkKihA7Zl/Mj6tpezRguC/4=" + "integrity": "sha1-vMbEmkKihA7Zl/Mj6tpezRguC/4=", + "dev": true }, "lodash.sortby": { "version": "4.7.0", "resolved": "https://registry.npmjs.org/lodash.sortby/-/lodash.sortby-4.7.0.tgz", - "integrity": "sha1-7dFMgk4sycHgsKG0K7UhBRakJDg=" - }, - "lodash.startswith": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/lodash.startswith/-/lodash.startswith-4.2.1.tgz", - "integrity": "sha1-xZjErc4YiiflMUVzHNxsDnF3YAw=" + "integrity": "sha1-7dFMgk4sycHgsKG0K7UhBRakJDg=", + "dev": true }, "lodash.template": { - "version": "4.4.0", - "resolved": "https://registry.npmjs.org/lodash.template/-/lodash.template-4.4.0.tgz", - "integrity": 
"sha1-5zoDhcg1VZF0bgILmWecaQ5o+6A=", + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/lodash.template/-/lodash.template-4.5.0.tgz", + "integrity": "sha512-84vYFxIkmidUiFxidA/KjjH9pAycqW+h980j7Fuz5qxRtO9pgB7MDFTdys1N7A5mcucRiDyEq4fusljItR1T/A==", + "dev": true, "requires": { - "lodash._reinterpolate": "~3.0.0", + "lodash._reinterpolate": "^3.0.0", "lodash.templatesettings": "^4.0.0" } }, "lodash.templatesettings": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/lodash.templatesettings/-/lodash.templatesettings-4.1.0.tgz", - "integrity": "sha1-K01OlbpEDZFf8IvImeRVNmZxMxY=", + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/lodash.templatesettings/-/lodash.templatesettings-4.2.0.tgz", + "integrity": "sha512-stgLz+i3Aa9mZgnjr/O+v9ruKZsPsndy7qPZOchbqk2cnTU1ZaldKK+v7m54WoKIyxiuMZTKT2H81F8BeAc3ZQ==", + "dev": true, "requires": { - "lodash._reinterpolate": "~3.0.0" + "lodash._reinterpolate": "^3.0.0" } }, "lodash.toarray": { @@ -8508,7 +12867,8 @@ "lodash.uniq": { "version": "4.5.0", "resolved": "https://registry.npmjs.org/lodash.uniq/-/lodash.uniq-4.5.0.tgz", - "integrity": "sha1-0CJTc662Uq3BvILklFM5qEJ1R3M=" + "integrity": "sha1-0CJTc662Uq3BvILklFM5qEJ1R3M=", + "dev": true }, "log-driver": { "version": "1.2.7", @@ -8517,14 +12877,10 @@ "dev": true }, "loglevel": { - "version": "1.6.1", - "resolved": "https://registry.npmjs.org/loglevel/-/loglevel-1.6.1.tgz", - "integrity": "sha1-4PyVEztu8nbNyIh82vJKpvFW+Po=" - }, - "longest": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/longest/-/longest-1.0.1.tgz", - "integrity": "sha1-MKCy2jj3N3DoKUoNIuZiXtd9AJc=" + "version": "1.6.7", + "resolved": "https://registry.npmjs.org/loglevel/-/loglevel-1.6.7.tgz", + "integrity": "sha512-cY2eLFrQSAfVPhCgH1s7JI73tMbg9YC3v3+ZHVW67sBS7UxWzNEk/ZBbSfLykBWHp33dqqtOv82gjhKEi81T/A==", + "dev": true }, "loose-envify": { "version": "1.4.0", @@ -8538,6 +12894,7 @@ "version": "1.6.0", "resolved": "https://registry.npmjs.org/loud-rejection/-/loud-rejection-1.6.0.tgz", "integrity": "sha1-W0b4AUft7leIcPCG0Eghz5mOVR8=", + "dev": true, "requires": { "currently-unhandled": "^0.4.1", "signal-exit": "^3.0.0" @@ -8546,7 +12903,8 @@ "lower-case": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/lower-case/-/lower-case-1.1.4.tgz", - "integrity": "sha1-miyr0bno4K6ZOkv31YdcOcQujqw=" + "integrity": "sha1-miyr0bno4K6ZOkv31YdcOcQujqw=", + "dev": true }, "lower-case-first": { "version": "1.0.2", @@ -8557,49 +12915,63 @@ "lower-case": "^1.1.2" } }, - "lowercase-keys": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-1.0.1.tgz", - "integrity": "sha512-G2Lj61tXDnVFFOi8VZds+SoQjtQC3dgokKdDG2mTm1tx4m50NUHBOZSBwQQHyy0V12A0JTG4icfZQH+xPyh8VA==" - }, "lru-cache": { - "version": "4.1.5", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-4.1.5.tgz", - "integrity": "sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g==", + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", + "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", + "dev": true, "requires": { - "pseudomap": "^1.0.2", - "yallist": "^2.1.2" + "yallist": "^3.0.2" } }, "make-dir": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-1.3.0.tgz", - "integrity": "sha512-2w31R7SJtieJJnQtGc7RVL2StM2vGYVfqUOvUDxH6bC6aJTxPxTF0GnIgCyu7tjockiUWAYQRbxa7vKn34s5sQ==", + "version": "2.1.0", + 
"resolved": "https://registry.npmjs.org/make-dir/-/make-dir-2.1.0.tgz", + "integrity": "sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA==", + "dev": true, "requires": { - "pify": "^3.0.0" + "pify": "^4.0.1", + "semver": "^5.6.0" }, "dependencies": { "pify": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", - "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=" + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz", + "integrity": "sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==", + "dev": true } } }, "make-error": { - "version": "1.3.5", - "resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.5.tgz", - "integrity": "sha512-c3sIjNUow0+8swNwVpqoH4YCShKNFkMaw6oH1mNS2haDZQqkeZFlHS3dhoeEbKKmJB4vXpJucU6oH75aDYeE9g==", + "version": "1.3.6", + "resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz", + "integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==", "dev": true }, "makeerror": { "version": "1.0.11", "resolved": "https://registry.npmjs.org/makeerror/-/makeerror-1.0.11.tgz", "integrity": "sha1-4BpckQnyr3lmDk6LlYd5AYT1qWw=", + "dev": true, "requires": { "tmpl": "1.0.x" } }, + "mamacro": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/mamacro/-/mamacro-0.0.3.tgz", + "integrity": "sha512-qMEwh+UujcQ+kbz3T6V+wAmO2U8veoq2w+3wY8MquqwVA3jChfwY+Tk52GZKDfACEPjuZ7r2oJLejwpt8jtwTA==", + "dev": true + }, + "map-age-cleaner": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/map-age-cleaner/-/map-age-cleaner-0.1.3.tgz", + "integrity": "sha512-bJzx6nMoP6PDLPBFmg7+xRKeFZvFboMrGlxmNj9ClvX53KrmvM5bXFXEWjbz4cz1AFn+jWJ9z/DJSz7hrs0w3w==", + "dev": true, + "requires": { + "p-defer": "^1.0.0" + } + }, "map-cache": { "version": "0.2.2", "resolved": "https://registry.npmjs.org/map-cache/-/map-cache-0.2.2.tgz", @@ -8608,7 +12980,8 @@ "map-obj": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-1.0.1.tgz", - "integrity": "sha1-2TPOuSBdgr3PSIb2dCvcK03qFG0=" + "integrity": "sha1-2TPOuSBdgr3PSIb2dCvcK03qFG0=", + "dev": true }, "map-visit": { "version": "1.0.0", @@ -8618,16 +12991,13 @@ "object-visit": "^1.0.0" } }, - "markdown-it": { - "version": "8.4.2", - "resolved": "https://registry.npmjs.org/markdown-it/-/markdown-it-8.4.2.tgz", - "integrity": "sha512-GcRz3AWTqSUphY3vsUqQSFMbgR38a4Lh3GWlHRh/7MRwz8mcu9n2IO7HOh+bXHrR9kOPDl5RNCaEsrneb+xhHQ==", + "markdown-to-jsx": { + "version": "6.10.3", + "resolved": "https://registry.npmjs.org/markdown-to-jsx/-/markdown-to-jsx-6.10.3.tgz", + "integrity": "sha512-PSoUyLnW/xoW6RsxZrquSSz5eGEOTwa15H5eqp3enmrp8esmgDJmhzd6zmQ9tgAA9TxJzx1Hmf3incYU/IamoQ==", "requires": { - "argparse": "^1.0.7", - "entities": "~1.1.1", - "linkify-it": "^2.0.0", - "mdurl": "^1.0.1", - "uc.micro": "^1.0.5" + "prop-types": "^15.6.2", + "unquote": "^1.1.0" } }, "marked": { @@ -8637,79 +13007,58 @@ "dev": true }, "marked-terminal": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/marked-terminal/-/marked-terminal-3.1.1.tgz", - "integrity": "sha512-7UBFww1rdx0w9HehLMCVYa8/AxXaiDigDfMsJcj82/wgLQG9cj+oiMAVlJpeWD57VFJY2OYY+bKeEVIjIlxi+w==", + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/marked-terminal/-/marked-terminal-3.3.0.tgz", + "integrity": "sha512-+IUQJ5VlZoAFsM5MHNT7g3RHSkA3eETqhRCdXv4niUMAKHQ7lb1yvAcuGPmm4soxhmtX13u4Li6ZToXtvSEH+A==", "dev": true, "requires": { + 
"ansi-escapes": "^3.1.0", "cardinal": "^2.1.1", "chalk": "^2.4.1", "cli-table": "^0.3.1", - "lodash.assign": "^4.2.0", - "node-emoji": "^1.4.1" - }, - "dependencies": { - "chalk": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.1.tgz", - "integrity": "sha512-ObN6h1v2fTJSmUXoS3nMQ92LbDK9be4TV+6G+omQlGJFdcUX5heKi1LZ1YnRMIgwTLEj3E24bT6tYni50rlCfQ==", - "dev": true, - "requires": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" - } - } + "node-emoji": "^1.4.1", + "supports-hyperlinks": "^1.0.1" } }, - "marky": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/marky/-/marky-1.2.1.tgz", - "integrity": "sha512-md9k+Gxa3qLH6sUKpeC2CNkJK/Ld+bEz5X96nYwloqphQE0CKCVEKco/6jxEZixinqNdz5RFi/KaCyfbMDMAXQ==", - "dev": true - }, - "math-expression-evaluator": { - "version": "1.2.17", - "resolved": "https://registry.npmjs.org/math-expression-evaluator/-/math-expression-evaluator-1.2.17.tgz", - "integrity": "sha1-3oGf282E3M2PrlnGrreWFbnSZqw=" - }, - "math-random": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/math-random/-/math-random-1.0.1.tgz", - "integrity": "sha1-izqsWIuKZuSXXjzepn97sylgH6w=" - }, "md5.js": { "version": "1.3.5", "resolved": "https://registry.npmjs.org/md5.js/-/md5.js-1.3.5.tgz", "integrity": "sha512-xitP+WxNPcTTOgnTJcrhM0xvdPepipPSf3I8EIpGKeFLjt3PlJLIDG3u8EX53ZIubkb+5U2+3rELYpEhHhzdkg==", + "dev": true, "requires": { "hash-base": "^3.0.0", "inherits": "^2.0.1", "safe-buffer": "^5.1.2" } }, - "mdurl": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/mdurl/-/mdurl-1.0.1.tgz", - "integrity": "sha1-/oWy7HWlkDfyrf7BAP1sYBdhFS4=" + "mdn-data": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.4.tgz", + "integrity": "sha512-iV3XNKw06j5Q7mi6h+9vbx23Tv7JkjEVgKHW4pimwyDGWm0OIQntJJ+u1C6mg6mK1EaTv42XQ7w76yuzH7M2cA==", + "dev": true }, "media-typer": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", - "integrity": "sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g=" + "integrity": "sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g=", + "dev": true }, "mem": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/mem/-/mem-1.1.0.tgz", - "integrity": "sha1-Xt1StIXKHZAP5kiVUFOZoN+kX3Y=", + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/mem/-/mem-4.3.0.tgz", + "integrity": "sha512-qX2bG48pTqYRVmDB37rn/6PT7LcR8T7oAX3bf99u1Tt1nzxYfxkgqDwUwolPlXweM0XzBOBFzSx4kfp7KP1s/w==", + "dev": true, "requires": { - "mimic-fn": "^1.0.0" + "map-age-cleaner": "^0.1.1", + "mimic-fn": "^2.0.0", + "p-is-promise": "^2.0.0" } }, "memory-fs": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/memory-fs/-/memory-fs-0.4.1.tgz", "integrity": "sha1-OpoguEYlI+RHz7x+i7gO1me/xVI=", + "dev": true, "requires": { "errno": "^0.1.3", "readable-stream": "^2.0.1" @@ -8719,6 +13068,7 @@ "version": "3.7.0", "resolved": "https://registry.npmjs.org/meow/-/meow-3.7.0.tgz", "integrity": "sha1-cstmi0JSKCkKu/qFaJJYcwioAfs=", + "dev": true, "requires": { "camelcase-keys": "^2.0.0", "decamelize": "^1.1.2", @@ -8732,28 +13082,57 @@ "trim-newlines": "^1.0.0" } }, - "merge": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/merge/-/merge-1.2.1.tgz", - "integrity": "sha512-VjFo4P5Whtj4vsLzsYBu5ayHhoHJ0UqNm7ibvShmbmoz7tGi0vXaoJbGdB+GmDMLUdg8DpQXEIeVDAe8MaABvQ==" + "merge-deep": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/merge-deep/-/merge-deep-3.0.2.tgz", + "integrity": 
"sha512-T7qC8kg4Zoti1cFd8Cr0M+qaZfOwjlPDEdZIIPPB2JZctjaPM4fX+i7HOId69tAti2fvO6X5ldfYUONDODsrkA==", + "dev": true, + "requires": { + "arr-union": "^3.1.0", + "clone-deep": "^0.2.4", + "kind-of": "^3.0.2" + }, + "dependencies": { + "kind-of": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", + "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", + "dev": true, + "requires": { + "is-buffer": "^1.1.5" + } + } + } }, "merge-descriptors": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz", - "integrity": "sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E=" + "integrity": "sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E=", + "dev": true + }, + "merge-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", + "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", + "dev": true }, - "merge-stream": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-1.0.1.tgz", - "integrity": "sha1-QEEgLVCKNCugAXQAjfDCUbjBNeE=", - "requires": { - "readable-stream": "^2.0.1" - } + "merge2": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.3.0.tgz", + "integrity": "sha512-2j4DAdlBOkiSZIsaXk4mTE3sRS02yBHAtfy127xRV3bQUFqXkjHCHLW6Scv7DwNRbIWNHH8zpnz9zMaKXIdvYw==", + "dev": true }, "methods": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", - "integrity": "sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4=" + "integrity": "sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4=", + "dev": true + }, + "microevent.ts": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/microevent.ts/-/microevent.ts-0.1.1.tgz", + "integrity": "sha512-jo1OfR4TaEwd5HOrt5+tAZ9mqT4jmpNAusXtyfNzqVm9uiSYFZlKM1wYL4oU7azZW/PxQW53wM0S6OR1JHNa2g==", + "dev": true }, "micromatch": { "version": "3.1.10", @@ -8779,6 +13158,7 @@ "version": "4.0.1", "resolved": "https://registry.npmjs.org/miller-rabin/-/miller-rabin-4.0.1.tgz", "integrity": "sha512-115fLhvZVqWwHPbClyntxEVfVDfl9DLLTuJvq3g2O/Oxi8AiNouAHvDSzHS0viUJc+V5vm3eq91Xwqn9dp4jRA==", + "dev": true, "requires": { "bn.js": "^4.0.0", "brorand": "^1.0.1" @@ -8787,25 +13167,29 @@ "mime": { "version": "1.6.0", "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", - "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==" + "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==", + "dev": true }, "mime-db": { "version": "1.37.0", "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.37.0.tgz", - "integrity": "sha512-R3C4db6bgQhlIhPU48fUtdVmKnflq+hRdad7IyKhtFj06VPNVdk2RhiYL3UjQIlso8L+YxAtFkobT0VK+S/ybg==" + "integrity": "sha512-R3C4db6bgQhlIhPU48fUtdVmKnflq+hRdad7IyKhtFj06VPNVdk2RhiYL3UjQIlso8L+YxAtFkobT0VK+S/ybg==", + "dev": true }, "mime-types": { "version": "2.1.21", "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.21.tgz", "integrity": "sha512-3iL6DbwpyLzjR3xHSFNFeb9Nz/M8WDkX33t1GFQnFOllWk8pOrh/LSrB5OXlnlW5P9LH73X6loW/eogc+F5lJg==", + "dev": true, "requires": { "mime-db": "~1.37.0" } }, "mimic-fn": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-1.2.0.tgz", - "integrity": "sha512-jf84uxzwiuiIVKiOLpfYk7N46TSy8ubTonmneY9vrpHNAnp0QBt2BxWV9dO3/j+BoVAb+a5G6YDPW3M5HOdMWQ==" + "version": "2.1.0", + "resolved": 
"https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", + "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", + "dev": true }, "min-document": { "version": "2.19.0", @@ -8815,33 +13199,94 @@ "dom-walk": "^0.1.0" } }, + "mini-css-extract-plugin": { + "version": "0.9.0", + "resolved": "https://registry.npmjs.org/mini-css-extract-plugin/-/mini-css-extract-plugin-0.9.0.tgz", + "integrity": "sha512-lp3GeY7ygcgAmVIcRPBVhIkf8Us7FZjA+ILpal44qLdSu11wmjKQ3d9k15lfD7pO4esu9eUIAW7qiYIBppv40A==", + "dev": true, + "requires": { + "loader-utils": "^1.1.0", + "normalize-url": "1.9.1", + "schema-utils": "^1.0.0", + "webpack-sources": "^1.1.0" + } + }, "minimalistic-assert": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz", - "integrity": "sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A==" + "integrity": "sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A==", + "dev": true }, "minimalistic-crypto-utils": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/minimalistic-crypto-utils/-/minimalistic-crypto-utils-1.0.1.tgz", - "integrity": "sha1-9sAMHAsIIkblxNmd+4x8CDsrWCo=" + "integrity": "sha1-9sAMHAsIIkblxNmd+4x8CDsrWCo=", + "dev": true }, "minimatch": { "version": "3.0.4", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", + "dev": true, "requires": { "brace-expansion": "^1.1.7" } }, "minimist": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.0.tgz", - "integrity": "sha1-o1AIsg9BOD7sH7kU9M1d95omQoQ=" + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz", + "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==", + "dev": true + }, + "minipass": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.1.1.tgz", + "integrity": "sha512-UFqVihv6PQgwj8/yTGvl9kPz7xIAY+R5z6XYjRInD3Gk3qx6QGSD6zEcpeG4Dy/lQnv1J6zv8ejV90hyYIKf3w==", + "dev": true, + "requires": { + "yallist": "^4.0.0" + }, + "dependencies": { + "yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + } + } + }, + "minipass-collect": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/minipass-collect/-/minipass-collect-1.0.2.tgz", + "integrity": "sha512-6T6lH0H8OG9kITm/Jm6tdooIbogG9e0tLgpY6mphXSm/A9u8Nq1ryBG+Qspiub9LjWlBPsPS3tWQ/Botq4FdxA==", + "dev": true, + "requires": { + "minipass": "^3.0.0" + } + }, + "minipass-flush": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/minipass-flush/-/minipass-flush-1.0.5.tgz", + "integrity": "sha512-JmQSYYpPUqX5Jyn1mXaRwOda1uQ8HP5KAT/oDSLCzt1BYRhQU0/hDtsB1ufZfEEzMZ9aAVmsBw8+FWsIXlClWw==", + "dev": true, + "requires": { + "minipass": "^3.0.0" + } + }, + "minipass-pipeline": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/minipass-pipeline/-/minipass-pipeline-1.2.2.tgz", + "integrity": "sha512-3JS5A2DKhD2g0Gg8x3yamO0pj7YeKGwVlDS90pF++kxptwx/F+B//roxf9SqYil5tQo65bijy+dAuAFZmYOouA==", + "dev": true, + "requires": { + "minipass": "^3.0.0" + } }, "mississippi": { - 
"version": "2.0.0", - "resolved": "https://registry.npmjs.org/mississippi/-/mississippi-2.0.0.tgz", - "integrity": "sha512-zHo8v+otD1J10j/tC+VNoGK9keCuByhKovAvdn74dmxJl9+mWHnx6EMsDN4lgRoMI/eYo2nchAxniIbUPb5onw==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/mississippi/-/mississippi-3.0.0.tgz", + "integrity": "sha512-x471SsVjUtBRtcvd4BzKE9kFC+/2TeWgKCgw0bZcw1b9l2X3QX5vCWgF+KaZaYm87Ss//rHnWryupDrgLvmSkA==", + "dev": true, "requires": { "concat-stream": "^1.5.0", "duplexify": "^3.4.2", @@ -8849,16 +13294,28 @@ "flush-write-stream": "^1.0.0", "from2": "^2.1.0", "parallel-transform": "^1.1.0", - "pump": "^2.0.1", + "pump": "^3.0.0", "pumpify": "^1.3.3", "stream-each": "^1.1.0", "through2": "^2.0.0" + }, + "dependencies": { + "pump": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", + "integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==", + "dev": true, + "requires": { + "end-of-stream": "^1.1.0", + "once": "^1.3.1" + } + } } }, "mixin-deep": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/mixin-deep/-/mixin-deep-1.3.1.tgz", - "integrity": "sha512-8ZItLHeEgaqEvd5lYBXfm4EZSFCX29Jb9K+lAHhDKzReKBQKj3R+7NOF6tjqYi9t4oI8VUfaWITJQm86wnXGNQ==", + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/mixin-deep/-/mixin-deep-1.3.2.tgz", + "integrity": "sha512-WRoDn//mXBiJ1H40rqa3vH0toePwSsGb45iInWlTySa+Uu4k3tYUSxa2v1KqAiLtvlrSzaExqS1gtk96A9zvEA==", "requires": { "for-in": "^1.0.2", "is-extendable": "^1.0.1" @@ -8874,21 +13331,33 @@ } } }, - "mkdirp": { - "version": "0.5.1", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", - "integrity": "sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=", + "mixin-object": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/mixin-object/-/mixin-object-2.0.1.tgz", + "integrity": "sha1-T7lJRB2rGCVA8f4DW6YOGUel5X4=", + "dev": true, "requires": { - "minimist": "0.0.8" + "for-in": "^0.1.3", + "is-extendable": "^0.1.1" }, "dependencies": { - "minimist": { - "version": "0.0.8", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz", - "integrity": "sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0=" + "for-in": { + "version": "0.1.8", + "resolved": "https://registry.npmjs.org/for-in/-/for-in-0.1.8.tgz", + "integrity": "sha1-2Hc5COMSVhCZUrH9ubP6hn0ndeE=", + "dev": true } } }, + "mkdirp": { + "version": "0.5.5", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.5.tgz", + "integrity": "sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ==", + "dev": true, + "requires": { + "minimist": "^1.2.5" + } + }, "moo": { "version": "0.4.3", "resolved": "https://registry.npmjs.org/moo/-/moo-0.4.3.tgz", @@ -8899,6 +13368,7 @@ "version": "1.0.1", "resolved": "https://registry.npmjs.org/move-concurrently/-/move-concurrently-1.0.1.tgz", "integrity": "sha1-viwAX9oy4LKa8fBdfEszIUxwH5I=", + "dev": true, "requires": { "aproba": "^1.1.1", "copy-concurrently": "^1.0.0", @@ -8917,6 +13387,7 @@ "version": "6.2.3", "resolved": "https://registry.npmjs.org/multicast-dns/-/multicast-dns-6.2.3.tgz", "integrity": "sha512-ji6J5enbMyGRHIAkAOu3WdV8nggqviKCEKtXcOqfphZZtQrmHKycfynJ2V7eVPUA4NhJ6V7Wf4TmGbTwKE9B6g==", + "dev": true, "requires": { "dns-packet": "^1.3.1", "thunky": "^1.0.2" @@ -8925,17 +13396,20 @@ "multicast-dns-service-types": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/multicast-dns-service-types/-/multicast-dns-service-types-1.1.0.tgz", - "integrity": 
"sha1-iZ8R2WhuXgXLkbNdXw5jt3PPyQE=" + "integrity": "sha1-iZ8R2WhuXgXLkbNdXw5jt3PPyQE=", + "dev": true }, "mute-stream": { - "version": "0.0.7", - "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.7.tgz", - "integrity": "sha1-MHXOk7whuPq0PhvE2n6BFe0ee6s=" + "version": "0.0.8", + "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.8.tgz", + "integrity": "sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==", + "dev": true }, "nan": { - "version": "2.12.1", - "resolved": "https://registry.npmjs.org/nan/-/nan-2.12.1.tgz", - "integrity": "sha512-JY7V6lRkStKcKTvHO5NVSQRv+RV+FIL5pvDoLiAtSL9pKlC5x9PKQcZDsq7m4FO4d57mkhC6Z+QhAh3Jdk5JFw==", + "version": "2.14.0", + "resolved": "https://registry.npmjs.org/nan/-/nan-2.14.0.tgz", + "integrity": "sha512-INOFj37C7k3AfaNTtX8RhsTw7qRy7eLET14cROi9+5HAVbbHuIWUHEauBv5qT4Av2tWasiTY1Jw6puUNqRJXQg==", + "dev": true, "optional": true }, "nanomatch": { @@ -8959,12 +13433,13 @@ "natural-compare": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", - "integrity": "sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc=" + "integrity": "sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc=", + "dev": true }, "nearley": { - "version": "2.16.0", - "resolved": "https://registry.npmjs.org/nearley/-/nearley-2.16.0.tgz", - "integrity": "sha512-Tr9XD3Vt/EujXbZBv6UAHYoLUSMQAxSsTnm9K3koXzjzNWY195NqALeyrzLZBKzAkL3gl92BcSogqrHjD8QuUg==", + "version": "2.19.0", + "resolved": "https://registry.npmjs.org/nearley/-/nearley-2.19.0.tgz", + "integrity": "sha512-2v52FTw7RPqieZr3Gth1luAXZR7Je6q3KaDHY5bjl/paDUdMu35fZ8ICNgiYJRr3tf3NMvIQQR1r27AvEr9CRA==", "dev": true, "requires": { "commander": "^2.19.0", @@ -8975,32 +13450,42 @@ } }, "negotiator": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.1.tgz", - "integrity": "sha1-KzJxhOiZIQEXeyhWP7XnECrNDKk=" + "version": "0.6.2", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.2.tgz", + "integrity": "sha512-hZXc7K2e+PgeI1eDBe/10Ard4ekbfrrqG8Ep+8Jmf4JID2bNg7NvCPOZN+kfF574pFQI7mum2AUqDidoKqcTOw==", + "dev": true }, "neo-async": { "version": "2.6.0", "resolved": "https://registry.npmjs.org/neo-async/-/neo-async-2.6.0.tgz", - "integrity": "sha512-MFh0d/Wa7vkKO3Y3LlacqAEeHK0mckVqzDieUKTT+KGxi+zIpeVsFxymkIiRpbpDziHc290Xr9A1O4Om7otoRA==" + "integrity": "sha512-MFh0d/Wa7vkKO3Y3LlacqAEeHK0mckVqzDieUKTT+KGxi+zIpeVsFxymkIiRpbpDziHc290Xr9A1O4Om7otoRA==", + "dev": true }, "next-tick": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/next-tick/-/next-tick-1.0.0.tgz", - "integrity": "sha1-yobR/ogoFpsBICCOPchCS524NCw=" + "integrity": "sha1-yobR/ogoFpsBICCOPchCS524NCw=", + "dev": true + }, + "nice-try": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/nice-try/-/nice-try-1.0.5.tgz", + "integrity": "sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==", + "dev": true }, "no-case": { "version": "2.3.2", "resolved": "https://registry.npmjs.org/no-case/-/no-case-2.3.2.tgz", "integrity": "sha512-rmTZ9kz+f3rCvK2TD1Ue/oZlns7OGoIWP4fc3llxxRXlOkHKoWPPWJOfFYpITabSow43QJbRIoHQXtt10VldyQ==", + "dev": true, "requires": { "lower-case": "^1.1.1" } }, "node-emoji": { - "version": "1.8.1", - "resolved": "https://registry.npmjs.org/node-emoji/-/node-emoji-1.8.1.tgz", - "integrity": "sha512-+ktMAh1Jwas+TnGodfCfjUbJKoANqPaJFN0z0iqh41eqD8dvguNzcitVSBSVK1pidz0AqGbLKcoVuVLRVZ/aVg==", + "version": "1.10.0", + "resolved": 
"https://registry.npmjs.org/node-emoji/-/node-emoji-1.10.0.tgz", + "integrity": "sha512-Yt3384If5H6BYGVHiHwTL+99OzJKHhgp82S8/dktEK73T26BazdgZ4JZh92xSVtGNJvz9UbXdNAc5hcrXV42vw==", "dev": true, "requires": { "lodash.toarray": "^4.4.0" @@ -9016,19 +13501,22 @@ } }, "node-forge": { - "version": "0.7.5", - "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.7.5.tgz", - "integrity": "sha512-MmbQJ2MTESTjt3Gi/3yG1wGpIMhUfcIypUCGtTizFR9IiccFwxSpfp0vtIZlkFclEqERemxfnSdZEMR9VqqEFQ==" + "version": "0.9.0", + "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.9.0.tgz", + "integrity": "sha512-7ASaDa3pD+lJ3WvXFsxekJQelBKRpne+GOVbLbtHYdd7pFspyeuJHnWfLplGf3SwKGbfs/aYl5V/JCIaHVUKKQ==", + "dev": true }, "node-int64": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/node-int64/-/node-int64-0.4.0.tgz", - "integrity": "sha1-h6kGXNs1XTGC2PlM4RGIuCXGijs=" + "integrity": "sha1-h6kGXNs1XTGC2PlM4RGIuCXGijs=", + "dev": true }, "node-libs-browser": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/node-libs-browser/-/node-libs-browser-2.1.0.tgz", - "integrity": "sha512-5AzFzdoIMb89hBGMZglEegffzgRg+ZFoUmisQ8HI4j1KDdpx13J0taNp2y9xPbur6W61gepGDDotGBVQ7mfUCg==", + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/node-libs-browser/-/node-libs-browser-2.2.1.tgz", + "integrity": "sha512-h/zcD8H9kaDZ9ALUWwlBUDo6TKF8a7qBSCSEGfjTVIYeqsioSKaAX+BN7NgiMGp6iSIXZ3PxgCu8KS3b71YK5Q==", + "dev": true, "requires": { "assert": "^1.1.1", "browserify-zlib": "^0.2.0", @@ -9037,10 +13525,10 @@ "constants-browserify": "^1.0.0", "crypto-browserify": "^3.11.0", "domain-browser": "^1.1.1", - "events": "^1.0.0", + "events": "^3.0.0", "https-browserify": "^1.0.0", "os-browserify": "^0.3.0", - "path-browserify": "0.0.0", + "path-browserify": "0.0.1", "process": "^0.11.10", "punycode": "^1.2.4", "querystring-es3": "^0.2.0", @@ -9051,42 +13539,50 @@ "timers-browserify": "^2.0.4", "tty-browserify": "0.0.0", "url": "^0.11.0", - "util": "^0.10.3", - "vm-browserify": "0.0.4" + "util": "^0.11.0", + "vm-browserify": "^1.0.1" }, "dependencies": { "punycode": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz", - "integrity": "sha1-wNWmOycYgArY4esPpSachN1BhF4=" + "integrity": "sha1-wNWmOycYgArY4esPpSachN1BhF4=", + "dev": true } } }, + "node-modules-regexp": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/node-modules-regexp/-/node-modules-regexp-1.0.0.tgz", + "integrity": "sha1-jZ2+KJZKSsVxLpExZCEHxx6Q7EA=", + "dev": true + }, "node-notifier": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/node-notifier/-/node-notifier-5.3.0.tgz", - "integrity": "sha512-AhENzCSGZnZJgBARsUjnQ7DnZbzyP+HxlVXuD0xqAnvL8q+OqtSX7lGg9e8nHzwXkMMXNdVeqq4E2M3EUAqX6Q==", + "version": "5.4.3", + "resolved": "https://registry.npmjs.org/node-notifier/-/node-notifier-5.4.3.tgz", + "integrity": "sha512-M4UBGcs4jeOK9CjTsYwkvH6/MzuUmGCyTW+kCY7uO+1ZVr0+FHGdPdIf5CCLqAaxnRrWidyoQlNkMIIVwbKB8Q==", + "dev": true, "requires": { "growly": "^1.3.0", + "is-wsl": "^1.1.0", "semver": "^5.5.0", "shellwords": "^0.1.1", "which": "^1.3.0" } }, - "node-resemble-js": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/node-resemble-js/-/node-resemble-js-0.2.0.tgz", - "integrity": "sha1-ijbGZ4ph5dhFX+xYAJsbAnGxkJo=", + "node-releases": { + "version": "1.1.50", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-1.1.50.tgz", + "integrity": 
"sha512-lgAmPv9eYZ0bGwUYAKlr8MG6K4CvWliWqnkcT2P8mMAgVrH3lqfBPorFlxiG1pHQnqmavJZ9vbMXUTNyMLbrgQ==", "dev": true, "requires": { - "jpeg-js": "0.2.0", - "pngjs": "~2.2.0" + "semver": "^6.3.0" }, "dependencies": { - "pngjs": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/pngjs/-/pngjs-2.2.0.tgz", - "integrity": "sha1-ZJZjYJoOurh8jwiz/nJASLUdnX8=", + "semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", "dev": true } } @@ -9095,6 +13591,7 @@ "version": "2.4.0", "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.4.0.tgz", "integrity": "sha512-9jjUFbTPfEy3R/ad/2oNbKtW9Hgovl5O1FvFWKkKblNXoN/Oou6+9+KKohPK13Yc3/TyunyWhJp6gvRNR/PPAw==", + "dev": true, "requires": { "hosted-git-info": "^2.1.4", "is-builtin-module": "^1.0.0", @@ -9106,6 +13603,7 @@ "version": "2.1.1", "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-2.1.1.tgz", "integrity": "sha1-GrKLVW4Zg2Oowab35vogE3/mrtk=", + "dev": true, "requires": { "remove-trailing-separator": "^1.0.1" } @@ -9113,7 +13611,8 @@ "normalize-range": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/normalize-range/-/normalize-range-0.1.2.tgz", - "integrity": "sha1-LRDAa9/TEuqXd2laTShDlFa3WUI=" + "integrity": "sha1-LRDAa9/TEuqXd2laTShDlFa3WUI=", + "dev": true }, "normalize-scroll-left": { "version": "0.1.2", @@ -9124,6 +13623,7 @@ "version": "1.9.1", "resolved": "https://registry.npmjs.org/normalize-url/-/normalize-url-1.9.1.tgz", "integrity": "sha1-LMDWazHqIwNkWENuNiDYWVTGbDw=", + "dev": true, "requires": { "object-assign": "^4.0.1", "prepend-http": "^1.0.0", @@ -9135,6 +13635,7 @@ "version": "2.0.2", "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-2.0.2.tgz", "integrity": "sha1-NakjLfo11wZ7TLLd8jV7GHFTbF8=", + "dev": true, "requires": { "path-key": "^2.0.0" } @@ -9143,6 +13644,7 @@ "version": "1.0.2", "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-1.0.2.tgz", "integrity": "sha512-WeBOdju8SnzPN5vTUJYxYUxLeXpCaVP5i5e0LF8fg7WORF2Wd7wFX/pk0tYZk7s8T+J7VLy0Da6J1+wCT0AtHg==", + "dev": true, "requires": { "boolbase": "~1.0.0" } @@ -9150,22 +13652,26 @@ "num2fraction": { "version": "1.2.2", "resolved": "https://registry.npmjs.org/num2fraction/-/num2fraction-1.2.2.tgz", - "integrity": "sha1-b2gragJ6Tp3fpFZM0lidHU5mnt4=" + "integrity": "sha1-b2gragJ6Tp3fpFZM0lidHU5mnt4=", + "dev": true }, "number-is-nan": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.1.tgz", - "integrity": "sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0=" + "integrity": "sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0=", + "dev": true }, "nwsapi": { - "version": "2.0.9", - "resolved": "https://registry.npmjs.org/nwsapi/-/nwsapi-2.0.9.tgz", - "integrity": "sha512-nlWFSCTYQcHk/6A9FFnfhKc14c3aFhfdNBXgo8Qgi9QTBu/qg3Ww+Uiz9wMzXd1T8GFxPc2QIHB6Qtf2XFryFQ==" + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/nwsapi/-/nwsapi-2.2.0.tgz", + "integrity": "sha512-h2AatdwYH+JHiZpv7pt/gSX1XoRGb7L/qSIeuqA6GwYoF9w1vP1cw42TO0aI2pNyshRK5893hNSl+1//vHK7hQ==", + "dev": true }, "oauth-sign": { "version": "0.9.0", "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz", - "integrity": "sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ==" + "integrity": "sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ==", 
+ "dev": true }, "object-assign": { "version": "4.1.1", @@ -9201,9 +13707,9 @@ } }, "object-hash": { - "version": "1.1.5", - "resolved": "https://registry.npmjs.org/object-hash/-/object-hash-1.1.5.tgz", - "integrity": "sha1-vdhE4DDQhhtpLKF1xsq2ho7CM9c=", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/object-hash/-/object-hash-2.0.3.tgz", + "integrity": "sha512-JPKn0GMu+Fa3zt3Bmr66JhokJU5BaNBIh4ZeTlaCBzrBsOeXzwcKKAK1tbLiPKgvwmPXsDvvLHoWh5Bm7ofIYg==", "dev": true }, "object-inspect": { @@ -9221,7 +13727,14 @@ "object-keys": { "version": "1.0.12", "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.0.12.tgz", - "integrity": "sha512-FTMyFUm2wBcGHnH2eXmz7tC6IwlqQZ6mVZ+6dm6vZ4IQIHjs6FdNsQBuKGPuUUUY6NfJw2PshC08Tn6LzLDOag==" + "integrity": "sha512-FTMyFUm2wBcGHnH2eXmz7tC6IwlqQZ6mVZ+6dm6vZ4IQIHjs6FdNsQBuKGPuUUUY6NfJw2PshC08Tn6LzLDOag==", + "dev": true + }, + "object-path": { + "version": "0.11.4", + "resolved": "https://registry.npmjs.org/object-path/-/object-path-0.11.4.tgz", + "integrity": "sha1-NwrnUvvzfePqcKhhwju6iRVpGUk=", + "dev": true }, "object-visit": { "version": "1.0.1", @@ -9255,24 +13768,54 @@ "has": "^1.0.3" } }, + "object.fromentries": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/object.fromentries/-/object.fromentries-2.0.1.tgz", + "integrity": "sha512-PUQv8Hbg3j2QX0IQYv3iAGCbGcu4yY4KQ92/dhA4sFSixBmSmp13UpDLs6jGK8rBtbmhNNIK99LD2k293jpiGA==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.15.0", + "function-bind": "^1.1.1", + "has": "^1.0.3" + }, + "dependencies": { + "es-abstract": { + "version": "1.15.0", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.15.0.tgz", + "integrity": "sha512-bhkEqWJ2t2lMeaJDuk7okMkJWI/yqgH/EoGwpcvv0XW9RWQsRspI4wt6xuyuvMvvQE3gg/D9HXppgk21w78GyQ==", + "dev": true, + "requires": { + "es-to-primitive": "^1.2.0", + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.0", + "is-callable": "^1.1.4", + "is-regex": "^1.0.4", + "object-inspect": "^1.6.0", + "object-keys": "^1.1.1", + "string.prototype.trimleft": "^2.1.0", + "string.prototype.trimright": "^2.1.0" + } + }, + "object-keys": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", + "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==", + "dev": true + } + } + }, "object.getownpropertydescriptors": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.0.3.tgz", "integrity": "sha1-h1jIRvW0B62rDyNuCYbxSwUcqhY=", + "dev": true, "requires": { "define-properties": "^1.1.2", "es-abstract": "^1.5.1" } }, - "object.omit": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/object.omit/-/object.omit-2.0.1.tgz", - "integrity": "sha1-Gpx0SCnznbuFjHbKNXmuKlTr0fo=", - "requires": { - "for-own": "^0.1.4", - "is-extendable": "^0.1.1" - } - }, "object.pick": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/object.pick/-/object.pick-1.3.0.tgz", @@ -9296,35 +13839,58 @@ "obuf": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/obuf/-/obuf-1.1.2.tgz", - "integrity": "sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg==" + "integrity": "sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg==", + "dev": true }, "on-finished": { "version": "2.3.0", "resolved": 
"https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz", "integrity": "sha1-IPEzZIGwg811M3mSoWlxqi2QaUc=", + "dev": true, "requires": { "ee-first": "1.1.1" } }, "on-headers": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/on-headers/-/on-headers-1.0.1.tgz", - "integrity": "sha1-ko9dD0cNSTQmUepnlLCFfBAGk/c=" + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/on-headers/-/on-headers-1.0.2.tgz", + "integrity": "sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA==", + "dev": true }, "once": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", + "dev": true, "requires": { "wrappy": "1" } }, "onetime": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-2.0.1.tgz", - "integrity": "sha1-BnQoIw/WdEOyeUsiu6UotoZ5YtQ=", + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.0.tgz", + "integrity": "sha512-5NcSkPHhwTVFIQN+TUqXoS5+dlElHXdpAWu9I0HP20YOtIi+aZ0Ct82jdlILDxjLEAWwvm+qj1m6aEtsDVmm6Q==", + "dev": true, + "requires": { + "mimic-fn": "^2.1.0" + } + }, + "open": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/open/-/open-7.0.3.tgz", + "integrity": "sha512-sP2ru2v0P290WFfv49Ap8MF6PkzGNnGlAwHweB4WR4mr5d2d0woiCluUeJ218w7/+PmoBy9JmYgD5A4mLcWOFA==", + "dev": true, "requires": { - "mimic-fn": "^1.0.0" + "is-docker": "^2.0.0", + "is-wsl": "^2.1.1" + }, + "dependencies": { + "is-wsl": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-wsl/-/is-wsl-2.1.1.tgz", + "integrity": "sha512-umZHcSrwlDHo2TGMXv0DZ8dIUGunZ2Iv68YZnrmCiBPkZ4aaOhtv7pXJKeki9k3qJ3RJr0cDyitcl5wEH3AYog==", + "dev": true + } } }, "opener": { @@ -9334,97 +13900,102 @@ "dev": true }, "opn": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/opn/-/opn-5.2.0.tgz", - "integrity": "sha512-Jd/GpzPyHF4P2/aNOVmS3lfMSWV9J7cOhCG1s08XCEAsPkB7lp6ddiU0J7XzyQRDUh8BqJ7PchfINjR8jyofRQ==", + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/opn/-/opn-5.5.0.tgz", + "integrity": "sha512-PqHpggC9bLV0VeWcdKhkpxY+3JTzetLSqTCWL/z/tFIbI6G8JCjondXklT1JinczLz2Xib62sSp0T/gKT4KksA==", + "dev": true, "requires": { "is-wsl": "^1.1.0" } }, - "optimist": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/optimist/-/optimist-0.6.1.tgz", - "integrity": "sha1-2j6nRob6IaGaERwybpDrFaAZZoY=", + "optimize-css-assets-webpack-plugin": { + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/optimize-css-assets-webpack-plugin/-/optimize-css-assets-webpack-plugin-5.0.3.tgz", + "integrity": "sha512-q9fbvCRS6EYtUKKSwI87qm2IxlyJK5b4dygW1rKUBT6mMDhdG5e5bZT63v6tnJR9F9FB/H5a0HTmtw+laUBxKA==", + "dev": true, "requires": { - "minimist": "~0.0.1", - "wordwrap": "~0.0.2" - }, - "dependencies": { - "minimist": { - "version": "0.0.10", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.10.tgz", - "integrity": "sha1-3j+YVD2/lggr5IrRoMfNqDYwHc8=" - } + "cssnano": "^4.1.10", + "last-call-webpack-plugin": "^3.0.0" } }, "optionator": { - "version": "0.8.2", - "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.8.2.tgz", - "integrity": "sha1-NkxeQJ0/TWMB1sC0wFu6UBgK62Q=", + "version": "0.8.3", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.8.3.tgz", + "integrity": "sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA==", + "dev": true, "requires": { "deep-is": "~0.1.3", - "fast-levenshtein": "~2.0.4", 
+ "fast-levenshtein": "~2.0.6", "levn": "~0.3.0", "prelude-ls": "~1.1.2", "type-check": "~0.3.2", - "wordwrap": "~1.0.0" - }, - "dependencies": { - "wordwrap": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-1.0.0.tgz", - "integrity": "sha1-J1hIEIkUVqQXHI0CJkQa3pDLyus=" - } + "word-wrap": "~1.2.3" } }, "original": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/original/-/original-1.0.2.tgz", "integrity": "sha512-hyBVl6iqqUOJ8FqRe+l/gS8H+kKYjrEndd5Pm1MfBtsEKA038HkkdbAl/72EAXGyonD/PFsvmVG+EvcIpliMBg==", + "dev": true, "requires": { "url-parse": "^1.4.3" } }, - "os": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/os/-/os-0.1.1.tgz", - "integrity": "sha1-IIhF6J4ZOtTZcUdLk5R3NqVtE/M=", - "dev": true - }, "os-browserify": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/os-browserify/-/os-browserify-0.3.0.tgz", - "integrity": "sha1-hUNzx/XCMVkU/Jv8a9gjj92h7Cc=" - }, - "os-homedir": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/os-homedir/-/os-homedir-1.0.2.tgz", - "integrity": "sha1-/7xJiDNuDoM94MFox+8VISGqf7M=" + "integrity": "sha1-hUNzx/XCMVkU/Jv8a9gjj92h7Cc=", + "dev": true }, "os-locale": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/os-locale/-/os-locale-2.1.0.tgz", - "integrity": "sha512-3sslG3zJbEYcaC4YVAvDorjGxc7tv6KVATnLPZONiljsUncvihe9BQoVCEs0RZ1kmf4Hk9OBqlZfJZWI4GanKA==", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/os-locale/-/os-locale-3.1.0.tgz", + "integrity": "sha512-Z8l3R4wYWM40/52Z+S265okfFj8Kt2cC2MKY+xNi3kFs+XGI7WXu/I309QQQYbRW4ijiZ+yxs9pqEhJh0DqW3Q==", + "dev": true, "requires": { - "execa": "^0.7.0", - "lcid": "^1.0.0", - "mem": "^1.1.0" + "execa": "^1.0.0", + "lcid": "^2.0.0", + "mem": "^4.0.0" } }, "os-tmpdir": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz", - "integrity": "sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ=" + "integrity": "sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ=", + "dev": true + }, + "p-defer": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/p-defer/-/p-defer-1.0.0.tgz", + "integrity": "sha1-n26xgvbJqozXQwBKfU+WsZaw+ww=", + "dev": true + }, + "p-each-series": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/p-each-series/-/p-each-series-1.0.0.tgz", + "integrity": "sha1-kw89Et0fUOdDRFeiLNbwSsatf3E=", + "dev": true, + "requires": { + "p-reduce": "^1.0.0" + } }, "p-finally": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/p-finally/-/p-finally-1.0.0.tgz", - "integrity": "sha1-P7z7FbiZpEEjs0ttzBi3JDNqLK4=" + "integrity": "sha1-P7z7FbiZpEEjs0ttzBi3JDNqLK4=", + "dev": true + }, + "p-is-promise": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/p-is-promise/-/p-is-promise-2.1.0.tgz", + "integrity": "sha512-Y3W0wlRPK8ZMRbNq97l4M5otioeA5lm1z7bkNkxCka8HSPjR0xRWmpCmc9utiaLP9Jb1eD8BgeIxTW4AIF45Pg==", + "dev": true }, "p-limit": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-1.3.0.tgz", "integrity": "sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q==", + "dev": true, "requires": { "p-try": "^1.0.0" } @@ -9433,146 +14004,117 @@ "version": "2.0.0", "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-2.0.0.tgz", "integrity": "sha1-IKAQOyIqcMj9OcwuWAaA893l7EM=", + "dev": true, "requires": { "p-limit": "^1.1.0" } }, "p-map": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/p-map/-/p-map-1.2.0.tgz", - "integrity": 
"sha512-r6zKACMNhjPJMTl8KcFH4li//gkrXWfbD6feV8l6doRHlzljFWGJ2AP6iKaCJXyZmAUMOPtvbW7EXkbWO/pLEA==" + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-map/-/p-map-3.0.0.tgz", + "integrity": "sha512-d3qXVTF/s+W+CdJ5A29wywV2n8CQQYahlgz2bFiA+4eVNJbHJodPZ+/gXwPGh0bOqA+j8S+6+ckmvLGPk1QpxQ==", + "dev": true, + "requires": { + "aggregate-error": "^3.0.0" + } }, - "p-try": { + "p-reduce": { "version": "1.0.0", - "resolved": "https://registry.npmjs.org/p-try/-/p-try-1.0.0.tgz", - "integrity": "sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M=" + "resolved": "https://registry.npmjs.org/p-reduce/-/p-reduce-1.0.0.tgz", + "integrity": "sha1-GMKw3ZNqRpClKfgjH1ig/bakffo=", + "dev": true }, - "package-json": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/package-json/-/package-json-4.0.1.tgz", - "integrity": "sha1-iGmgQBJTZhxMTKPabCEh7VVfXu0=", + "p-retry": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/p-retry/-/p-retry-3.0.1.tgz", + "integrity": "sha512-XE6G4+YTTkT2a0UWb2kjZe8xNwf8bIbnqpc/IS/idOBVhyves0mK5OJgeocjx7q5pvX/6m23xuzVPYT1uGM73w==", + "dev": true, "requires": { - "got": "^6.7.1", - "registry-auth-token": "^3.0.1", - "registry-url": "^3.0.3", - "semver": "^5.1.0" + "retry": "^0.12.0" } }, + "p-try": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-1.0.0.tgz", + "integrity": "sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M=", + "dev": true + }, "pako": { "version": "1.0.7", "resolved": "https://registry.npmjs.org/pako/-/pako-1.0.7.tgz", - "integrity": "sha512-3HNK5tW4x8o5mO8RuHZp3Ydw9icZXx0RANAOMzlMzx7LVXhMJ4mo3MOBpzyd7r/+RUu8BmndP47LXT+vzjtWcQ==" + "integrity": "sha512-3HNK5tW4x8o5mO8RuHZp3Ydw9icZXx0RANAOMzlMzx7LVXhMJ4mo3MOBpzyd7r/+RUu8BmndP47LXT+vzjtWcQ==", + "dev": true }, "parallel-transform": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/parallel-transform/-/parallel-transform-1.1.0.tgz", "integrity": "sha1-1BDwZbBdojCB/NEPKIVMKb2jOwY=", + "dev": true, "requires": { - "cyclist": "~0.2.2", - "inherits": "^2.0.3", - "readable-stream": "^2.1.5" - } - }, - "param-case": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/param-case/-/param-case-2.1.1.tgz", - "integrity": "sha1-35T9jPZTHs915r75oIWPvHK+Ikc=", - "requires": { - "no-case": "^2.2.0" - } - }, - "parse-asn1": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/parse-asn1/-/parse-asn1-5.1.1.tgz", - "integrity": "sha512-KPx7flKXg775zZpnp9SxJlz00gTd4BmJ2yJufSc44gMCRrRQ7NSzAcSJQfifuOLgW6bEi+ftrALtsgALeB2Adw==", - "requires": { - "asn1.js": "^4.0.0", - "browserify-aes": "^1.0.0", - "create-hash": "^1.1.0", - "evp_bytestokey": "^1.0.0", - "pbkdf2": "^3.0.3" + "cyclist": "~0.2.2", + "inherits": "^2.0.3", + "readable-stream": "^2.1.5" } }, - "parse-bmfont-ascii": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/parse-bmfont-ascii/-/parse-bmfont-ascii-1.0.6.tgz", - "integrity": "sha1-Eaw8P/WPfCAgqyJ2kHkQjU36AoU=", - "dev": true - }, - "parse-bmfont-binary": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/parse-bmfont-binary/-/parse-bmfont-binary-1.0.6.tgz", - "integrity": "sha1-0Di0dtPp3Z2x4RoLDlOiJ5K2kAY=", - "dev": true - }, - "parse-bmfont-xml": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/parse-bmfont-xml/-/parse-bmfont-xml-1.1.4.tgz", - "integrity": "sha512-bjnliEOmGv3y1aMEfREMBJ9tfL3WR0i0CKPj61DnSLaoxWR3nLrsQrEbCId/8rF4NyRF0cCqisSVXyQYWM+mCQ==", + "param-case": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/param-case/-/param-case-2.1.1.tgz", + "integrity": 
"sha1-35T9jPZTHs915r75oIWPvHK+Ikc=", "dev": true, "requires": { - "xml-parse-from-string": "^1.0.0", - "xml2js": "^0.4.5" + "no-case": "^2.2.0" } }, - "parse-glob": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/parse-glob/-/parse-glob-3.0.4.tgz", - "integrity": "sha1-ssN2z7EfNVE7rdFz7wu246OIORw=", + "parent-module": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", + "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", + "dev": true, "requires": { - "glob-base": "^0.3.0", - "is-dotfile": "^1.0.0", - "is-extglob": "^1.0.0", - "is-glob": "^2.0.0" + "callsites": "^3.0.0" }, "dependencies": { - "is-extglob": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-1.0.0.tgz", - "integrity": "sha1-rEaBd8SUNAWgkvyPKXYMb/xiBsA=" - }, - "is-glob": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-2.0.1.tgz", - "integrity": "sha1-0Jb5JqPe1WAPP9/ZEZjLCIjC2GM=", - "requires": { - "is-extglob": "^1.0.0" - } + "callsites": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", + "dev": true } } }, - "parse-headers": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/parse-headers/-/parse-headers-2.0.1.tgz", - "integrity": "sha1-aug6eqJanZtwCswoaYzR8e1+lTY=", + "parse-asn1": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/parse-asn1/-/parse-asn1-5.1.1.tgz", + "integrity": "sha512-KPx7flKXg775zZpnp9SxJlz00gTd4BmJ2yJufSc44gMCRrRQ7NSzAcSJQfifuOLgW6bEi+ftrALtsgALeB2Adw==", "dev": true, "requires": { - "for-each": "^0.3.2", - "trim": "0.0.1" + "asn1.js": "^4.0.0", + "browserify-aes": "^1.0.0", + "create-hash": "^1.1.0", + "evp_bytestokey": "^1.0.0", + "pbkdf2": "^3.0.3" } }, "parse-json": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-2.2.0.tgz", "integrity": "sha1-9ID0BDTvgHQfhGkJn43qGPVaTck=", + "dev": true, "requires": { "error-ex": "^1.2.0" } }, - "parse-passwd": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/parse-passwd/-/parse-passwd-1.0.0.tgz", - "integrity": "sha1-bVuTSkVpk7I9N/QKOC1vFmao5cY=" - }, "parse5": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/parse5/-/parse5-4.0.0.tgz", - "integrity": "sha512-VrZ7eOd3T1Fk4XWNXMgiGBK/z0MG48BWG2uQNU4I72fkQuKUTZpl+u9k+CxEG0twMVzSmXEEz12z5Fnw1jIQFA==" + "integrity": "sha512-VrZ7eOd3T1Fk4XWNXMgiGBK/z0MG48BWG2uQNU4I72fkQuKUTZpl+u9k+CxEG0twMVzSmXEEz12z5Fnw1jIQFA==", + "dev": true }, "parseurl": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.2.tgz", - "integrity": "sha1-/CidTtiZMRlGDBViUyYs3I3mW/M=" + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", + "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==", + "dev": true }, "pascal-case": { "version": "2.0.1", @@ -9589,20 +14131,11 @@ "resolved": "https://registry.npmjs.org/pascalcase/-/pascalcase-0.1.1.tgz", "integrity": "sha1-s2PlXoAGym/iF4TS2yK9FdeRfxQ=" }, - "path": { - "version": "0.12.7", - "resolved": "https://registry.npmjs.org/path/-/path-0.12.7.tgz", - "integrity": "sha1-1NwqUGxM4hl+tIHr/NWzbAFAsQ8=", - "dev": true, - "requires": { - "process": "^0.11.1", - "util": "^0.10.3" - } - }, "path-browserify": 
{ - "version": "0.0.0", - "resolved": "https://registry.npmjs.org/path-browserify/-/path-browserify-0.0.0.tgz", - "integrity": "sha1-oLhwcpquIUAFt9UDLsLLuw+0RRo=" + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/path-browserify/-/path-browserify-0.0.1.tgz", + "integrity": "sha512-BapA40NHICOS+USX9SN4tyhq+A2RrN/Ws5F0Z5aMHDp98Fl86lX8Oti8B7uN93L4Ifv4fHOEA+pQw87gmMO/lQ==", + "dev": true }, "path-case": { "version": "2.1.1", @@ -9616,32 +14149,38 @@ "path-dirname": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/path-dirname/-/path-dirname-1.0.2.tgz", - "integrity": "sha1-zDPSTVJeCZpTiMAzbG4yuRYGCeA=" + "integrity": "sha1-zDPSTVJeCZpTiMAzbG4yuRYGCeA=", + "dev": true }, "path-exists": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", - "integrity": "sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=" + "integrity": "sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=", + "dev": true }, "path-is-absolute": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", - "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=" + "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=", + "dev": true }, "path-is-inside": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/path-is-inside/-/path-is-inside-1.0.2.tgz", - "integrity": "sha1-NlQX3t5EQw0cEa9hAn+s8HS9/FM=" + "integrity": "sha1-NlQX3t5EQw0cEa9hAn+s8HS9/FM=", + "dev": true }, "path-key": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", - "integrity": "sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A=" + "integrity": "sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A=", + "dev": true }, "path-parse": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.6.tgz", - "integrity": "sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw==" + "integrity": "sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw==", + "dev": true }, "path-to-regexp": { "version": "1.7.0", @@ -9662,6 +14201,7 @@ "version": "1.1.0", "resolved": "https://registry.npmjs.org/path-type/-/path-type-1.1.0.tgz", "integrity": "sha1-WcRPfuSR2nBNpBXaWkBwuk+P5EE=", + "dev": true, "requires": { "graceful-fs": "^4.1.2", "pify": "^2.0.0", @@ -9672,6 +14212,7 @@ "version": "3.0.17", "resolved": "https://registry.npmjs.org/pbkdf2/-/pbkdf2-3.0.17.tgz", "integrity": "sha512-U/il5MsrZp7mGg3mSQfn742na2T+1/vHDCG5/iTI3X9MKUuYUZVLQhyRsg06mCgDBTd57TxzgZt7P+fYfjRLtA==", + "dev": true, "requires": { "create-hash": "^1.1.2", "create-hmac": "^1.1.4", @@ -9680,93 +14221,82 @@ "sha.js": "^2.4.8" } }, - "pend": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/pend/-/pend-1.2.0.tgz", - "integrity": "sha1-elfrVQpng/kRUzH89GY9XI4AelA=", - "dev": true - }, "performance-now": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", "integrity": "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=" }, - "phantomjs-prebuilt": { - "version": "2.1.16", - "resolved": "https://registry.npmjs.org/phantomjs-prebuilt/-/phantomjs-prebuilt-2.1.16.tgz", - "integrity": "sha1-79ISpKOWbTZHaE6ouniFSb4q7+8=", - "dev": true, - "requires": { - "es6-promise": "^4.0.3", - "extract-zip": "^1.6.5", - "fs-extra": "^1.0.0", - "hasha": "^2.2.0", - "kew": "^0.7.0", - "progress": "^1.1.8", - "request": "^2.81.0", - "request-progress": "^2.0.1", - "which": "^1.2.10" - }, - "dependencies": { - "fs-extra": { - "version": "1.0.0", - "resolved": 
"https://registry.npmjs.org/fs-extra/-/fs-extra-1.0.0.tgz", - "integrity": "sha1-zTzl9+fLYUWIP8rjGR6Yd/hYeVA=", - "dev": true, - "requires": { - "graceful-fs": "^4.1.2", - "jsonfile": "^2.1.0", - "klaw": "^1.0.0" - } - }, - "jsonfile": { - "version": "2.4.0", - "resolved": "http://registry.npmjs.org/jsonfile/-/jsonfile-2.4.0.tgz", - "integrity": "sha1-NzaitCi4e72gzIO1P6PWM6NcKug=", - "dev": true, - "requires": { - "graceful-fs": "^4.1.6" - } - } - } - }, - "phin": { - "version": "2.9.3", - "resolved": "https://registry.npmjs.org/phin/-/phin-2.9.3.tgz", - "integrity": "sha512-CzFr90qM24ju5f88quFC/6qohjC144rehe5n6DH900lgXmUe86+xCKc10ev56gRKC4/BkHUoG4uSiQgBiIXwDA==", + "picomatch": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.2.2.tgz", + "integrity": "sha512-q0M/9eZHzmr0AulXyPwNfZjtwZ/RBZlbN3K3CErVrk50T2ASYI7Bye0EvekFY3IP1Nt2DHu0re+V2ZHIpMkuWg==", "dev": true }, "pify": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", - "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw=" + "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw=", + "dev": true }, "pinkie": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/pinkie/-/pinkie-2.0.4.tgz", - "integrity": "sha1-clVrgM+g1IqXToDnckjoDtT3+HA=" + "integrity": "sha1-clVrgM+g1IqXToDnckjoDtT3+HA=", + "dev": true }, "pinkie-promise": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/pinkie-promise/-/pinkie-promise-2.0.1.tgz", "integrity": "sha1-ITXW36ejWMBprJsXh3YogihFD/o=", + "dev": true, "requires": { "pinkie": "^2.0.0" } }, - "pixelmatch": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/pixelmatch/-/pixelmatch-4.0.2.tgz", - "integrity": "sha1-j0fc7FARtHe2fbA8JDvB8wheiFQ=", + "pirates": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.1.tgz", + "integrity": "sha512-WuNqLTbMI3tmfef2TKxlQmAiLHKtFhlsCZnPIpuv2Ow0RDVO8lfy1Opf4NUzlMXLjPl+Men7AuVdX6TA+s+uGA==", "dev": true, "requires": { - "pngjs": "^3.0.0" + "node-modules-regexp": "^1.0.0" } }, "pkg-dir": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-1.0.0.tgz", + "integrity": "sha1-ektQio1bstYp1EcFb/TpyTFM89Q=", + "dev": true, + "requires": { + "find-up": "^1.0.0" + }, + "dependencies": { + "find-up": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-1.1.2.tgz", + "integrity": "sha1-ay6YIrGizgpgq2TWEOzK1TyyTQ8=", + "dev": true, + "requires": { + "path-exists": "^2.0.0", + "pinkie-promise": "^2.0.0" + } + }, + "path-exists": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-2.1.0.tgz", + "integrity": "sha1-D+tsZPD8UY2adU3V77YscCJ2H0s=", + "dev": true, + "requires": { + "pinkie-promise": "^2.0.0" + } + } + } + }, + "pkg-up": { "version": "2.0.0", - "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-2.0.0.tgz", - "integrity": "sha1-9tXREJ4Z1j7fQo4L1X4Sd3YVM0s=", + "resolved": "https://registry.npmjs.org/pkg-up/-/pkg-up-2.0.0.tgz", + "integrity": "sha1-yBmscoBZpGHKscOImivjxJoATX8=", + "dev": true, "requires": { "find-up": "^2.1.0" } @@ -9774,14 +14304,18 @@ "pn": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/pn/-/pn-1.1.0.tgz", - "integrity": "sha512-2qHaIQr2VLRFoxe2nASzsV6ef4yOOH+Fi9FBOVH6cqeSgUnoyySPZkxzLuzd+RYOQTRpROA0ztTMqxROKSb/nA==" - }, - "pngjs": { - "version": "3.3.3", - "resolved": "https://registry.npmjs.org/pngjs/-/pngjs-3.3.3.tgz", - "integrity": 
"sha512-1n3Z4p3IOxArEs1VRXnZ/RXdfEniAUS9jb68g58FIXMNkPJeZd+Qh4Uq7/e0LVxAQGos1eIUrqrt4FpjdnEd+Q==", + "integrity": "sha512-2qHaIQr2VLRFoxe2nASzsV6ef4yOOH+Fi9FBOVH6cqeSgUnoyySPZkxzLuzd+RYOQTRpROA0ztTMqxROKSb/nA==", "dev": true }, + "pnp-webpack-plugin": { + "version": "1.6.4", + "resolved": "https://registry.npmjs.org/pnp-webpack-plugin/-/pnp-webpack-plugin-1.6.4.tgz", + "integrity": "sha512-7Wjy+9E3WwLOEL30D+m8TSTF7qJJUJLONBnwQp0518siuMxUQUbgZwssaFX+QKlZkjHZcw/IpZCt/H0srrntSg==", + "dev": true, + "requires": { + "ts-pnp": "^1.1.6" + } + }, "popper.js": { "version": "1.14.6", "resolved": "https://registry.npmjs.org/popper.js/-/popper.js-1.14.6.tgz", @@ -9797,27 +14331,30 @@ } }, "portfinder": { - "version": "1.0.20", - "resolved": "https://registry.npmjs.org/portfinder/-/portfinder-1.0.20.tgz", - "integrity": "sha512-Yxe4mTyDzTd59PZJY4ojZR8F+E5e97iq2ZOHPz3HDgSvYC5siNad2tLooQ5y5QHyQhc3xVqvyk/eNA3wuoa7Sw==", + "version": "1.0.25", + "resolved": "https://registry.npmjs.org/portfinder/-/portfinder-1.0.25.tgz", + "integrity": "sha512-6ElJnHBbxVA1XSLgBp7G1FiCkQdlqGzuF7DswL5tcea+E8UpuvPU7beVAjjRwCioTS9ZluNbu+ZyRvgTsmqEBg==", + "dev": true, "requires": { - "async": "^1.5.2", - "debug": "^2.2.0", - "mkdirp": "0.5.x" + "async": "^2.6.2", + "debug": "^3.1.1", + "mkdirp": "^0.5.1" }, "dependencies": { - "async": { - "version": "1.5.2", - "resolved": "https://registry.npmjs.org/async/-/async-1.5.2.tgz", - "integrity": "sha1-7GphrlZIDAw8skHJVhjiCJL5Zyo=" - }, "debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "version": "3.2.6", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.6.tgz", + "integrity": "sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==", + "dev": true, "requires": { - "ms": "2.0.0" + "ms": "^2.1.1" } + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true } } }, @@ -9827,986 +14364,880 @@ "integrity": "sha1-AerA/jta9xoqbAL+q7jB/vfgDqs=" }, "postcss": { - "version": "6.0.23", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-6.0.23.tgz", - "integrity": "sha512-soOk1h6J3VMTZtVeVpv15/Hpdl2cBLX3CAw4TAbkpTJiNPk9YP/zWcD1ND+xEtvyuuvKzbxliTOIyvkSeSJ6ag==", + "version": "7.0.27", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-7.0.27.tgz", + "integrity": "sha512-WuQETPMcW9Uf1/22HWUWP9lgsIC+KEHg2kozMflKjbeUtw9ujvFX6QmIfozaErDkmLWS9WEnEdEe6Uo9/BNTdQ==", + "dev": true, "requires": { - "chalk": "^2.4.1", + "chalk": "^2.4.2", "source-map": "^0.6.1", - "supports-color": "^5.4.0" + "supports-color": "^6.1.0" }, "dependencies": { - "chalk": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.1.tgz", - "integrity": "sha512-ObN6h1v2fTJSmUXoS3nMQ92LbDK9be4TV+6G+omQlGJFdcUX5heKi1LZ1YnRMIgwTLEj3E24bT6tYni50rlCfQ==", - "requires": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" - } - }, "source-map": { "version": "0.6.1", "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==" + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": 
true + }, + "supports-color": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-6.1.0.tgz", + "integrity": "sha512-qe1jfm1Mg7Nq/NSh6XE24gPXROEVsWHxC1LIx//XNlD9iw7YZQGjZNjYN7xGaEG6iKdA8EtNFW6R0gjnVXp+wQ==", + "dev": true, + "requires": { + "has-flag": "^3.0.0" + } } } }, + "postcss-attribute-case-insensitive": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/postcss-attribute-case-insensitive/-/postcss-attribute-case-insensitive-4.0.2.tgz", + "integrity": "sha512-clkFxk/9pcdb4Vkn0hAHq3YnxBQ2p0CGD1dy24jN+reBck+EWxMbxSUqN4Yj7t0w8csl87K6p0gxBe1utkJsYA==", + "dev": true, + "requires": { + "postcss": "^7.0.2", + "postcss-selector-parser": "^6.0.2" + } + }, + "postcss-browser-comments": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/postcss-browser-comments/-/postcss-browser-comments-3.0.0.tgz", + "integrity": "sha512-qfVjLfq7HFd2e0HW4s1dvU8X080OZdG46fFbIBFjW7US7YPDcWfRvdElvwMJr2LI6hMmD+7LnH2HcmXTs+uOig==", + "dev": true, + "requires": { + "postcss": "^7" + } + }, "postcss-calc": { - "version": "5.3.1", - "resolved": "https://registry.npmjs.org/postcss-calc/-/postcss-calc-5.3.1.tgz", - "integrity": "sha1-d7rnypKK2FcW4v2kLyYb98HWW14=", + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/postcss-calc/-/postcss-calc-7.0.2.tgz", + "integrity": "sha512-rofZFHUg6ZIrvRwPeFktv06GdbDYLcGqh9EwiMutZg+a0oePCCw1zHOEiji6LCpyRcjTREtPASuUqeAvYlEVvQ==", + "dev": true, "requires": { - "postcss": "^5.0.2", - "postcss-message-helpers": "^2.0.0", - "reduce-css-calc": "^1.2.6" + "postcss": "^7.0.27", + "postcss-selector-parser": "^6.0.2", + "postcss-value-parser": "^4.0.2" }, "dependencies": { - "has-flag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-1.0.0.tgz", - "integrity": "sha1-nZ55MWXOAXoA8AQYxD+UKnsdEfo=" - }, - "postcss": { - "version": "5.2.18", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-5.2.18.tgz", - "integrity": "sha512-zrUjRRe1bpXKsX1qAJNJjqZViErVuyEkMTRrwu4ud4sbTtIBRmtaYDrHmcGgmrbsW3MHfmtIf+vJumgQn+PrXg==", - "requires": { - "chalk": "^1.1.3", - "js-base64": "^2.1.9", - "source-map": "^0.5.6", - "supports-color": "^3.2.3" - } - }, - "supports-color": { - "version": "3.2.3", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-3.2.3.tgz", - "integrity": "sha1-ZawFBLOVQXHYpklGsq48u4pfVPY=", - "requires": { - "has-flag": "^1.0.0" - } + "postcss-value-parser": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-4.0.3.tgz", + "integrity": "sha512-N7h4pG+Nnu5BEIzyeaaIYWs0LI5XC40OrRh5L60z0QjFsqGWcHcbkBvpe1WYpcIS9yQ8sOi/vIPt1ejQCrMVrg==", + "dev": true } } }, + "postcss-color-functional-notation": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/postcss-color-functional-notation/-/postcss-color-functional-notation-2.0.1.tgz", + "integrity": "sha512-ZBARCypjEDofW4P6IdPVTLhDNXPRn8T2s1zHbZidW6rPaaZvcnCS2soYFIQJrMZSxiePJ2XIYTlcb2ztr/eT2g==", + "dev": true, + "requires": { + "postcss": "^7.0.2", + "postcss-values-parser": "^2.0.0" + } + }, + "postcss-color-gray": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/postcss-color-gray/-/postcss-color-gray-5.0.0.tgz", + "integrity": "sha512-q6BuRnAGKM/ZRpfDascZlIZPjvwsRye7UDNalqVz3s7GDxMtqPY6+Q871liNxsonUw8oC61OG+PSaysYpl1bnw==", + "dev": true, + "requires": { + "@csstools/convert-colors": "^1.4.0", + "postcss": "^7.0.5", + "postcss-values-parser": "^2.0.0" + } + }, + "postcss-color-hex-alpha": { + "version": 
"5.0.3", + "resolved": "https://registry.npmjs.org/postcss-color-hex-alpha/-/postcss-color-hex-alpha-5.0.3.tgz", + "integrity": "sha512-PF4GDel8q3kkreVXKLAGNpHKilXsZ6xuu+mOQMHWHLPNyjiUBOr75sp5ZKJfmv1MCus5/DWUGcK9hm6qHEnXYw==", + "dev": true, + "requires": { + "postcss": "^7.0.14", + "postcss-values-parser": "^2.0.1" + } + }, + "postcss-color-mod-function": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/postcss-color-mod-function/-/postcss-color-mod-function-3.0.3.tgz", + "integrity": "sha512-YP4VG+xufxaVtzV6ZmhEtc+/aTXH3d0JLpnYfxqTvwZPbJhWqp8bSY3nfNzNRFLgB4XSaBA82OE4VjOOKpCdVQ==", + "dev": true, + "requires": { + "@csstools/convert-colors": "^1.4.0", + "postcss": "^7.0.2", + "postcss-values-parser": "^2.0.0" + } + }, + "postcss-color-rebeccapurple": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/postcss-color-rebeccapurple/-/postcss-color-rebeccapurple-4.0.1.tgz", + "integrity": "sha512-aAe3OhkS6qJXBbqzvZth2Au4V3KieR5sRQ4ptb2b2O8wgvB3SJBsdG+jsn2BZbbwekDG8nTfcCNKcSfe/lEy8g==", + "dev": true, + "requires": { + "postcss": "^7.0.2", + "postcss-values-parser": "^2.0.0" + } + }, "postcss-colormin": { - "version": "2.2.2", - "resolved": "https://registry.npmjs.org/postcss-colormin/-/postcss-colormin-2.2.2.tgz", - "integrity": "sha1-ZjFBfV8OkJo9fsJrJMio0eT5bks=", + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/postcss-colormin/-/postcss-colormin-4.0.3.tgz", + "integrity": "sha512-WyQFAdDZpExQh32j0U0feWisZ0dmOtPl44qYmJKkq9xFWY3p+4qnRzCHeNrkeRhwPHz9bQ3mo0/yVkaply0MNw==", + "dev": true, "requires": { - "colormin": "^1.0.5", - "postcss": "^5.0.13", - "postcss-value-parser": "^3.2.3" - }, - "dependencies": { - "has-flag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-1.0.0.tgz", - "integrity": "sha1-nZ55MWXOAXoA8AQYxD+UKnsdEfo=" - }, - "postcss": { - "version": "5.2.18", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-5.2.18.tgz", - "integrity": "sha512-zrUjRRe1bpXKsX1qAJNJjqZViErVuyEkMTRrwu4ud4sbTtIBRmtaYDrHmcGgmrbsW3MHfmtIf+vJumgQn+PrXg==", - "requires": { - "chalk": "^1.1.3", - "js-base64": "^2.1.9", - "source-map": "^0.5.6", - "supports-color": "^3.2.3" - } - }, - "supports-color": { - "version": "3.2.3", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-3.2.3.tgz", - "integrity": "sha1-ZawFBLOVQXHYpklGsq48u4pfVPY=", - "requires": { - "has-flag": "^1.0.0" - } - } + "browserslist": "^4.0.0", + "color": "^3.0.0", + "has": "^1.0.0", + "postcss": "^7.0.0", + "postcss-value-parser": "^3.0.0" } }, "postcss-convert-values": { - "version": "2.6.1", - "resolved": "https://registry.npmjs.org/postcss-convert-values/-/postcss-convert-values-2.6.1.tgz", - "integrity": "sha1-u9hZPFwf0uPRwyK7kl3K6Nrk1i0=", + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/postcss-convert-values/-/postcss-convert-values-4.0.1.tgz", + "integrity": "sha512-Kisdo1y77KUC0Jmn0OXU/COOJbzM8cImvw1ZFsBgBgMgb1iL23Zs/LXRe3r+EZqM3vGYKdQ2YJVQ5VkJI+zEJQ==", + "dev": true, "requires": { - "postcss": "^5.0.11", - "postcss-value-parser": "^3.1.2" - }, - "dependencies": { - "has-flag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-1.0.0.tgz", - "integrity": "sha1-nZ55MWXOAXoA8AQYxD+UKnsdEfo=" - }, - "postcss": { - "version": "5.2.18", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-5.2.18.tgz", - "integrity": "sha512-zrUjRRe1bpXKsX1qAJNJjqZViErVuyEkMTRrwu4ud4sbTtIBRmtaYDrHmcGgmrbsW3MHfmtIf+vJumgQn+PrXg==", - "requires": { - "chalk": "^1.1.3", - 
"js-base64": "^2.1.9", - "source-map": "^0.5.6", - "supports-color": "^3.2.3" - } - }, - "supports-color": { - "version": "3.2.3", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-3.2.3.tgz", - "integrity": "sha1-ZawFBLOVQXHYpklGsq48u4pfVPY=", - "requires": { - "has-flag": "^1.0.0" - } - } + "postcss": "^7.0.0", + "postcss-value-parser": "^3.0.0" + } + }, + "postcss-custom-media": { + "version": "7.0.8", + "resolved": "https://registry.npmjs.org/postcss-custom-media/-/postcss-custom-media-7.0.8.tgz", + "integrity": "sha512-c9s5iX0Ge15o00HKbuRuTqNndsJUbaXdiNsksnVH8H4gdc+zbLzr/UasOwNG6CTDpLFekVY4672eWdiiWu2GUg==", + "dev": true, + "requires": { + "postcss": "^7.0.14" + } + }, + "postcss-custom-properties": { + "version": "8.0.11", + "resolved": "https://registry.npmjs.org/postcss-custom-properties/-/postcss-custom-properties-8.0.11.tgz", + "integrity": "sha512-nm+o0eLdYqdnJ5abAJeXp4CEU1c1k+eB2yMCvhgzsds/e0umabFrN6HoTy/8Q4K5ilxERdl/JD1LO5ANoYBeMA==", + "dev": true, + "requires": { + "postcss": "^7.0.17", + "postcss-values-parser": "^2.0.1" } }, - "postcss-discard-comments": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/postcss-discard-comments/-/postcss-discard-comments-2.0.4.tgz", - "integrity": "sha1-vv6J+v1bPazlzM5Rt2uBUUvgDj0=", + "postcss-custom-selectors": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/postcss-custom-selectors/-/postcss-custom-selectors-5.1.2.tgz", + "integrity": "sha512-DSGDhqinCqXqlS4R7KGxL1OSycd1lydugJ1ky4iRXPHdBRiozyMHrdu0H3o7qNOCiZwySZTUI5MV0T8QhCLu+w==", + "dev": true, "requires": { - "postcss": "^5.0.14" + "postcss": "^7.0.2", + "postcss-selector-parser": "^5.0.0-rc.3" }, "dependencies": { - "has-flag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-1.0.0.tgz", - "integrity": "sha1-nZ55MWXOAXoA8AQYxD+UKnsdEfo=" - }, - "postcss": { - "version": "5.2.18", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-5.2.18.tgz", - "integrity": "sha512-zrUjRRe1bpXKsX1qAJNJjqZViErVuyEkMTRrwu4ud4sbTtIBRmtaYDrHmcGgmrbsW3MHfmtIf+vJumgQn+PrXg==", - "requires": { - "chalk": "^1.1.3", - "js-base64": "^2.1.9", - "source-map": "^0.5.6", - "supports-color": "^3.2.3" - } + "cssesc": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/cssesc/-/cssesc-2.0.0.tgz", + "integrity": "sha512-MsCAG1z9lPdoO/IUMLSBWBSVxVtJ1395VGIQ+Fc2gNdkQ1hNDnQdw3YhA71WJCBW1vdwA0cAnk/DnW6bqoEUYg==", + "dev": true }, - "supports-color": { - "version": "3.2.3", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-3.2.3.tgz", - "integrity": "sha1-ZawFBLOVQXHYpklGsq48u4pfVPY=", + "postcss-selector-parser": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-5.0.0.tgz", + "integrity": "sha512-w+zLE5Jhg6Liz8+rQOWEAwtwkyqpfnmsinXjXg6cY7YIONZZtgvE0v2O0uhQBs0peNomOJwWRKt6JBfTdTd3OQ==", + "dev": true, "requires": { - "has-flag": "^1.0.0" + "cssesc": "^2.0.0", + "indexes-of": "^1.0.1", + "uniq": "^1.0.1" } } } }, - "postcss-discard-duplicates": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/postcss-discard-duplicates/-/postcss-discard-duplicates-2.1.0.tgz", - "integrity": "sha1-uavye4isGIFYpesSq8riAmO5GTI=", + "postcss-dir-pseudo-class": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/postcss-dir-pseudo-class/-/postcss-dir-pseudo-class-5.0.0.tgz", + "integrity": "sha512-3pm4oq8HYWMZePJY+5ANriPs3P07q+LW6FAdTlkFH2XqDdP4HeeJYMOzn0HYLhRSjBO3fhiqSwwU9xEULSrPgw==", + "dev": true, 
"requires": { - "postcss": "^5.0.4" + "postcss": "^7.0.2", + "postcss-selector-parser": "^5.0.0-rc.3" }, "dependencies": { - "has-flag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-1.0.0.tgz", - "integrity": "sha1-nZ55MWXOAXoA8AQYxD+UKnsdEfo=" - }, - "postcss": { - "version": "5.2.18", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-5.2.18.tgz", - "integrity": "sha512-zrUjRRe1bpXKsX1qAJNJjqZViErVuyEkMTRrwu4ud4sbTtIBRmtaYDrHmcGgmrbsW3MHfmtIf+vJumgQn+PrXg==", - "requires": { - "chalk": "^1.1.3", - "js-base64": "^2.1.9", - "source-map": "^0.5.6", - "supports-color": "^3.2.3" - } + "cssesc": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/cssesc/-/cssesc-2.0.0.tgz", + "integrity": "sha512-MsCAG1z9lPdoO/IUMLSBWBSVxVtJ1395VGIQ+Fc2gNdkQ1hNDnQdw3YhA71WJCBW1vdwA0cAnk/DnW6bqoEUYg==", + "dev": true }, - "supports-color": { - "version": "3.2.3", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-3.2.3.tgz", - "integrity": "sha1-ZawFBLOVQXHYpklGsq48u4pfVPY=", + "postcss-selector-parser": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-5.0.0.tgz", + "integrity": "sha512-w+zLE5Jhg6Liz8+rQOWEAwtwkyqpfnmsinXjXg6cY7YIONZZtgvE0v2O0uhQBs0peNomOJwWRKt6JBfTdTd3OQ==", + "dev": true, "requires": { - "has-flag": "^1.0.0" + "cssesc": "^2.0.0", + "indexes-of": "^1.0.1", + "uniq": "^1.0.1" } } } }, + "postcss-discard-comments": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/postcss-discard-comments/-/postcss-discard-comments-4.0.2.tgz", + "integrity": "sha512-RJutN259iuRf3IW7GZyLM5Sw4GLTOH8FmsXBnv8Ab/Tc2k4SR4qbV4DNbyyY4+Sjo362SyDmW2DQ7lBSChrpkg==", + "dev": true, + "requires": { + "postcss": "^7.0.0" + } + }, + "postcss-discard-duplicates": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/postcss-discard-duplicates/-/postcss-discard-duplicates-4.0.2.tgz", + "integrity": "sha512-ZNQfR1gPNAiXZhgENFfEglF93pciw0WxMkJeVmw8eF+JZBbMD7jp6C67GqJAXVZP2BWbOztKfbsdmMp/k8c6oQ==", + "dev": true, + "requires": { + "postcss": "^7.0.0" + } + }, "postcss-discard-empty": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/postcss-discard-empty/-/postcss-discard-empty-2.1.0.tgz", - "integrity": "sha1-0rS9nVztXr2Nyt52QMfXzX9PkrU=", + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/postcss-discard-empty/-/postcss-discard-empty-4.0.1.tgz", + "integrity": "sha512-B9miTzbznhDjTfjvipfHoqbWKwd0Mj+/fL5s1QOz06wufguil+Xheo4XpOnc4NqKYBCNqqEzgPv2aPBIJLox0w==", + "dev": true, "requires": { - "postcss": "^5.0.14" - }, - "dependencies": { - "has-flag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-1.0.0.tgz", - "integrity": "sha1-nZ55MWXOAXoA8AQYxD+UKnsdEfo=" - }, - "postcss": { - "version": "5.2.18", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-5.2.18.tgz", - "integrity": "sha512-zrUjRRe1bpXKsX1qAJNJjqZViErVuyEkMTRrwu4ud4sbTtIBRmtaYDrHmcGgmrbsW3MHfmtIf+vJumgQn+PrXg==", - "requires": { - "chalk": "^1.1.3", - "js-base64": "^2.1.9", - "source-map": "^0.5.6", - "supports-color": "^3.2.3" - } - }, - "supports-color": { - "version": "3.2.3", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-3.2.3.tgz", - "integrity": "sha1-ZawFBLOVQXHYpklGsq48u4pfVPY=", - "requires": { - "has-flag": "^1.0.0" - } - } + "postcss": "^7.0.0" } }, "postcss-discard-overridden": { - "version": "0.1.1", - "resolved": 
"https://registry.npmjs.org/postcss-discard-overridden/-/postcss-discard-overridden-0.1.1.tgz", - "integrity": "sha1-ix6vVU9ob7KIzYdMVWZ7CqNmjVg=", + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/postcss-discard-overridden/-/postcss-discard-overridden-4.0.1.tgz", + "integrity": "sha512-IYY2bEDD7g1XM1IDEsUT4//iEYCxAmP5oDSFMVU/JVvT7gh+l4fmjciLqGgwjdWpQIdb0Che2VX00QObS5+cTg==", + "dev": true, "requires": { - "postcss": "^5.0.16" - }, - "dependencies": { - "has-flag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-1.0.0.tgz", - "integrity": "sha1-nZ55MWXOAXoA8AQYxD+UKnsdEfo=" - }, - "postcss": { - "version": "5.2.18", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-5.2.18.tgz", - "integrity": "sha512-zrUjRRe1bpXKsX1qAJNJjqZViErVuyEkMTRrwu4ud4sbTtIBRmtaYDrHmcGgmrbsW3MHfmtIf+vJumgQn+PrXg==", - "requires": { - "chalk": "^1.1.3", - "js-base64": "^2.1.9", - "source-map": "^0.5.6", - "supports-color": "^3.2.3" - } - }, - "supports-color": { - "version": "3.2.3", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-3.2.3.tgz", - "integrity": "sha1-ZawFBLOVQXHYpklGsq48u4pfVPY=", - "requires": { - "has-flag": "^1.0.0" - } - } + "postcss": "^7.0.0" } }, - "postcss-discard-unused": { - "version": "2.2.3", - "resolved": "https://registry.npmjs.org/postcss-discard-unused/-/postcss-discard-unused-2.2.3.tgz", - "integrity": "sha1-vOMLLMWR/8Y0Mitfs0ZLbZNPRDM=", + "postcss-double-position-gradients": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/postcss-double-position-gradients/-/postcss-double-position-gradients-1.0.0.tgz", + "integrity": "sha512-G+nV8EnQq25fOI8CH/B6krEohGWnF5+3A6H/+JEpOncu5dCnkS1QQ6+ct3Jkaepw1NGVqqOZH6lqrm244mCftA==", + "dev": true, "requires": { - "postcss": "^5.0.14", - "uniqs": "^2.0.0" - }, - "dependencies": { - "has-flag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-1.0.0.tgz", - "integrity": "sha1-nZ55MWXOAXoA8AQYxD+UKnsdEfo=" - }, - "postcss": { - "version": "5.2.18", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-5.2.18.tgz", - "integrity": "sha512-zrUjRRe1bpXKsX1qAJNJjqZViErVuyEkMTRrwu4ud4sbTtIBRmtaYDrHmcGgmrbsW3MHfmtIf+vJumgQn+PrXg==", - "requires": { - "chalk": "^1.1.3", - "js-base64": "^2.1.9", - "source-map": "^0.5.6", - "supports-color": "^3.2.3" - } - }, - "supports-color": { - "version": "3.2.3", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-3.2.3.tgz", - "integrity": "sha1-ZawFBLOVQXHYpklGsq48u4pfVPY=", - "requires": { - "has-flag": "^1.0.0" - } - } + "postcss": "^7.0.5", + "postcss-values-parser": "^2.0.0" } }, - "postcss-filter-plugins": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/postcss-filter-plugins/-/postcss-filter-plugins-2.0.3.tgz", - "integrity": "sha512-T53GVFsdinJhgwm7rg1BzbeBRomOg9y5MBVhGcsV0CxurUdVj1UlPdKtn7aqYA/c/QVkzKMjq2bSV5dKG5+AwQ==", + "postcss-env-function": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/postcss-env-function/-/postcss-env-function-2.0.2.tgz", + "integrity": "sha512-rwac4BuZlITeUbiBq60h/xbLzXY43qOsIErngWa4l7Mt+RaSkT7QBjXVGTcBHupykkblHMDrBFh30zchYPaOUw==", + "dev": true, "requires": { - "postcss": "^5.0.4" - }, - "dependencies": { - "has-flag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-1.0.0.tgz", - "integrity": "sha1-nZ55MWXOAXoA8AQYxD+UKnsdEfo=" - }, - "postcss": { - "version": "5.2.18", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-5.2.18.tgz", - 
"integrity": "sha512-zrUjRRe1bpXKsX1qAJNJjqZViErVuyEkMTRrwu4ud4sbTtIBRmtaYDrHmcGgmrbsW3MHfmtIf+vJumgQn+PrXg==", - "requires": { - "chalk": "^1.1.3", - "js-base64": "^2.1.9", - "source-map": "^0.5.6", - "supports-color": "^3.2.3" - } - }, - "supports-color": { - "version": "3.2.3", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-3.2.3.tgz", - "integrity": "sha1-ZawFBLOVQXHYpklGsq48u4pfVPY=", - "requires": { - "has-flag": "^1.0.0" - } - } + "postcss": "^7.0.2", + "postcss-values-parser": "^2.0.0" } }, "postcss-flexbugs-fixes": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/postcss-flexbugs-fixes/-/postcss-flexbugs-fixes-3.2.0.tgz", - "integrity": "sha512-0AuD9HG1Ey3/3nqPWu9yqf7rL0KCPu5VgjDsjf5mzEcuo9H/z8nco/fljKgjsOUrZypa95MI0kS4xBZeBzz2lw==", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/postcss-flexbugs-fixes/-/postcss-flexbugs-fixes-4.1.0.tgz", + "integrity": "sha512-jr1LHxQvStNNAHlgco6PzY308zvLklh7SJVYuWUwyUQncofaAlD2l+P/gxKHOdqWKe7xJSkVLFF/2Tp+JqMSZA==", + "dev": true, "requires": { - "postcss": "^6.0.1" + "postcss": "^7.0.0" } }, - "postcss-load-config": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/postcss-load-config/-/postcss-load-config-1.2.0.tgz", - "integrity": "sha1-U56a/J3chiASHr+djDZz4M5Q0oo=", + "postcss-focus-visible": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/postcss-focus-visible/-/postcss-focus-visible-4.0.0.tgz", + "integrity": "sha512-Z5CkWBw0+idJHSV6+Bgf2peDOFf/x4o+vX/pwcNYrWpXFrSfTkQ3JQ1ojrq9yS+upnAlNRHeg8uEwFTgorjI8g==", + "dev": true, "requires": { - "cosmiconfig": "^2.1.0", - "object-assign": "^4.1.0", - "postcss-load-options": "^1.2.0", - "postcss-load-plugins": "^2.3.0" + "postcss": "^7.0.2" } }, - "postcss-load-options": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/postcss-load-options/-/postcss-load-options-1.2.0.tgz", - "integrity": "sha1-sJixVZ3awt8EvAuzdfmaXP4rbYw=", + "postcss-focus-within": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/postcss-focus-within/-/postcss-focus-within-3.0.0.tgz", + "integrity": "sha512-W0APui8jQeBKbCGZudW37EeMCjDeVxKgiYfIIEo8Bdh5SpB9sxds/Iq8SEuzS0Q4YFOlG7EPFulbbxujpkrV2w==", + "dev": true, "requires": { - "cosmiconfig": "^2.1.0", - "object-assign": "^4.1.0" + "postcss": "^7.0.2" } }, - "postcss-load-plugins": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/postcss-load-plugins/-/postcss-load-plugins-2.3.0.tgz", - "integrity": "sha1-dFdoEWWZrKLwCfrUJrABdQSdjZI=", + "postcss-font-variant": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/postcss-font-variant/-/postcss-font-variant-4.0.0.tgz", + "integrity": "sha512-M8BFYKOvCrI2aITzDad7kWuXXTm0YhGdP9Q8HanmN4EF1Hmcgs1KK5rSHylt/lUJe8yLxiSwWAHdScoEiIxztg==", + "dev": true, "requires": { - "cosmiconfig": "^2.1.1", - "object-assign": "^4.1.0" + "postcss": "^7.0.2" } }, - "postcss-loader": { - "version": "2.0.8", - "resolved": "https://registry.npmjs.org/postcss-loader/-/postcss-loader-2.0.8.tgz", - "integrity": "sha512-KtXBiQ/r/WYW8LxTSJK7h8wLqvCMSub/BqmRnud/Mu8RzwflW9cmXxwsMwbn15TNv287Hcufdb3ZSs7xHKnG8Q==", + "postcss-gap-properties": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/postcss-gap-properties/-/postcss-gap-properties-2.0.0.tgz", + "integrity": "sha512-QZSqDaMgXCHuHTEzMsS2KfVDOq7ZFiknSpkrPJY6jmxbugUPTuSzs/vuE5I3zv0WAS+3vhrlqhijiprnuQfzmg==", + "dev": true, "requires": { - "loader-utils": "^1.1.0", - "postcss": "^6.0.0", - "postcss-load-config": "^1.2.0", - "schema-utils": 
"^0.3.0" + "postcss": "^7.0.2" } }, - "postcss-merge-idents": { - "version": "2.1.7", - "resolved": "https://registry.npmjs.org/postcss-merge-idents/-/postcss-merge-idents-2.1.7.tgz", - "integrity": "sha1-TFUwMTwI4dWzu/PSu8dH4njuonA=", + "postcss-image-set-function": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/postcss-image-set-function/-/postcss-image-set-function-3.0.1.tgz", + "integrity": "sha512-oPTcFFip5LZy8Y/whto91L9xdRHCWEMs3e1MdJxhgt4jy2WYXfhkng59fH5qLXSCPN8k4n94p1Czrfe5IOkKUw==", + "dev": true, "requires": { - "has": "^1.0.1", - "postcss": "^5.0.10", - "postcss-value-parser": "^3.1.1" - }, - "dependencies": { - "has-flag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-1.0.0.tgz", - "integrity": "sha1-nZ55MWXOAXoA8AQYxD+UKnsdEfo=" - }, - "postcss": { - "version": "5.2.18", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-5.2.18.tgz", - "integrity": "sha512-zrUjRRe1bpXKsX1qAJNJjqZViErVuyEkMTRrwu4ud4sbTtIBRmtaYDrHmcGgmrbsW3MHfmtIf+vJumgQn+PrXg==", - "requires": { - "chalk": "^1.1.3", - "js-base64": "^2.1.9", - "source-map": "^0.5.6", - "supports-color": "^3.2.3" - } - }, - "supports-color": { - "version": "3.2.3", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-3.2.3.tgz", - "integrity": "sha1-ZawFBLOVQXHYpklGsq48u4pfVPY=", - "requires": { - "has-flag": "^1.0.0" - } - } + "postcss": "^7.0.2", + "postcss-values-parser": "^2.0.0" } }, - "postcss-merge-longhand": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/postcss-merge-longhand/-/postcss-merge-longhand-2.0.2.tgz", - "integrity": "sha1-I9kM0Sewp3mUkVMyc5A0oaTz1lg=", + "postcss-initial": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/postcss-initial/-/postcss-initial-3.0.2.tgz", + "integrity": "sha512-ugA2wKonC0xeNHgirR4D3VWHs2JcU08WAi1KFLVcnb7IN89phID6Qtg2RIctWbnvp1TM2BOmDtX8GGLCKdR8YA==", + "dev": true, "requires": { - "postcss": "^5.0.4" - }, - "dependencies": { - "has-flag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-1.0.0.tgz", - "integrity": "sha1-nZ55MWXOAXoA8AQYxD+UKnsdEfo=" - }, - "postcss": { - "version": "5.2.18", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-5.2.18.tgz", - "integrity": "sha512-zrUjRRe1bpXKsX1qAJNJjqZViErVuyEkMTRrwu4ud4sbTtIBRmtaYDrHmcGgmrbsW3MHfmtIf+vJumgQn+PrXg==", - "requires": { - "chalk": "^1.1.3", - "js-base64": "^2.1.9", - "source-map": "^0.5.6", - "supports-color": "^3.2.3" - } - }, - "supports-color": { - "version": "3.2.3", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-3.2.3.tgz", - "integrity": "sha1-ZawFBLOVQXHYpklGsq48u4pfVPY=", - "requires": { - "has-flag": "^1.0.0" - } - } + "lodash.template": "^4.5.0", + "postcss": "^7.0.2" } }, - "postcss-merge-rules": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/postcss-merge-rules/-/postcss-merge-rules-2.1.2.tgz", - "integrity": "sha1-0d9d+qexrMO+VT8OnhDofGG19yE=", + "postcss-lab-function": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/postcss-lab-function/-/postcss-lab-function-2.0.1.tgz", + "integrity": "sha512-whLy1IeZKY+3fYdqQFuDBf8Auw+qFuVnChWjmxm/UhHWqNHZx+B99EwxTvGYmUBqe3Fjxs4L1BoZTJmPu6usVg==", + "dev": true, "requires": { - "browserslist": "^1.5.2", - "caniuse-api": "^1.5.2", - "postcss": "^5.0.4", - "postcss-selector-parser": "^2.2.2", - "vendors": "^1.0.0" - }, - "dependencies": { - "browserslist": { - "version": "1.7.7", - "resolved": 
"https://registry.npmjs.org/browserslist/-/browserslist-1.7.7.tgz", - "integrity": "sha1-C9dnBCWL6CmyOYu1Dkti0aFmsLk=", - "requires": { - "caniuse-db": "^1.0.30000639", - "electron-to-chromium": "^1.2.7" - } - }, - "has-flag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-1.0.0.tgz", - "integrity": "sha1-nZ55MWXOAXoA8AQYxD+UKnsdEfo=" - }, - "postcss": { - "version": "5.2.18", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-5.2.18.tgz", - "integrity": "sha512-zrUjRRe1bpXKsX1qAJNJjqZViErVuyEkMTRrwu4ud4sbTtIBRmtaYDrHmcGgmrbsW3MHfmtIf+vJumgQn+PrXg==", - "requires": { - "chalk": "^1.1.3", - "js-base64": "^2.1.9", - "source-map": "^0.5.6", - "supports-color": "^3.2.3" - } - }, - "supports-color": { - "version": "3.2.3", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-3.2.3.tgz", - "integrity": "sha1-ZawFBLOVQXHYpklGsq48u4pfVPY=", - "requires": { - "has-flag": "^1.0.0" - } - } + "@csstools/convert-colors": "^1.4.0", + "postcss": "^7.0.2", + "postcss-values-parser": "^2.0.0" + } + }, + "postcss-load-config": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/postcss-load-config/-/postcss-load-config-2.1.0.tgz", + "integrity": "sha512-4pV3JJVPLd5+RueiVVB+gFOAa7GWc25XQcMp86Zexzke69mKf6Nx9LRcQywdz7yZI9n1udOxmLuAwTBypypF8Q==", + "dev": true, + "requires": { + "cosmiconfig": "^5.0.0", + "import-cwd": "^2.0.0" + } + }, + "postcss-loader": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/postcss-loader/-/postcss-loader-3.0.0.tgz", + "integrity": "sha512-cLWoDEY5OwHcAjDnkyRQzAXfs2jrKjXpO/HQFcc5b5u/r7aa471wdmChmwfnv7x2u840iat/wi0lQ5nbRgSkUA==", + "dev": true, + "requires": { + "loader-utils": "^1.1.0", + "postcss": "^7.0.0", + "postcss-load-config": "^2.0.0", + "schema-utils": "^1.0.0" + } + }, + "postcss-logical": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/postcss-logical/-/postcss-logical-3.0.0.tgz", + "integrity": "sha512-1SUKdJc2vuMOmeItqGuNaC+N8MzBWFWEkAnRnLpFYj1tGGa7NqyVBujfRtgNa2gXR+6RkGUiB2O5Vmh7E2RmiA==", + "dev": true, + "requires": { + "postcss": "^7.0.2" } }, - "postcss-message-helpers": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/postcss-message-helpers/-/postcss-message-helpers-2.0.0.tgz", - "integrity": "sha1-pPL0+rbk/gAvCu0ABHjN9S+bpg4=" + "postcss-media-minmax": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/postcss-media-minmax/-/postcss-media-minmax-4.0.0.tgz", + "integrity": "sha512-fo9moya6qyxsjbFAYl97qKO9gyre3qvbMnkOZeZwlsW6XYFsvs2DMGDlchVLfAd8LHPZDxivu/+qW2SMQeTHBw==", + "dev": true, + "requires": { + "postcss": "^7.0.2" + } }, - "postcss-minify-font-values": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/postcss-minify-font-values/-/postcss-minify-font-values-1.0.5.tgz", - "integrity": "sha1-S1jttWZB66fIR0qzUmyv17vey2k=", + "postcss-merge-longhand": { + "version": "4.0.11", + "resolved": "https://registry.npmjs.org/postcss-merge-longhand/-/postcss-merge-longhand-4.0.11.tgz", + "integrity": "sha512-alx/zmoeXvJjp7L4mxEMjh8lxVlDFX1gqWHzaaQewwMZiVhLo42TEClKaeHbRf6J7j82ZOdTJ808RtN0ZOZwvw==", + "dev": true, "requires": { - "object-assign": "^4.0.1", - "postcss": "^5.0.4", - "postcss-value-parser": "^3.0.2" + "css-color-names": "0.0.4", + "postcss": "^7.0.0", + "postcss-value-parser": "^3.0.0", + "stylehacks": "^4.0.0" + } + }, + "postcss-merge-rules": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/postcss-merge-rules/-/postcss-merge-rules-4.0.3.tgz", + "integrity": 
"sha512-U7e3r1SbvYzO0Jr3UT/zKBVgYYyhAz0aitvGIYOYK5CPmkNih+WDSsS5tvPrJ8YMQYlEMvsZIiqmn7HdFUaeEQ==", + "dev": true, + "requires": { + "browserslist": "^4.0.0", + "caniuse-api": "^3.0.0", + "cssnano-util-same-parent": "^4.0.0", + "postcss": "^7.0.0", + "postcss-selector-parser": "^3.0.0", + "vendors": "^1.0.0" }, "dependencies": { - "has-flag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-1.0.0.tgz", - "integrity": "sha1-nZ55MWXOAXoA8AQYxD+UKnsdEfo=" - }, - "postcss": { - "version": "5.2.18", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-5.2.18.tgz", - "integrity": "sha512-zrUjRRe1bpXKsX1qAJNJjqZViErVuyEkMTRrwu4ud4sbTtIBRmtaYDrHmcGgmrbsW3MHfmtIf+vJumgQn+PrXg==", - "requires": { - "chalk": "^1.1.3", - "js-base64": "^2.1.9", - "source-map": "^0.5.6", - "supports-color": "^3.2.3" - } - }, - "supports-color": { - "version": "3.2.3", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-3.2.3.tgz", - "integrity": "sha1-ZawFBLOVQXHYpklGsq48u4pfVPY=", + "postcss-selector-parser": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-3.1.2.tgz", + "integrity": "sha512-h7fJ/5uWuRVyOtkO45pnt1Ih40CEleeyCHzipqAZO2e5H20g25Y48uYnFUiShvY4rZWNJ/Bib/KVPmanaCtOhA==", + "dev": true, "requires": { - "has-flag": "^1.0.0" + "dot-prop": "^5.2.0", + "indexes-of": "^1.0.1", + "uniq": "^1.0.1" } } } }, + "postcss-minify-font-values": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/postcss-minify-font-values/-/postcss-minify-font-values-4.0.2.tgz", + "integrity": "sha512-j85oO6OnRU9zPf04+PZv1LYIYOprWm6IA6zkXkrJXyRveDEuQggG6tvoy8ir8ZwjLxLuGfNkCZEQG7zan+Hbtg==", + "dev": true, + "requires": { + "postcss": "^7.0.0", + "postcss-value-parser": "^3.0.0" + } + }, "postcss-minify-gradients": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/postcss-minify-gradients/-/postcss-minify-gradients-1.0.5.tgz", - "integrity": "sha1-Xb2hE3NwP4PPtKPqOIHY11/15uE=", + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/postcss-minify-gradients/-/postcss-minify-gradients-4.0.2.tgz", + "integrity": "sha512-qKPfwlONdcf/AndP1U8SJ/uzIJtowHlMaSioKzebAXSG4iJthlWC9iSWznQcX4f66gIWX44RSA841HTHj3wK+Q==", + "dev": true, "requires": { - "postcss": "^5.0.12", - "postcss-value-parser": "^3.3.0" - }, - "dependencies": { - "has-flag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-1.0.0.tgz", - "integrity": "sha1-nZ55MWXOAXoA8AQYxD+UKnsdEfo=" - }, - "postcss": { - "version": "5.2.18", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-5.2.18.tgz", - "integrity": "sha512-zrUjRRe1bpXKsX1qAJNJjqZViErVuyEkMTRrwu4ud4sbTtIBRmtaYDrHmcGgmrbsW3MHfmtIf+vJumgQn+PrXg==", - "requires": { - "chalk": "^1.1.3", - "js-base64": "^2.1.9", - "source-map": "^0.5.6", - "supports-color": "^3.2.3" - } - }, - "supports-color": { - "version": "3.2.3", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-3.2.3.tgz", - "integrity": "sha1-ZawFBLOVQXHYpklGsq48u4pfVPY=", - "requires": { - "has-flag": "^1.0.0" - } - } + "cssnano-util-get-arguments": "^4.0.0", + "is-color-stop": "^1.0.0", + "postcss": "^7.0.0", + "postcss-value-parser": "^3.0.0" } }, "postcss-minify-params": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/postcss-minify-params/-/postcss-minify-params-1.2.2.tgz", - "integrity": "sha1-rSzgcTc7lDs9kwo/pZo1jCjW8fM=", + "version": "4.0.2", + "resolved": 
"https://registry.npmjs.org/postcss-minify-params/-/postcss-minify-params-4.0.2.tgz", + "integrity": "sha512-G7eWyzEx0xL4/wiBBJxJOz48zAKV2WG3iZOqVhPet/9geefm/Px5uo1fzlHu+DOjT+m0Mmiz3jkQzVHe6wxAWg==", + "dev": true, "requires": { - "alphanum-sort": "^1.0.1", - "postcss": "^5.0.2", - "postcss-value-parser": "^3.0.2", + "alphanum-sort": "^1.0.0", + "browserslist": "^4.0.0", + "cssnano-util-get-arguments": "^4.0.0", + "postcss": "^7.0.0", + "postcss-value-parser": "^3.0.0", "uniqs": "^2.0.0" - }, - "dependencies": { - "has-flag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-1.0.0.tgz", - "integrity": "sha1-nZ55MWXOAXoA8AQYxD+UKnsdEfo=" - }, - "postcss": { - "version": "5.2.18", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-5.2.18.tgz", - "integrity": "sha512-zrUjRRe1bpXKsX1qAJNJjqZViErVuyEkMTRrwu4ud4sbTtIBRmtaYDrHmcGgmrbsW3MHfmtIf+vJumgQn+PrXg==", - "requires": { - "chalk": "^1.1.3", - "js-base64": "^2.1.9", - "source-map": "^0.5.6", - "supports-color": "^3.2.3" - } - }, - "supports-color": { - "version": "3.2.3", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-3.2.3.tgz", - "integrity": "sha1-ZawFBLOVQXHYpklGsq48u4pfVPY=", - "requires": { - "has-flag": "^1.0.0" - } - } } }, "postcss-minify-selectors": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/postcss-minify-selectors/-/postcss-minify-selectors-2.1.1.tgz", - "integrity": "sha1-ssapjAByz5G5MtGkllCBFDEXNb8=", + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/postcss-minify-selectors/-/postcss-minify-selectors-4.0.2.tgz", + "integrity": "sha512-D5S1iViljXBj9kflQo4YutWnJmwm8VvIsU1GeXJGiG9j8CIg9zs4voPMdQDUmIxetUOh60VilsNzCiAFTOqu3g==", + "dev": true, "requires": { - "alphanum-sort": "^1.0.2", - "has": "^1.0.1", - "postcss": "^5.0.14", - "postcss-selector-parser": "^2.0.0" + "alphanum-sort": "^1.0.0", + "has": "^1.0.0", + "postcss": "^7.0.0", + "postcss-selector-parser": "^3.0.0" }, "dependencies": { - "has-flag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-1.0.0.tgz", - "integrity": "sha1-nZ55MWXOAXoA8AQYxD+UKnsdEfo=" - }, - "postcss": { - "version": "5.2.18", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-5.2.18.tgz", - "integrity": "sha512-zrUjRRe1bpXKsX1qAJNJjqZViErVuyEkMTRrwu4ud4sbTtIBRmtaYDrHmcGgmrbsW3MHfmtIf+vJumgQn+PrXg==", - "requires": { - "chalk": "^1.1.3", - "js-base64": "^2.1.9", - "source-map": "^0.5.6", - "supports-color": "^3.2.3" - } - }, - "supports-color": { - "version": "3.2.3", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-3.2.3.tgz", - "integrity": "sha1-ZawFBLOVQXHYpklGsq48u4pfVPY=", + "postcss-selector-parser": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-3.1.2.tgz", + "integrity": "sha512-h7fJ/5uWuRVyOtkO45pnt1Ih40CEleeyCHzipqAZO2e5H20g25Y48uYnFUiShvY4rZWNJ/Bib/KVPmanaCtOhA==", + "dev": true, "requires": { - "has-flag": "^1.0.0" + "dot-prop": "^5.2.0", + "indexes-of": "^1.0.1", + "uniq": "^1.0.1" } } } }, "postcss-modules-extract-imports": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/postcss-modules-extract-imports/-/postcss-modules-extract-imports-1.2.1.tgz", - "integrity": "sha512-6jt9XZwUhwmRUhb/CkyJY020PYaPJsCyt3UjbaWo6XEbH/94Hmv6MP7fG2C5NDU/BcHzyGYxNtHvM+LTf9HrYw==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/postcss-modules-extract-imports/-/postcss-modules-extract-imports-2.0.0.tgz", + "integrity": 
"sha512-LaYLDNS4SG8Q5WAWqIJgdHPJrDDr/Lv775rMBFUbgjTz6j34lUznACHcdRWroPvXANP2Vj7yNK57vp9eFqzLWQ==", + "dev": true, "requires": { - "postcss": "^6.0.1" + "postcss": "^7.0.5" } }, "postcss-modules-local-by-default": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/postcss-modules-local-by-default/-/postcss-modules-local-by-default-1.2.0.tgz", - "integrity": "sha1-99gMOYxaOT+nlkRmvRlQCn1hwGk=", + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/postcss-modules-local-by-default/-/postcss-modules-local-by-default-3.0.2.tgz", + "integrity": "sha512-jM/V8eqM4oJ/22j0gx4jrp63GSvDH6v86OqyTHHUvk4/k1vceipZsaymiZ5PvocqZOl5SFHiFJqjs3la0wnfIQ==", + "dev": true, "requires": { - "css-selector-tokenizer": "^0.7.0", - "postcss": "^6.0.1" + "icss-utils": "^4.1.1", + "postcss": "^7.0.16", + "postcss-selector-parser": "^6.0.2", + "postcss-value-parser": "^4.0.0" + }, + "dependencies": { + "postcss-value-parser": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-4.0.3.tgz", + "integrity": "sha512-N7h4pG+Nnu5BEIzyeaaIYWs0LI5XC40OrRh5L60z0QjFsqGWcHcbkBvpe1WYpcIS9yQ8sOi/vIPt1ejQCrMVrg==", + "dev": true + } } }, "postcss-modules-scope": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/postcss-modules-scope/-/postcss-modules-scope-1.1.0.tgz", - "integrity": "sha1-1upkmUx5+XtipytCb75gVqGUu5A=", + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/postcss-modules-scope/-/postcss-modules-scope-2.2.0.tgz", + "integrity": "sha512-YyEgsTMRpNd+HmyC7H/mh3y+MeFWevy7V1evVhJWewmMbjDHIbZbOXICC2y+m1xI1UVfIT1HMW/O04Hxyu9oXQ==", + "dev": true, "requires": { - "css-selector-tokenizer": "^0.7.0", - "postcss": "^6.0.1" + "postcss": "^7.0.6", + "postcss-selector-parser": "^6.0.0" } }, "postcss-modules-values": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/postcss-modules-values/-/postcss-modules-values-1.3.0.tgz", - "integrity": "sha1-7P+p1+GSUYOJ9CrQ6D9yrsRW6iA=", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/postcss-modules-values/-/postcss-modules-values-3.0.0.tgz", + "integrity": "sha512-1//E5jCBrZ9DmRX+zCtmQtRSV6PV42Ix7Bzj9GbwJceduuf7IqP8MgeTXuRDHOWj2m0VzZD5+roFWDuU8RQjcg==", + "dev": true, + "requires": { + "icss-utils": "^4.0.0", + "postcss": "^7.0.6" + } + }, + "postcss-nesting": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/postcss-nesting/-/postcss-nesting-7.0.1.tgz", + "integrity": "sha512-FrorPb0H3nuVq0Sff7W2rnc3SmIcruVC6YwpcS+k687VxyxO33iE1amna7wHuRVzM8vfiYofXSBHNAZ3QhLvYg==", + "dev": true, + "requires": { + "postcss": "^7.0.2" + } + }, + "postcss-normalize": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/postcss-normalize/-/postcss-normalize-8.0.1.tgz", + "integrity": "sha512-rt9JMS/m9FHIRroDDBGSMsyW1c0fkvOJPy62ggxSHUldJO7B195TqFMqIf+lY5ezpDcYOV4j86aUp3/XbxzCCQ==", + "dev": true, "requires": { - "icss-replace-symbols": "^1.1.0", - "postcss": "^6.0.1" + "@csstools/normalize.css": "^10.1.0", + "browserslist": "^4.6.2", + "postcss": "^7.0.17", + "postcss-browser-comments": "^3.0.0", + "sanitize.css": "^10.0.0" } }, "postcss-normalize-charset": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/postcss-normalize-charset/-/postcss-normalize-charset-1.1.1.tgz", - "integrity": "sha1-757nEhLX/nWceO0WL2HtYrXLk/E=", + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/postcss-normalize-charset/-/postcss-normalize-charset-4.0.1.tgz", + "integrity": 
"sha512-gMXCrrlWh6G27U0hF3vNvR3w8I1s2wOBILvA87iNXaPvSNo5uZAMYsZG7XjCUf1eVxuPfyL4TJ7++SGZLc9A3g==", + "dev": true, "requires": { - "postcss": "^5.0.5" - }, - "dependencies": { - "has-flag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-1.0.0.tgz", - "integrity": "sha1-nZ55MWXOAXoA8AQYxD+UKnsdEfo=" - }, - "postcss": { - "version": "5.2.18", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-5.2.18.tgz", - "integrity": "sha512-zrUjRRe1bpXKsX1qAJNJjqZViErVuyEkMTRrwu4ud4sbTtIBRmtaYDrHmcGgmrbsW3MHfmtIf+vJumgQn+PrXg==", - "requires": { - "chalk": "^1.1.3", - "js-base64": "^2.1.9", - "source-map": "^0.5.6", - "supports-color": "^3.2.3" - } - }, - "supports-color": { - "version": "3.2.3", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-3.2.3.tgz", - "integrity": "sha1-ZawFBLOVQXHYpklGsq48u4pfVPY=", - "requires": { - "has-flag": "^1.0.0" - } - } + "postcss": "^7.0.0" + } + }, + "postcss-normalize-display-values": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/postcss-normalize-display-values/-/postcss-normalize-display-values-4.0.2.tgz", + "integrity": "sha512-3F2jcsaMW7+VtRMAqf/3m4cPFhPD3EFRgNs18u+k3lTJJlVe7d0YPO+bnwqo2xg8YiRpDXJI2u8A0wqJxMsQuQ==", + "dev": true, + "requires": { + "cssnano-util-get-match": "^4.0.0", + "postcss": "^7.0.0", + "postcss-value-parser": "^3.0.0" + } + }, + "postcss-normalize-positions": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/postcss-normalize-positions/-/postcss-normalize-positions-4.0.2.tgz", + "integrity": "sha512-Dlf3/9AxpxE+NF1fJxYDeggi5WwV35MXGFnnoccP/9qDtFrTArZ0D0R+iKcg5WsUd8nUYMIl8yXDCtcrT8JrdA==", + "dev": true, + "requires": { + "cssnano-util-get-arguments": "^4.0.0", + "has": "^1.0.0", + "postcss": "^7.0.0", + "postcss-value-parser": "^3.0.0" + } + }, + "postcss-normalize-repeat-style": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/postcss-normalize-repeat-style/-/postcss-normalize-repeat-style-4.0.2.tgz", + "integrity": "sha512-qvigdYYMpSuoFs3Is/f5nHdRLJN/ITA7huIoCyqqENJe9PvPmLhNLMu7QTjPdtnVf6OcYYO5SHonx4+fbJE1+Q==", + "dev": true, + "requires": { + "cssnano-util-get-arguments": "^4.0.0", + "cssnano-util-get-match": "^4.0.0", + "postcss": "^7.0.0", + "postcss-value-parser": "^3.0.0" + } + }, + "postcss-normalize-string": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/postcss-normalize-string/-/postcss-normalize-string-4.0.2.tgz", + "integrity": "sha512-RrERod97Dnwqq49WNz8qo66ps0swYZDSb6rM57kN2J+aoyEAJfZ6bMx0sx/F9TIEX0xthPGCmeyiam/jXif0eA==", + "dev": true, + "requires": { + "has": "^1.0.0", + "postcss": "^7.0.0", + "postcss-value-parser": "^3.0.0" + } + }, + "postcss-normalize-timing-functions": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/postcss-normalize-timing-functions/-/postcss-normalize-timing-functions-4.0.2.tgz", + "integrity": "sha512-acwJY95edP762e++00Ehq9L4sZCEcOPyaHwoaFOhIwWCDfik6YvqsYNxckee65JHLKzuNSSmAdxwD2Cud1Z54A==", + "dev": true, + "requires": { + "cssnano-util-get-match": "^4.0.0", + "postcss": "^7.0.0", + "postcss-value-parser": "^3.0.0" + } + }, + "postcss-normalize-unicode": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/postcss-normalize-unicode/-/postcss-normalize-unicode-4.0.1.tgz", + "integrity": "sha512-od18Uq2wCYn+vZ/qCOeutvHjB5jm57ToxRaMeNuf0nWVHaP9Hua56QyMF6fs/4FSUnVIw0CBPsU0K4LnBPwYwg==", + "dev": true, + "requires": { + "browserslist": "^4.0.0", + "postcss": "^7.0.0", + "postcss-value-parser": "^3.0.0" } }, "postcss-normalize-url": 
{ - "version": "3.0.8", - "resolved": "https://registry.npmjs.org/postcss-normalize-url/-/postcss-normalize-url-3.0.8.tgz", - "integrity": "sha1-EI90s/L82viRov+j6kWSJ5/HgiI=", + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/postcss-normalize-url/-/postcss-normalize-url-4.0.1.tgz", + "integrity": "sha512-p5oVaF4+IHwu7VpMan/SSpmpYxcJMtkGppYf0VbdH5B6hN8YNmVyJLuY9FmLQTzY3fag5ESUUHDqM+heid0UVA==", + "dev": true, "requires": { "is-absolute-url": "^2.0.0", - "normalize-url": "^1.4.0", - "postcss": "^5.0.14", - "postcss-value-parser": "^3.2.3" + "normalize-url": "^3.0.0", + "postcss": "^7.0.0", + "postcss-value-parser": "^3.0.0" }, "dependencies": { - "has-flag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-1.0.0.tgz", - "integrity": "sha1-nZ55MWXOAXoA8AQYxD+UKnsdEfo=" - }, - "postcss": { - "version": "5.2.18", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-5.2.18.tgz", - "integrity": "sha512-zrUjRRe1bpXKsX1qAJNJjqZViErVuyEkMTRrwu4ud4sbTtIBRmtaYDrHmcGgmrbsW3MHfmtIf+vJumgQn+PrXg==", - "requires": { - "chalk": "^1.1.3", - "js-base64": "^2.1.9", - "source-map": "^0.5.6", - "supports-color": "^3.2.3" - } - }, - "supports-color": { - "version": "3.2.3", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-3.2.3.tgz", - "integrity": "sha1-ZawFBLOVQXHYpklGsq48u4pfVPY=", - "requires": { - "has-flag": "^1.0.0" - } + "normalize-url": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/normalize-url/-/normalize-url-3.3.0.tgz", + "integrity": "sha512-U+JJi7duF1o+u2pynbp2zXDW2/PADgC30f0GsHZtRh+HOcXHnw137TrNlyxxRvWW5fjKd3bcLHPxofWuCjaeZg==", + "dev": true } } }, + "postcss-normalize-whitespace": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/postcss-normalize-whitespace/-/postcss-normalize-whitespace-4.0.2.tgz", + "integrity": "sha512-tO8QIgrsI3p95r8fyqKV+ufKlSHh9hMJqACqbv2XknufqEDhDvbguXGBBqxw9nsQoXWf0qOqppziKJKHMD4GtA==", + "dev": true, + "requires": { + "postcss": "^7.0.0", + "postcss-value-parser": "^3.0.0" + } + }, "postcss-ordered-values": { - "version": "2.2.3", - "resolved": "https://registry.npmjs.org/postcss-ordered-values/-/postcss-ordered-values-2.2.3.tgz", - "integrity": "sha1-7sbCpntsQSqNsgQud/6NpD+VwR0=", + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/postcss-ordered-values/-/postcss-ordered-values-4.1.2.tgz", + "integrity": "sha512-2fCObh5UanxvSxeXrtLtlwVThBvHn6MQcu4ksNT2tsaV2Fg76R2CV98W7wNSlX+5/pFwEyaDwKLLoEV7uRybAw==", + "dev": true, "requires": { - "postcss": "^5.0.4", - "postcss-value-parser": "^3.0.1" - }, - "dependencies": { - "has-flag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-1.0.0.tgz", - "integrity": "sha1-nZ55MWXOAXoA8AQYxD+UKnsdEfo=" - }, - "postcss": { - "version": "5.2.18", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-5.2.18.tgz", - "integrity": "sha512-zrUjRRe1bpXKsX1qAJNJjqZViErVuyEkMTRrwu4ud4sbTtIBRmtaYDrHmcGgmrbsW3MHfmtIf+vJumgQn+PrXg==", - "requires": { - "chalk": "^1.1.3", - "js-base64": "^2.1.9", - "source-map": "^0.5.6", - "supports-color": "^3.2.3" - } - }, - "supports-color": { - "version": "3.2.3", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-3.2.3.tgz", - "integrity": "sha1-ZawFBLOVQXHYpklGsq48u4pfVPY=", - "requires": { - "has-flag": "^1.0.0" - } - } + "cssnano-util-get-arguments": "^4.0.0", + "postcss": "^7.0.0", + "postcss-value-parser": "^3.0.0" } }, - "postcss-reduce-idents": { - "version": "2.4.0", - "resolved": 
"https://registry.npmjs.org/postcss-reduce-idents/-/postcss-reduce-idents-2.4.0.tgz", - "integrity": "sha1-wsbSDMlYKE9qv75j92Cb9AkFmtM=", + "postcss-overflow-shorthand": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/postcss-overflow-shorthand/-/postcss-overflow-shorthand-2.0.0.tgz", + "integrity": "sha512-aK0fHc9CBNx8jbzMYhshZcEv8LtYnBIRYQD5i7w/K/wS9c2+0NSR6B3OVMu5y0hBHYLcMGjfU+dmWYNKH0I85g==", + "dev": true, "requires": { - "postcss": "^5.0.4", - "postcss-value-parser": "^3.0.2" - }, - "dependencies": { - "has-flag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-1.0.0.tgz", - "integrity": "sha1-nZ55MWXOAXoA8AQYxD+UKnsdEfo=" - }, - "postcss": { - "version": "5.2.18", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-5.2.18.tgz", - "integrity": "sha512-zrUjRRe1bpXKsX1qAJNJjqZViErVuyEkMTRrwu4ud4sbTtIBRmtaYDrHmcGgmrbsW3MHfmtIf+vJumgQn+PrXg==", - "requires": { - "chalk": "^1.1.3", - "js-base64": "^2.1.9", - "source-map": "^0.5.6", - "supports-color": "^3.2.3" - } - }, - "supports-color": { - "version": "3.2.3", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-3.2.3.tgz", - "integrity": "sha1-ZawFBLOVQXHYpklGsq48u4pfVPY=", - "requires": { - "has-flag": "^1.0.0" - } - } + "postcss": "^7.0.2" + } + }, + "postcss-page-break": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/postcss-page-break/-/postcss-page-break-2.0.0.tgz", + "integrity": "sha512-tkpTSrLpfLfD9HvgOlJuigLuk39wVTbbd8RKcy8/ugV2bNBUW3xU+AIqyxhDrQr1VUj1RmyJrBn1YWrqUm9zAQ==", + "dev": true, + "requires": { + "postcss": "^7.0.2" + } + }, + "postcss-place": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/postcss-place/-/postcss-place-4.0.1.tgz", + "integrity": "sha512-Zb6byCSLkgRKLODj/5mQugyuj9bvAAw9LqJJjgwz5cYryGeXfFZfSXoP1UfveccFmeq0b/2xxwcTEVScnqGxBg==", + "dev": true, + "requires": { + "postcss": "^7.0.2", + "postcss-values-parser": "^2.0.0" } }, - "postcss-reduce-initial": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/postcss-reduce-initial/-/postcss-reduce-initial-1.0.1.tgz", - "integrity": "sha1-aPgGlfBF0IJjqHmtJA343WT2ROo=", + "postcss-preset-env": { + "version": "6.7.0", + "resolved": "https://registry.npmjs.org/postcss-preset-env/-/postcss-preset-env-6.7.0.tgz", + "integrity": "sha512-eU4/K5xzSFwUFJ8hTdTQzo2RBLbDVt83QZrAvI07TULOkmyQlnYlpwep+2yIK+K+0KlZO4BvFcleOCCcUtwchg==", + "dev": true, + "requires": { + "autoprefixer": "^9.6.1", + "browserslist": "^4.6.4", + "caniuse-lite": "^1.0.30000981", + "css-blank-pseudo": "^0.1.4", + "css-has-pseudo": "^0.10.0", + "css-prefers-color-scheme": "^3.1.1", + "cssdb": "^4.4.0", + "postcss": "^7.0.17", + "postcss-attribute-case-insensitive": "^4.0.1", + "postcss-color-functional-notation": "^2.0.1", + "postcss-color-gray": "^5.0.0", + "postcss-color-hex-alpha": "^5.0.3", + "postcss-color-mod-function": "^3.0.3", + "postcss-color-rebeccapurple": "^4.0.1", + "postcss-custom-media": "^7.0.8", + "postcss-custom-properties": "^8.0.11", + "postcss-custom-selectors": "^5.1.2", + "postcss-dir-pseudo-class": "^5.0.0", + "postcss-double-position-gradients": "^1.0.0", + "postcss-env-function": "^2.0.2", + "postcss-focus-visible": "^4.0.0", + "postcss-focus-within": "^3.0.0", + "postcss-font-variant": "^4.0.0", + "postcss-gap-properties": "^2.0.0", + "postcss-image-set-function": "^3.0.1", + "postcss-initial": "^3.0.0", + "postcss-lab-function": "^2.0.1", + "postcss-logical": "^3.0.0", + "postcss-media-minmax": "^4.0.0", + "postcss-nesting": "^7.0.0", + 
"postcss-overflow-shorthand": "^2.0.0", + "postcss-page-break": "^2.0.0", + "postcss-place": "^4.0.1", + "postcss-pseudo-class-any-link": "^6.0.0", + "postcss-replace-overflow-wrap": "^3.0.0", + "postcss-selector-matches": "^4.0.0", + "postcss-selector-not": "^4.0.0" + } + }, + "postcss-pseudo-class-any-link": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/postcss-pseudo-class-any-link/-/postcss-pseudo-class-any-link-6.0.0.tgz", + "integrity": "sha512-lgXW9sYJdLqtmw23otOzrtbDXofUdfYzNm4PIpNE322/swES3VU9XlXHeJS46zT2onFO7V1QFdD4Q9LiZj8mew==", + "dev": true, "requires": { - "postcss": "^5.0.4" + "postcss": "^7.0.2", + "postcss-selector-parser": "^5.0.0-rc.3" }, "dependencies": { - "has-flag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-1.0.0.tgz", - "integrity": "sha1-nZ55MWXOAXoA8AQYxD+UKnsdEfo=" - }, - "postcss": { - "version": "5.2.18", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-5.2.18.tgz", - "integrity": "sha512-zrUjRRe1bpXKsX1qAJNJjqZViErVuyEkMTRrwu4ud4sbTtIBRmtaYDrHmcGgmrbsW3MHfmtIf+vJumgQn+PrXg==", - "requires": { - "chalk": "^1.1.3", - "js-base64": "^2.1.9", - "source-map": "^0.5.6", - "supports-color": "^3.2.3" - } + "cssesc": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/cssesc/-/cssesc-2.0.0.tgz", + "integrity": "sha512-MsCAG1z9lPdoO/IUMLSBWBSVxVtJ1395VGIQ+Fc2gNdkQ1hNDnQdw3YhA71WJCBW1vdwA0cAnk/DnW6bqoEUYg==", + "dev": true }, - "supports-color": { - "version": "3.2.3", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-3.2.3.tgz", - "integrity": "sha1-ZawFBLOVQXHYpklGsq48u4pfVPY=", + "postcss-selector-parser": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-5.0.0.tgz", + "integrity": "sha512-w+zLE5Jhg6Liz8+rQOWEAwtwkyqpfnmsinXjXg6cY7YIONZZtgvE0v2O0uhQBs0peNomOJwWRKt6JBfTdTd3OQ==", + "dev": true, "requires": { - "has-flag": "^1.0.0" + "cssesc": "^2.0.0", + "indexes-of": "^1.0.1", + "uniq": "^1.0.1" } } } }, + "postcss-reduce-initial": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/postcss-reduce-initial/-/postcss-reduce-initial-4.0.3.tgz", + "integrity": "sha512-gKWmR5aUulSjbzOfD9AlJiHCGH6AEVLaM0AV+aSioxUDd16qXP1PCh8d1/BGVvpdWn8k/HiK7n6TjeoXN1F7DA==", + "dev": true, + "requires": { + "browserslist": "^4.0.0", + "caniuse-api": "^3.0.0", + "has": "^1.0.0", + "postcss": "^7.0.0" + } + }, "postcss-reduce-transforms": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/postcss-reduce-transforms/-/postcss-reduce-transforms-1.0.4.tgz", - "integrity": "sha1-/3b02CEkN7McKYpC0uFEQCV3GuE=", + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/postcss-reduce-transforms/-/postcss-reduce-transforms-4.0.2.tgz", + "integrity": "sha512-EEVig1Q2QJ4ELpJXMZR8Vt5DQx8/mo+dGWSR7vWXqcob2gQLyQGsionYcGKATXvQzMPn6DSN1vTN7yFximdIAg==", + "dev": true, "requires": { - "has": "^1.0.1", - "postcss": "^5.0.8", - "postcss-value-parser": "^3.0.1" - }, - "dependencies": { - "has-flag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-1.0.0.tgz", - "integrity": "sha1-nZ55MWXOAXoA8AQYxD+UKnsdEfo=" - }, - "postcss": { - "version": "5.2.18", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-5.2.18.tgz", - "integrity": "sha512-zrUjRRe1bpXKsX1qAJNJjqZViErVuyEkMTRrwu4ud4sbTtIBRmtaYDrHmcGgmrbsW3MHfmtIf+vJumgQn+PrXg==", - "requires": { - "chalk": "^1.1.3", - "js-base64": "^2.1.9", - "source-map": "^0.5.6", - "supports-color": "^3.2.3" - } - }, - 
"supports-color": { - "version": "3.2.3", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-3.2.3.tgz", - "integrity": "sha1-ZawFBLOVQXHYpklGsq48u4pfVPY=", - "requires": { - "has-flag": "^1.0.0" - } - } + "cssnano-util-get-match": "^4.0.0", + "has": "^1.0.0", + "postcss": "^7.0.0", + "postcss-value-parser": "^3.0.0" + } + }, + "postcss-replace-overflow-wrap": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/postcss-replace-overflow-wrap/-/postcss-replace-overflow-wrap-3.0.0.tgz", + "integrity": "sha512-2T5hcEHArDT6X9+9dVSPQdo7QHzG4XKclFT8rU5TzJPDN7RIRTbO9c4drUISOVemLj03aezStHCR2AIcr8XLpw==", + "dev": true, + "requires": { + "postcss": "^7.0.2" + } + }, + "postcss-safe-parser": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/postcss-safe-parser/-/postcss-safe-parser-4.0.1.tgz", + "integrity": "sha512-xZsFA3uX8MO3yAda03QrG3/Eg1LN3EPfjjf07vke/46HERLZyHrTsQ9E1r1w1W//fWEhtYNndo2hQplN2cVpCQ==", + "dev": true, + "requires": { + "postcss": "^7.0.0" + } + }, + "postcss-selector-matches": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/postcss-selector-matches/-/postcss-selector-matches-4.0.0.tgz", + "integrity": "sha512-LgsHwQR/EsRYSqlwdGzeaPKVT0Ml7LAT6E75T8W8xLJY62CE4S/l03BWIt3jT8Taq22kXP08s2SfTSzaraoPww==", + "dev": true, + "requires": { + "balanced-match": "^1.0.0", + "postcss": "^7.0.2" + } + }, + "postcss-selector-not": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/postcss-selector-not/-/postcss-selector-not-4.0.0.tgz", + "integrity": "sha512-W+bkBZRhqJaYN8XAnbbZPLWMvZD1wKTu0UxtFKdhtGjWYmxhkUneoeOhRJKdAE5V7ZTlnbHfCR+6bNwK9e1dTQ==", + "dev": true, + "requires": { + "balanced-match": "^1.0.0", + "postcss": "^7.0.2" } }, "postcss-selector-parser": { - "version": "2.2.3", - "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-2.2.3.tgz", - "integrity": "sha1-+UN3iGBsPJrO4W/+jYsWKX8nu5A=", + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.0.2.tgz", + "integrity": "sha512-36P2QR59jDTOAiIkqEprfJDsoNrvwFei3eCqKd1Y0tUsBimsq39BLp7RD+JWny3WgB1zGhJX8XVePwm9k4wdBg==", + "dev": true, "requires": { - "flatten": "^1.0.2", + "cssesc": "^3.0.0", "indexes-of": "^1.0.1", "uniq": "^1.0.1" } }, "postcss-svgo": { - "version": "2.1.6", - "resolved": "https://registry.npmjs.org/postcss-svgo/-/postcss-svgo-2.1.6.tgz", - "integrity": "sha1-tt8YqmE7Zm4TPwittSGcJoSsEI0=", + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/postcss-svgo/-/postcss-svgo-4.0.2.tgz", + "integrity": "sha512-C6wyjo3VwFm0QgBy+Fu7gCYOkCmgmClghO+pjcxvrcBKtiKt0uCF+hvbMO1fyv5BMImRK90SMb+dwUnfbGd+jw==", + "dev": true, "requires": { - "is-svg": "^2.0.0", - "postcss": "^5.0.14", - "postcss-value-parser": "^3.2.3", - "svgo": "^0.7.0" - }, - "dependencies": { - "has-flag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-1.0.0.tgz", - "integrity": "sha1-nZ55MWXOAXoA8AQYxD+UKnsdEfo=" - }, - "postcss": { - "version": "5.2.18", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-5.2.18.tgz", - "integrity": "sha512-zrUjRRe1bpXKsX1qAJNJjqZViErVuyEkMTRrwu4ud4sbTtIBRmtaYDrHmcGgmrbsW3MHfmtIf+vJumgQn+PrXg==", - "requires": { - "chalk": "^1.1.3", - "js-base64": "^2.1.9", - "source-map": "^0.5.6", - "supports-color": "^3.2.3" - } - }, - "supports-color": { - "version": "3.2.3", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-3.2.3.tgz", - "integrity": "sha1-ZawFBLOVQXHYpklGsq48u4pfVPY=", 
- "requires": { - "has-flag": "^1.0.0" - } - } + "is-svg": "^3.0.0", + "postcss": "^7.0.0", + "postcss-value-parser": "^3.0.0", + "svgo": "^1.0.0" } }, "postcss-unique-selectors": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/postcss-unique-selectors/-/postcss-unique-selectors-2.0.2.tgz", - "integrity": "sha1-mB1X0p3csz57Hf4f1DuGSfkzyh0=", + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/postcss-unique-selectors/-/postcss-unique-selectors-4.0.1.tgz", + "integrity": "sha512-+JanVaryLo9QwZjKrmJgkI4Fn8SBgRO6WXQBJi7KiAVPlmxikB5Jzc4EvXMT2H0/m0RjrVVm9rGNhZddm/8Spg==", + "dev": true, "requires": { - "alphanum-sort": "^1.0.1", - "postcss": "^5.0.4", + "alphanum-sort": "^1.0.0", + "postcss": "^7.0.0", "uniqs": "^2.0.0" - }, - "dependencies": { - "has-flag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-1.0.0.tgz", - "integrity": "sha1-nZ55MWXOAXoA8AQYxD+UKnsdEfo=" - }, - "postcss": { - "version": "5.2.18", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-5.2.18.tgz", - "integrity": "sha512-zrUjRRe1bpXKsX1qAJNJjqZViErVuyEkMTRrwu4ud4sbTtIBRmtaYDrHmcGgmrbsW3MHfmtIf+vJumgQn+PrXg==", - "requires": { - "chalk": "^1.1.3", - "js-base64": "^2.1.9", - "source-map": "^0.5.6", - "supports-color": "^3.2.3" - } - }, - "supports-color": { - "version": "3.2.3", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-3.2.3.tgz", - "integrity": "sha1-ZawFBLOVQXHYpklGsq48u4pfVPY=", - "requires": { - "has-flag": "^1.0.0" - } - } } }, "postcss-value-parser": { @@ -10814,106 +15245,99 @@ "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz", "integrity": "sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ==" }, - "postcss-zindex": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/postcss-zindex/-/postcss-zindex-2.2.0.tgz", - "integrity": "sha1-0hCd3AVbka9n/EyzsCWUZjnSryI=", + "postcss-values-parser": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/postcss-values-parser/-/postcss-values-parser-2.0.1.tgz", + "integrity": "sha512-2tLuBsA6P4rYTNKCXYG/71C7j1pU6pK503suYOmn4xYrQIzW+opD+7FAFNuGSdZC/3Qfy334QbeMu7MEb8gOxg==", + "dev": true, "requires": { - "has": "^1.0.1", - "postcss": "^5.0.4", - "uniqs": "^2.0.0" - }, - "dependencies": { - "has-flag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-1.0.0.tgz", - "integrity": "sha1-nZ55MWXOAXoA8AQYxD+UKnsdEfo=" - }, - "postcss": { - "version": "5.2.18", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-5.2.18.tgz", - "integrity": "sha512-zrUjRRe1bpXKsX1qAJNJjqZViErVuyEkMTRrwu4ud4sbTtIBRmtaYDrHmcGgmrbsW3MHfmtIf+vJumgQn+PrXg==", - "requires": { - "chalk": "^1.1.3", - "js-base64": "^2.1.9", - "source-map": "^0.5.6", - "supports-color": "^3.2.3" - } - }, - "supports-color": { - "version": "3.2.3", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-3.2.3.tgz", - "integrity": "sha1-ZawFBLOVQXHYpklGsq48u4pfVPY=", - "requires": { - "has-flag": "^1.0.0" - } - } + "flatten": "^1.0.2", + "indexes-of": "^1.0.1", + "uniq": "^1.0.1" } }, "prelude-ls": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.1.2.tgz", - "integrity": "sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ=" + "integrity": "sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ=", + "dev": true }, "prepend-http": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/prepend-http/-/prepend-http-1.0.4.tgz", - "integrity": 
"sha1-1PRWKwzjaW5BrFLQ4ALlemNdxtw=" + "integrity": "sha1-1PRWKwzjaW5BrFLQ4ALlemNdxtw=", + "dev": true }, - "preserve": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/preserve/-/preserve-0.2.0.tgz", - "integrity": "sha1-gV7R9uvGWSb4ZbMQwHE7yzMVzks=" + "prettier": { + "version": "1.19.1", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-1.19.1.tgz", + "integrity": "sha512-s7PoyDv/II1ObgQunCbB9PdLmUcBZcnWOcxDh7O0N/UwDEsHyqkW+Qh28jW+mVuCdx7gLB0BotYI1Y6uI9iyew==", + "dev": true }, "pretty-bytes": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/pretty-bytes/-/pretty-bytes-4.0.2.tgz", - "integrity": "sha1-sr+C5zUNZcbDOqlaqlpPYyf2HNk=" + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/pretty-bytes/-/pretty-bytes-5.3.0.tgz", + "integrity": "sha512-hjGrh+P926p4R4WbaB6OckyRtO0F0/lQBiT+0gnxjV+5kjPBrfVBFCsCLbMqVQeydvIoouYTCmmEURiH3R1Bdg==", + "dev": true }, "pretty-error": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/pretty-error/-/pretty-error-2.1.1.tgz", "integrity": "sha1-X0+HyPkeWuPzuoerTPXgOxoX8aM=", + "dev": true, "requires": { "renderkid": "^2.0.1", "utila": "~0.4" } }, "pretty-format": { - "version": "22.4.3", - "resolved": "http://registry.npmjs.org/pretty-format/-/pretty-format-22.4.3.tgz", - "integrity": "sha512-S4oT9/sT6MN7/3COoOy+ZJeA92VmOnveLHgrwBE3Z1W5N9S2A1QGNYiE1z75DAENbJrXXUb+OWXhpJcg05QKQQ==", + "version": "24.9.0", + "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-24.9.0.tgz", + "integrity": "sha512-00ZMZUiHaJrNfk33guavqgvfJS30sLYf0f8+Srklv0AMPodGGHcoHgksZ3OThYnIvOd+8yMCn0YiEOogjlgsnA==", + "dev": true, "requires": { - "ansi-regex": "^3.0.0", - "ansi-styles": "^3.2.0" + "@jest/types": "^24.9.0", + "ansi-regex": "^4.0.0", + "ansi-styles": "^3.2.0", + "react-is": "^16.8.4" }, "dependencies": { "ansi-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", - "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=" + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", + "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", + "dev": true + }, + "react-is": { + "version": "16.13.1", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz", + "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==", + "dev": true } } }, "private": { "version": "0.1.8", "resolved": "https://registry.npmjs.org/private/-/private-0.1.8.tgz", - "integrity": "sha512-VvivMrbvd2nKkiG38qjULzlc+4Vx4wm/whI9pQD35YrARNnhxeiRktSOhSukRLFNlzg6Br/cJPet5J/u19r/mg==" + "integrity": "sha512-VvivMrbvd2nKkiG38qjULzlc+4Vx4wm/whI9pQD35YrARNnhxeiRktSOhSukRLFNlzg6Br/cJPet5J/u19r/mg==", + "dev": true }, "process": { "version": "0.11.10", "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", - "integrity": "sha1-czIwDoQBYb2j5podHZGn1LwW8YI=" + "integrity": "sha1-czIwDoQBYb2j5podHZGn1LwW8YI=", + "dev": true }, "process-nextick-args": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.0.tgz", - "integrity": "sha512-MtEC1TqN0EU5nephaJ4rAtThHtC86dNN9qCuEhtshvpVBkAW5ZO7BASN9REnF9eoXGcRub+pFuKEpOHE+HbEMw==" + "integrity": "sha512-MtEC1TqN0EU5nephaJ4rAtThHtC86dNN9qCuEhtshvpVBkAW5ZO7BASN9REnF9eoXGcRub+pFuKEpOHE+HbEMw==", + "dev": true }, "progress": { - "version": "1.1.8", - "resolved": "https://registry.npmjs.org/progress/-/progress-1.1.8.tgz", - 
"integrity": "sha1-4mDHj2Fhzdmw5WzD4Khd4Xx6V74=", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/progress/-/progress-2.0.3.tgz", + "integrity": "sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==", "dev": true }, "promise": { @@ -10927,7 +15351,18 @@ "promise-inflight": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/promise-inflight/-/promise-inflight-1.0.1.tgz", - "integrity": "sha1-mEcocL8igTL8vdhoEputEsPAKeM=" + "integrity": "sha1-mEcocL8igTL8vdhoEputEsPAKeM=", + "dev": true + }, + "prompts": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/prompts/-/prompts-2.3.2.tgz", + "integrity": "sha512-Q06uKs2CkNYVID0VqwfAl9mipo99zkBv/n2JtWY89Yxa3ZabWSrs0e2KTudKVa3peLUvYXMefDqIleLPVUBZMA==", + "dev": true, + "requires": { + "kleur": "^3.0.3", + "sisteransi": "^1.0.4" + } }, "prop-types": { "version": "15.6.2", @@ -10938,40 +15373,44 @@ "object-assign": "^4.1.1" } }, + "prop-types-exact": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/prop-types-exact/-/prop-types-exact-1.2.0.tgz", + "integrity": "sha512-K+Tk3Kd9V0odiXFP9fwDHUYRyvK3Nun3GVyPapSIs5OBkITAm15W0CPFD/YKTkMUAbc0b9CUwRQp2ybiBIq+eA==", + "dev": true, + "requires": { + "has": "^1.0.3", + "object.assign": "^4.1.0", + "reflect.ownkeys": "^0.2.0" + } + }, "proxy-addr": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.4.tgz", - "integrity": "sha512-5erio2h9jp5CHGwcybmxmVqHmnCBZeewlfJ0pex+UW7Qny7OOZXTtH56TGNyBizkgiOwhJtMKrVzDTeKcySZwA==", + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.6.tgz", + "integrity": "sha512-dh/frvCBVmSsDYzw6n926jv974gddhkFPfiN8hPOi30Wax25QZyZEGveluCgliBnqmuM+UJmBErbAUFIoDbjOw==", + "dev": true, "requires": { "forwarded": "~0.1.2", - "ipaddr.js": "1.8.0" + "ipaddr.js": "1.9.1" } }, - "proxy-from-env": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.0.0.tgz", - "integrity": "sha1-M8UDmPcOp+uW0h97gXYwpVeRx+4=", - "dev": true - }, "prr": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/prr/-/prr-1.0.1.tgz", - "integrity": "sha1-0/wRS6BplaRexok/SEzrHXj19HY=" - }, - "pseudomap": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/pseudomap/-/pseudomap-1.0.2.tgz", - "integrity": "sha1-8FKijacOYYkX7wqKw0wa5aaChrM=" + "integrity": "sha1-0/wRS6BplaRexok/SEzrHXj19HY=", + "dev": true }, "psl": { "version": "1.1.31", "resolved": "https://registry.npmjs.org/psl/-/psl-1.1.31.tgz", - "integrity": "sha512-/6pt4+C+T+wZUieKR620OpzN/LlnNKuWjy1iFLQ/UG35JqHlR/89MP1d96dUfkf6Dne3TuLQzOYEYshJ+Hx8mw==" + "integrity": "sha512-/6pt4+C+T+wZUieKR620OpzN/LlnNKuWjy1iFLQ/UG35JqHlR/89MP1d96dUfkf6Dne3TuLQzOYEYshJ+Hx8mw==", + "dev": true }, "public-encrypt": { "version": "4.0.3", "resolved": "https://registry.npmjs.org/public-encrypt/-/public-encrypt-4.0.3.tgz", "integrity": "sha512-zVpa8oKZSz5bTMTFClc1fQOnyyEzpl5ozpi1B5YcvBrdohMjH2rfsBtyXcuNuwjsDIXmBYlF2N5FlJYhR29t8Q==", + "dev": true, "requires": { "bn.js": "^4.1.0", "browserify-rsa": "^4.0.0", @@ -10985,6 +15424,7 @@ "version": "2.0.1", "resolved": "https://registry.npmjs.org/pump/-/pump-2.0.1.tgz", "integrity": "sha512-ruPMNRkN3MHP1cWJc9OWr+T/xDP0jhXYCLfJcBuX54hhfIBnaQmAUMfDcG4DM5UMWByBbJY69QSphm3jtDKIkA==", + "dev": true, "requires": { "end-of-stream": "^1.1.0", "once": "^1.3.1" @@ -10994,6 +15434,7 @@ "version": "1.5.1", "resolved": "https://registry.npmjs.org/pumpify/-/pumpify-1.5.1.tgz", "integrity": 
"sha512-oClZI37HvuUJJxSKKrC17bZ9Cu0ZYhEAGPsPUy9KlMUmv9dKX2o77RUmq7f3XjIxbwyGwYzbzQ1L2Ks8sIradQ==", + "dev": true, "requires": { "duplexify": "^3.6.0", "inherits": "^2.0.3", @@ -11003,76 +15444,26 @@ "punycode": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", - "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==" - }, - "puppeteer": { - "version": "1.11.0", - "resolved": "https://registry.npmjs.org/puppeteer/-/puppeteer-1.11.0.tgz", - "integrity": "sha512-iG4iMOHixc2EpzqRV+pv7o3GgmU2dNYEMkvKwSaQO/vMZURakwSOn/EYJ6OIRFYOque1qorzIBvrytPIQB3YzQ==", - "dev": true, - "requires": { - "debug": "^4.1.0", - "extract-zip": "^1.6.6", - "https-proxy-agent": "^2.2.1", - "mime": "^2.0.3", - "progress": "^2.0.1", - "proxy-from-env": "^1.0.0", - "rimraf": "^2.6.1", - "ws": "^6.1.0" - }, - "dependencies": { - "debug": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", - "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", - "dev": true, - "requires": { - "ms": "^2.1.1" - } - }, - "mime": { - "version": "2.4.0", - "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.0.tgz", - "integrity": "sha512-ikBcWwyqXQSHKtciCcctu9YfPbFYZ4+gbHEmE0Q8jzcTYQg5dHCr3g2wwAZjPoJfQVXZq6KXAjpXOTf5/cjT7w==", - "dev": true - }, - "ms": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.1.tgz", - "integrity": "sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg==", - "dev": true - }, - "progress": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/progress/-/progress-2.0.3.tgz", - "integrity": "sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==", - "dev": true - }, - "ws": { - "version": "6.1.2", - "resolved": "https://registry.npmjs.org/ws/-/ws-6.1.2.tgz", - "integrity": "sha512-rfUqzvz0WxmSXtJpPMX2EeASXabOrSMk1ruMOV3JBTBjo4ac2lDjGGsbQSyxj8Odhw5fBib8ZKEjDNvgouNKYw==", - "dev": true, - "requires": { - "async-limiter": "~1.0.0" - } - } - } + "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==", + "dev": true }, "q": { "version": "1.5.1", "resolved": "https://registry.npmjs.org/q/-/q-1.5.1.tgz", - "integrity": "sha1-fjL3W0E4EpHQRhHxvxQQmsAGUdc=" + "integrity": "sha1-fjL3W0E4EpHQRhHxvxQQmsAGUdc=", + "dev": true }, "qs": { "version": "6.5.2", "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz", - "integrity": "sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA==" + "integrity": "sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA==", + "dev": true }, "query-string": { "version": "4.3.4", "resolved": "https://registry.npmjs.org/query-string/-/query-string-4.3.4.tgz", "integrity": "sha1-u7aTucqRXCMlFbIosaArYJBD2+s=", + "dev": true, "requires": { "object-assign": "^4.1.0", "strict-uri-encode": "^1.0.0" @@ -11081,17 +15472,20 @@ "querystring": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz", - "integrity": "sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA=" + "integrity": "sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA=", + "dev": true }, "querystring-es3": { "version": "0.2.1", "resolved": "https://registry.npmjs.org/querystring-es3/-/querystring-es3-0.2.1.tgz", - "integrity": "sha1-nsYfeQSYdXB9aUFFlv2Qek1xHnM=" + "integrity": 
"sha1-nsYfeQSYdXB9aUFFlv2Qek1xHnM=", + "dev": true }, "querystringify": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.1.0.tgz", - "integrity": "sha512-sluvZZ1YiTLD5jsqZcDmFyV2EwToyXZBfpoVOmktMmW+VEnhgakFHnasVph65fOjGPTWN0Nw3+XQaSeMayr0kg==" + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.1.1.tgz", + "integrity": "sha512-w7fLxIRCRT7U8Qu53jQnJyPkYZIaR4n5151KMfcJlO/A9397Wxb1amJvROTK6TOnp7PfoAmg/qXiNHI+08jRfA==", + "dev": true }, "raf": { "version": "3.4.0", @@ -11117,27 +15511,11 @@ "ret": "~0.1.10" } }, - "randomatic": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/randomatic/-/randomatic-3.1.1.tgz", - "integrity": "sha512-TuDE5KxZ0J461RVjrJZCJc+J+zCkTb1MbH9AQUq68sMhOMcy9jLcb3BrZKgp9q9Ncltdg4QVqWrH02W2EFFVYw==", - "requires": { - "is-number": "^4.0.0", - "kind-of": "^6.0.0", - "math-random": "^1.0.1" - }, - "dependencies": { - "is-number": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-4.0.0.tgz", - "integrity": "sha512-rSklcAIlf1OmFdyAqbnWTLVelsQ58uvZ66S/ZyawjWqIviTWCjg2PzVGw8WUA+nNuPTqb4wgA+NszrJ+08LlgQ==" - } - } - }, "randombytes": { "version": "2.0.6", "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.0.6.tgz", "integrity": "sha512-CIQ5OFxf4Jou6uOKe9t1AOgqpeU5fd70A8NPdHSGeYXqXsPe6peOwI0cUl88RWZ6sP1vPMV3avd/R6cZ5/sP1A==", + "dev": true, "requires": { "safe-buffer": "^5.1.0" } @@ -11146,62 +15524,76 @@ "version": "1.0.4", "resolved": "https://registry.npmjs.org/randomfill/-/randomfill-1.0.4.tgz", "integrity": "sha512-87lcbR8+MhcWcUiQ+9e+Rwx8MyR2P7qnt15ynUlbm3TU/fjbgz4GsvfSUDTemtCCtVCqb4ZcEFlyPNTh9bBTLw==", + "dev": true, "requires": { "randombytes": "^2.0.5", "safe-buffer": "^5.1.0" } }, "range-parser": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.0.tgz", - "integrity": "sha1-9JvmtIeJTdxA3MlKMi9hEJLgDV4=" + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", + "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==", + "dev": true }, "raw-body": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.3.3.tgz", - "integrity": "sha512-9esiElv1BrZoI3rCDuOuKCBRbuApGGaDPQfjSflGxdy4oyzqghxu6klEkkVIvBje+FF0BX9coEv8KqW6X/7njw==", + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.4.0.tgz", + "integrity": "sha512-4Oz8DUIwdvoa5qMJelxipzi/iJIi40O5cGV1wNYp5hvZP8ZN0T+jiNkL0QepXs+EsQ9XJ8ipEDoiH70ySUJP3Q==", + "dev": true, "requires": { - "bytes": "3.0.0", - "http-errors": "1.6.3", - "iconv-lite": "0.4.23", + "bytes": "3.1.0", + "http-errors": "1.7.2", + "iconv-lite": "0.4.24", "unpipe": "1.0.0" }, "dependencies": { - "iconv-lite": { - "version": "0.4.23", - "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.23.tgz", - "integrity": "sha512-neyTUVFtahjf0mB3dZT77u+8O0QB89jFdnBkd5P1JgYPbPaia3gXXOVL2fq8VyU2gMMD7SaN7QukTB/pmXYvDA==", + "bytes": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.0.tgz", + "integrity": "sha512-zauLjrfCG+xvoyaqLoV8bLVXXNGC4JqlxFCutSDWA6fJrTo2ZuvLYTqZ7aHBLZSMOopbzwv8f+wZcVzfVTI2Dg==", + "dev": true + }, + "http-errors": { + "version": "1.7.2", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.7.2.tgz", + "integrity": 
"sha512-uUQBt3H/cSIVfch6i1EuPNy/YsRSOUBXTVfZ+yR7Zjez3qjBz6i9+i4zjNaoqcoFVI4lQJ5plg63TvGfRSDCRg==", + "dev": true, "requires": { - "safer-buffer": ">= 2.1.2 < 3" + "depd": "~1.1.2", + "inherits": "2.0.3", + "setprototypeof": "1.1.1", + "statuses": ">= 1.5.0 < 2", + "toidentifier": "1.0.0" } + }, + "setprototypeof": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.1.1.tgz", + "integrity": "sha512-JvdAWfbXeIGaZ9cILp38HntZSFSo3mWg6xGcJJsd+d4aRMOqauag1C63dJfDw7OaMYwEbHMOxEZ1lqVRYP2OAw==", + "dev": true + }, + "statuses": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz", + "integrity": "sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow=", + "dev": true } } }, - "rc": { - "version": "1.2.8", - "resolved": "https://registry.npmjs.org/rc/-/rc-1.2.8.tgz", - "integrity": "sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==", - "requires": { - "deep-extend": "^0.6.0", - "ini": "~1.3.0", - "minimist": "^1.2.0", - "strip-json-comments": "~2.0.1" - } - }, "re-resizable": { "version": "4.11.0", "resolved": "https://registry.npmjs.org/re-resizable/-/re-resizable-4.11.0.tgz", "integrity": "sha512-dye+7rERqNf/6mDT1iwps+4Gf42420xuZgygF33uX178DxffqcyeuHbBuJ382FIcB5iP6mMZOhfW7kI0uXwb/Q==" }, "react": { - "version": "16.7.0", - "resolved": "https://registry.npmjs.org/react/-/react-16.7.0.tgz", - "integrity": "sha512-StCz3QY8lxTb5cl2HJxjwLFOXPIFQp+p+hxQfc8WE0QiLfCtIlKj8/+5tjjKm8uSTlAW+fCPaavGFS06V9Ar3A==", + "version": "16.12.0", + "resolved": "https://registry.npmjs.org/react/-/react-16.12.0.tgz", + "integrity": "sha512-fglqy3k5E+81pA8s+7K0/T3DBCF0ZDOher1elBFzF7O6arXJgzyu/FW+COxFvAWXJoJN9KIZbT2LXlukwphYTA==", "requires": { "loose-envify": "^1.1.0", "object-assign": "^4.1.1", - "prop-types": "^15.6.2", - "scheduler": "^0.12.0" + "prop-types": "^15.6.2" } }, "react-ace": { @@ -11234,40 +15626,369 @@ } } }, + "react-app-polyfill": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/react-app-polyfill/-/react-app-polyfill-1.0.6.tgz", + "integrity": "sha512-OfBnObtnGgLGfweORmdZbyEz+3dgVePQBb3zipiaDsMHV1NpWm0rDFYIVXFV/AK+x4VIIfWHhrdMIeoTLyRr2g==", + "dev": true, + "requires": { + "core-js": "^3.5.0", + "object-assign": "^4.1.1", + "promise": "^8.0.3", + "raf": "^3.4.1", + "regenerator-runtime": "^0.13.3", + "whatwg-fetch": "^3.0.0" + }, + "dependencies": { + "core-js": { + "version": "3.6.4", + "resolved": "https://registry.npmjs.org/core-js/-/core-js-3.6.4.tgz", + "integrity": "sha512-4paDGScNgZP2IXXilaffL9X7968RuvwlkK3xWtZRVqgd8SYNiVKRJvkFd1aqqEuPfN7E68ZHEp9hDj6lHj4Hyw==", + "dev": true + }, + "promise": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/promise/-/promise-8.1.0.tgz", + "integrity": "sha512-W04AqnILOL/sPRXziNicCjSNRruLAuIHEOVBazepu0545DDNGYHz7ar9ZgZ1fMU8/MA4mVxp5rkBWRi6OXIy3Q==", + "dev": true, + "requires": { + "asap": "~2.0.6" + } + }, + "raf": { + "version": "3.4.1", + "resolved": "https://registry.npmjs.org/raf/-/raf-3.4.1.tgz", + "integrity": "sha512-Sq4CW4QhwOHE8ucn6J34MqtZCeWFP2aQSmrlroYgqAV1PjStIhJXxYuTgUIfkEk7zTLjmIjLmU5q+fbD1NnOJA==", + "dev": true, + "requires": { + "performance-now": "^2.1.0" + } + }, + "regenerator-runtime": { + "version": "0.13.5", + "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.5.tgz", + "integrity": "sha512-ZS5w8CpKFinUzOwW3c83oPeVXoNsrLsaCoLtJvAClH135j/R77RuymhiSErhm2lKcwSCIpmvIWSbDkIfAqKQlA==", + "dev": true + } + } + }, "react-dev-utils": { - "version": "5.0.3", - 
"resolved": "https://registry.npmjs.org/react-dev-utils/-/react-dev-utils-5.0.3.tgz", - "integrity": "sha512-Mvs6ofsc2xTjeZIrMaIfbXfsPVrbdVy/cVqq6SAacnqfMlcBpDuivhWZ1ODGeJ8HgmyWTLH971PYjj/EPCDVAw==", + "version": "10.2.1", + "resolved": "https://registry.npmjs.org/react-dev-utils/-/react-dev-utils-10.2.1.tgz", + "integrity": "sha512-XxTbgJnYZmxuPtY3y/UV0D8/65NKkmaia4rXzViknVnZeVlklSh8u6TnaEYPfAi/Gh1TP4mEOXHI6jQOPbeakQ==", + "dev": true, "requires": { - "address": "1.0.3", - "babel-code-frame": "6.26.0", - "chalk": "1.1.3", - "cross-spawn": "5.1.0", + "@babel/code-frame": "7.8.3", + "address": "1.1.2", + "browserslist": "4.10.0", + "chalk": "2.4.2", + "cross-spawn": "7.0.1", "detect-port-alt": "1.1.6", - "escape-string-regexp": "1.0.5", - "filesize": "3.5.11", - "global-modules": "1.0.0", - "gzip-size": "3.0.0", - "inquirer": "3.3.0", - "is-root": "1.0.0", - "opn": "5.2.0", - "react-error-overlay": "^4.0.1", - "recursive-readdir": "2.2.1", - "shell-quote": "1.6.1", - "sockjs-client": "1.1.5", - "strip-ansi": "3.0.1", + "escape-string-regexp": "2.0.0", + "filesize": "6.0.1", + "find-up": "4.1.0", + "fork-ts-checker-webpack-plugin": "3.1.1", + "global-modules": "2.0.0", + "globby": "8.0.2", + "gzip-size": "5.1.1", + "immer": "1.10.0", + "inquirer": "7.0.4", + "is-root": "2.1.0", + "loader-utils": "1.2.3", + "open": "^7.0.2", + "pkg-up": "3.1.0", + "react-error-overlay": "^6.0.7", + "recursive-readdir": "2.2.2", + "shell-quote": "1.7.2", + "strip-ansi": "6.0.0", "text-table": "0.2.0" + }, + "dependencies": { + "@babel/code-frame": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.8.3.tgz", + "integrity": "sha512-a9gxpmdXtZEInkCSHUJDLHZVBgb1QS0jhss4cPP93EW7s+uC5bikET2twEF3KV+7rDblJcmNvTR7VJejqd2C2g==", + "dev": true, + "requires": { + "@babel/highlight": "^7.8.3" + } + }, + "@babel/highlight": { + "version": "7.9.0", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.9.0.tgz", + "integrity": "sha512-lJZPilxX7Op3Nv/2cvFdnlepPXDxi29wxteT57Q965oc5R9v86ztx0jfxVrTcBk8C2kcPkkDa2Z4T3ZsPPVWsQ==", + "dev": true, + "requires": { + "@babel/helper-validator-identifier": "^7.9.0", + "chalk": "^2.0.0", + "js-tokens": "^4.0.0" + } + }, + "ansi-escapes": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.1.tgz", + "integrity": "sha512-JWF7ocqNrp8u9oqpgV+wH5ftbt+cfvv+PTjOvKLT3AdYly/LmORARfEVT1iyjwN+4MqE5UmVKoAdIBqeoCHgLA==", + "dev": true, + "requires": { + "type-fest": "^0.11.0" + } + }, + "ansi-regex": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", + "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", + "dev": true + }, + "browserslist": { + "version": "4.10.0", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.10.0.tgz", + "integrity": "sha512-TpfK0TDgv71dzuTsEAlQiHeWQ/tiPqgNZVdv046fvNtBZrjbv2O3TsWCDU0AWGJJKCF/KsjNdLzR9hXOsh/CfA==", + "dev": true, + "requires": { + "caniuse-lite": "^1.0.30001035", + "electron-to-chromium": "^1.3.378", + "node-releases": "^1.1.52", + "pkg-up": "^3.1.0" + } + }, + "caniuse-lite": { + "version": "1.0.30001039", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001039.tgz", + "integrity": "sha512-SezbWCTT34eyFoWHgx8UWso7YtvtM7oosmFoXbCkdC6qJzRfBTeTgE9REtKtiuKXuMwWTZEvdnFNGAyVMorv8Q==", + "dev": true + }, + "cross-spawn": { + "version": "7.0.1", + "resolved": 
"https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.1.tgz", + "integrity": "sha512-u7v4o84SwFpD32Z8IIcPZ6z1/ie24O6RU3RbtL5Y316l3KuHVPx9ItBgWQ6VlfAFnRnTtMUrsQ9MUUTuEZjogg==", + "dev": true, + "requires": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + } + }, + "electron-to-chromium": { + "version": "1.3.399", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.3.399.tgz", + "integrity": "sha512-+NBhK0/v17pls7CSh3Cx5Ir3tsGmtLPMMAO4Nz272bre2wzdykLEsev5wjOd3rYMt2/kSS681ufFT7Dywxq1sw==", + "dev": true + }, + "escape-string-regexp": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz", + "integrity": "sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==", + "dev": true + }, + "filesize": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/filesize/-/filesize-6.0.1.tgz", + "integrity": "sha512-u4AYWPgbI5GBhs6id1KdImZWn5yfyFrrQ8OWZdN7ZMfA8Bf4HcO0BGo9bmUIEV8yrp8I1xVfJ/dn90GtFNNJcg==", + "dev": true + }, + "find-up": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "dev": true, + "requires": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + }, + "dependencies": { + "locate-path": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "dev": true, + "requires": { + "p-locate": "^4.1.0" + } + }, + "p-locate": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "dev": true, + "requires": { + "p-limit": "^2.2.0" + } + }, + "path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true + } + } + }, + "inquirer": { + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-7.0.4.tgz", + "integrity": "sha512-Bu5Td5+j11sCkqfqmUTiwv+tWisMtP0L7Q8WrqA2C/BbBhy1YTdFrvjjlrKq8oagA/tLQBski2Gcx/Sqyi2qSQ==", + "dev": true, + "requires": { + "ansi-escapes": "^4.2.1", + "chalk": "^2.4.2", + "cli-cursor": "^3.1.0", + "cli-width": "^2.0.0", + "external-editor": "^3.0.3", + "figures": "^3.0.0", + "lodash": "^4.17.15", + "mute-stream": "0.0.8", + "run-async": "^2.2.0", + "rxjs": "^6.5.3", + "string-width": "^4.1.0", + "strip-ansi": "^5.1.0", + "through": "^2.3.6" + }, + "dependencies": { + "strip-ansi": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "dev": true, + "requires": { + "ansi-regex": "^4.1.0" + } + } + } + }, + "locate-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "dev": true, + "requires": { + "p-locate": "^3.0.0", + "path-exists": "^3.0.0" + } + }, + "node-releases": { + 
"version": "1.1.53", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-1.1.53.tgz", + "integrity": "sha512-wp8zyQVwef2hpZ/dJH7SfSrIPD6YoJz6BDQDpGEkcA0s3LpAQoxBIYmfIq6QAhC1DhwsyCgTaTTcONwX8qzCuQ==", + "dev": true + }, + "p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "requires": { + "p-try": "^2.0.0" + } + }, + "p-locate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "dev": true, + "requires": { + "p-limit": "^2.0.0" + } + }, + "p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "dev": true + }, + "path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true + }, + "pkg-up": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/pkg-up/-/pkg-up-3.1.0.tgz", + "integrity": "sha512-nDywThFk1i4BQK4twPQ6TA4RT8bDY96yeuCVBWL3ePARCiEKDRSrNGbFIgUJpLp+XeIR65v8ra7WuJOFUBtkMA==", + "dev": true, + "requires": { + "find-up": "^3.0.0" + }, + "dependencies": { + "find-up": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "dev": true, + "requires": { + "locate-path": "^3.0.0" + } + } + } + }, + "shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, + "requires": { + "shebang-regex": "^3.0.0" + } + }, + "shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true + }, + "strip-ansi": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", + "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", + "dev": true, + "requires": { + "ansi-regex": "^5.0.0" + }, + "dependencies": { + "ansi-regex": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz", + "integrity": "sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==", + "dev": true + } + } + }, + "type-fest": { + "version": "0.11.0", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.11.0.tgz", + "integrity": "sha512-OdjXJxnCN1AvyLSzeKIgXTXxV+99ZuXl3Hpo9XpJAv9MBcHrrJOQ5kV7ypXOuQie+AmWG25hLbiKdwYTifzcfQ==", + "dev": true + }, + "which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "requires": { + "isexe": "^2.0.0" + } 
+ } } }, "react-dom": { - "version": "16.7.0", - "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-16.7.0.tgz", - "integrity": "sha512-D0Ufv1ExCAmF38P2Uh1lwpminZFRXEINJe53zRAbm4KPwSyd6DY/uDoS0Blj9jvPpn1+wivKpZYc8aAAN/nAkg==", + "version": "16.12.0", + "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-16.12.0.tgz", + "integrity": "sha512-LMxFfAGrcS3kETtQaCkTKjMiifahaMySFDn71fZUNpPHZQEzmk/GiAeIT8JSOrHB23fnuCOMruL2a8NYlw+8Gw==", "requires": { "loose-envify": "^1.1.0", "object-assign": "^4.1.1", "prop-types": "^15.6.2", - "scheduler": "^0.12.0" + "scheduler": "^0.18.0" + }, + "dependencies": { + "scheduler": { + "version": "0.18.0", + "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.18.0.tgz", + "integrity": "sha512-agTSHR1Nbfi6ulI0kYNK0203joW2Y5W4po4l+v03tOoiJKpTBbxpNhWDvqc/4IcOw+KLmSiQLTasZ4cab2/UWQ==", + "requires": { + "loose-envify": "^1.1.0", + "object-assign": "^4.1.1" + } + } } }, "react-dropzone": { @@ -11280,9 +16001,10 @@ } }, "react-error-overlay": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/react-error-overlay/-/react-error-overlay-4.0.1.tgz", - "integrity": "sha512-xXUbDAZkU08aAkjtUvldqbvI04ogv+a1XdHxvYuHPYKIVk/42BIOD0zSKTHAWV4+gDy3yGm283z2072rA2gdtw==" + "version": "6.0.7", + "resolved": "https://registry.npmjs.org/react-error-overlay/-/react-error-overlay-6.0.7.tgz", + "integrity": "sha512-TAv1KJFh3RhqxNvhzxj6LeT5NWklP6rDr2a0jaTfsZ5wSZWHOGeqQyejUp3xxLfPt2UpyJEcVQB/zyPcmonNFA==", + "dev": true }, "react-event-listener": { "version": "0.6.5", @@ -11361,66 +16083,183 @@ "integrity": "sha1-Oo9SB4/BcfTJZsBc77bf8945lxM=", "dev": true }, - "react-scripts-ts": { - "version": "2.17.0", - "resolved": "https://registry.npmjs.org/react-scripts-ts/-/react-scripts-ts-2.17.0.tgz", - "integrity": "sha512-d1GYqxJ0cvMmUEueG7Yhl9guJLPQyQQFwyawVQejNoGvkcsFCdiG1CMUwFY6fDH/EWj0sLmneUe4DnuO07wGgg==", - "requires": { - "autoprefixer": "7.1.6", - "babel-jest": "^22.1.0", - "babel-loader": "^7.1.2", - "babel-preset-react-app": "^3.1.1", - "case-sensitive-paths-webpack-plugin": "2.1.1", - "chalk": "1.1.3", - "css-loader": "0.28.7", - "dotenv": "4.0.0", - "dotenv-expand": "4.2.0", - "extract-text-webpack-plugin": "3.0.2", - "file-loader": "0.11.2", - "fork-ts-checker-webpack-plugin": "^0.2.8", - "fs-extra": "3.0.1", - "fsevents": "^1.1.3", - "html-webpack-plugin": "2.29.0", - "jest": "22.4.2", - "object-assign": "4.1.1", - "postcss-flexbugs-fixes": "3.2.0", - "postcss-loader": "2.0.8", - "promise": "8.0.1", - "raf": "3.4.0", - "react-dev-utils": "^5.0.1", - "resolve": "1.6.0", - "source-map-loader": "^0.2.1", - "style-loader": "0.19.0", - "sw-precache-webpack-plugin": "0.11.4", - "ts-jest": "22.0.1", - "ts-loader": "^2.3.7", - "tsconfig-paths-webpack-plugin": "^2.0.0", - "tslint": "^5.7.0", - "tslint-config-prettier": "^1.10.0", - "tslint-react": "^3.2.0", - "uglifyjs-webpack-plugin": "^1.1.8", - "url-loader": "0.6.2", - "webpack": "3.8.1", - "webpack-dev-server": "2.9.4", - "webpack-manifest-plugin": "1.3.2", - "whatwg-fetch": "2.0.3" + "react-scripts": { + "version": "3.4.1", + "resolved": "https://registry.npmjs.org/react-scripts/-/react-scripts-3.4.1.tgz", + "integrity": "sha512-JpTdi/0Sfd31mZA6Ukx+lq5j1JoKItX7qqEK4OiACjVQletM1P38g49d9/D0yTxp9FrSF+xpJFStkGgKEIRjlQ==", + "dev": true, + "requires": { + "@babel/core": "7.9.0", + "@svgr/webpack": "4.3.3", + "@typescript-eslint/eslint-plugin": "^2.10.0", + "@typescript-eslint/parser": "^2.10.0", + "babel-eslint": "10.1.0", + "babel-jest": "^24.9.0", + "babel-loader": "8.1.0", + 
"babel-plugin-named-asset-import": "^0.3.6", + "babel-preset-react-app": "^9.1.2", + "camelcase": "^5.3.1", + "case-sensitive-paths-webpack-plugin": "2.3.0", + "css-loader": "3.4.2", + "dotenv": "8.2.0", + "dotenv-expand": "5.1.0", + "eslint": "^6.6.0", + "eslint-config-react-app": "^5.2.1", + "eslint-loader": "3.0.3", + "eslint-plugin-flowtype": "4.6.0", + "eslint-plugin-import": "2.20.1", + "eslint-plugin-jsx-a11y": "6.2.3", + "eslint-plugin-react": "7.19.0", + "eslint-plugin-react-hooks": "^1.6.1", + "file-loader": "4.3.0", + "fs-extra": "^8.1.0", + "fsevents": "2.1.2", + "html-webpack-plugin": "4.0.0-beta.11", + "identity-obj-proxy": "3.0.0", + "jest": "24.9.0", + "jest-environment-jsdom-fourteen": "1.0.1", + "jest-resolve": "24.9.0", + "jest-watch-typeahead": "0.4.2", + "mini-css-extract-plugin": "0.9.0", + "optimize-css-assets-webpack-plugin": "5.0.3", + "pnp-webpack-plugin": "1.6.4", + "postcss-flexbugs-fixes": "4.1.0", + "postcss-loader": "3.0.0", + "postcss-normalize": "8.0.1", + "postcss-preset-env": "6.7.0", + "postcss-safe-parser": "4.0.1", + "react-app-polyfill": "^1.0.6", + "react-dev-utils": "^10.2.1", + "resolve": "1.15.0", + "resolve-url-loader": "3.1.1", + "sass-loader": "8.0.2", + "semver": "6.3.0", + "style-loader": "0.23.1", + "terser-webpack-plugin": "2.3.5", + "ts-pnp": "1.1.6", + "url-loader": "2.3.0", + "webpack": "4.42.0", + "webpack-dev-server": "3.10.3", + "webpack-manifest-plugin": "2.2.0", + "workbox-webpack-plugin": "4.3.1" }, "dependencies": { - "promise": { - "version": "8.0.1", - "resolved": "https://registry.npmjs.org/promise/-/promise-8.0.1.tgz", - "integrity": "sha1-5F1osAoXZHttpxG/he1u1HII9FA=", + "acorn": { + "version": "6.4.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-6.4.1.tgz", + "integrity": "sha512-ZVA9k326Nwrj3Cj9jlh3wGFutC2ZornPNARZwsNYqQYgN0EsV2d53w5RN/co65Ohn4sUAUtb1rSUAOD6XN9idA==", + "dev": true + }, + "ajv-keywords": { + "version": "3.4.1", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.4.1.tgz", + "integrity": "sha512-RO1ibKvd27e6FEShVFfPALuHI3WjSVNeK5FIsmme/LYRNxjKuNj+Dt7bucLa6NdSv3JcVTyMlm9kGR84z1XpaQ==", + "dev": true + }, + "eslint-scope": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-4.0.3.tgz", + "integrity": "sha512-p7VutNr1O/QrxysMo3E45FjYDTeXBy0iTltPFNSqKAIfjDSXC+4dj+qfyuD8bfAXrW/y6lW3O76VaYNPKfpKrg==", + "dev": true, + "requires": { + "esrecurse": "^4.1.0", + "estraverse": "^4.1.1" + } + }, + "fsevents": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.1.2.tgz", + "integrity": "sha512-R4wDiBwZ0KzpgOWetKDug1FZcYhqYnUYKtfZYt4mD5SBz76q0KR4Q9o7GIPamsVPGmW3EYPPJ0dOOjvx32ldZA==", + "dev": true, + "optional": true + }, + "neo-async": { + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/neo-async/-/neo-async-2.6.1.tgz", + "integrity": "sha512-iyam8fBuCUpWeKPGpaNMetEocMt364qkCsfL9JuhjXX6dRnguRVOfk2GZaDpPjcOKiiXCPINZC1GczQ7iTq3Zw==", + "dev": true + }, + "resolve": { + "version": "1.15.0", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.15.0.tgz", + "integrity": "sha512-+hTmAldEGE80U2wJJDC1lebb5jWqvTYAfm3YZ1ckk1gBr0MnCqUKlwK1e+anaFljIl+F5tR5IoZcm4ZDA1zMQw==", + "dev": true, "requires": { - "asap": "~2.0.3" + "path-parse": "^1.0.6" } }, - "whatwg-fetch": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/whatwg-fetch/-/whatwg-fetch-2.0.3.tgz", - "integrity": "sha1-nITsLc9oGH/wC8ZOEnS0QhduHIQ=" + "semver": { + "version": "6.3.0", + "resolved": 
"https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "dev": true + }, + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true + }, + "webpack": { + "version": "4.42.0", + "resolved": "https://registry.npmjs.org/webpack/-/webpack-4.42.0.tgz", + "integrity": "sha512-EzJRHvwQyBiYrYqhyjW9AqM90dE4+s1/XtCfn7uWg6cS72zH+2VPFAlsnW0+W0cDi0XRjNKUMoJtpSi50+Ph6w==", + "dev": true, + "requires": { + "@webassemblyjs/ast": "1.8.5", + "@webassemblyjs/helper-module-context": "1.8.5", + "@webassemblyjs/wasm-edit": "1.8.5", + "@webassemblyjs/wasm-parser": "1.8.5", + "acorn": "^6.2.1", + "ajv": "^6.10.2", + "ajv-keywords": "^3.4.1", + "chrome-trace-event": "^1.0.2", + "enhanced-resolve": "^4.1.0", + "eslint-scope": "^4.0.3", + "json-parse-better-errors": "^1.0.2", + "loader-runner": "^2.4.0", + "loader-utils": "^1.2.3", + "memory-fs": "^0.4.1", + "micromatch": "^3.1.10", + "mkdirp": "^0.5.1", + "neo-async": "^2.6.1", + "node-libs-browser": "^2.2.1", + "schema-utils": "^1.0.0", + "tapable": "^1.1.3", + "terser-webpack-plugin": "^1.4.3", + "watchpack": "^1.6.0", + "webpack-sources": "^1.4.1" + }, + "dependencies": { + "terser-webpack-plugin": { + "version": "1.4.3", + "resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-1.4.3.tgz", + "integrity": "sha512-QMxecFz/gHQwteWwSo5nTc6UaICqN1bMedC5sMtUc7y3Ha3Q8y6ZO0iCR8pq4RJC8Hjf0FEPEHZqcMB/+DFCrA==", + "dev": true, + "requires": { + "cacache": "^12.0.2", + "find-cache-dir": "^2.1.0", + "is-wsl": "^1.1.0", + "schema-utils": "^1.0.0", + "serialize-javascript": "^2.1.2", + "source-map": "^0.6.1", + "terser": "^4.1.2", + "webpack-sources": "^1.4.0", + "worker-farm": "^1.7.0" + } + } + } } } }, + "react-svg-line-chart": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/react-svg-line-chart/-/react-svg-line-chart-2.0.2.tgz", + "integrity": "sha512-M5stlZvUK6/ihIgpTY30dYA36XaH77QtD3iGLTT+ZMiI8LYKmifaKNeJ0LGX2Y11tmT1e/OM6UejfneOaxk8nw==", + "requires": { + "styled-components": "^2.4.0" + } + }, "react-test-renderer": { "version": "16.7.0", "resolved": "https://registry.npmjs.org/react-test-renderer/-/react-test-renderer-16.7.0.tgz", @@ -11498,16 +16337,11 @@ } } }, - "read-chunk": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/read-chunk/-/read-chunk-1.0.1.tgz", - "integrity": "sha1-X2jKswfmY/GZk1J9m1icrORmEZQ=", - "dev": true - }, "read-pkg": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-1.1.0.tgz", "integrity": "sha1-9f+qXs0pyzHAR0vKfXVra7KePyg=", + "dev": true, "requires": { "load-json-file": "^1.0.0", "normalize-package-data": "^2.3.2", @@ -11518,6 +16352,7 @@ "version": "1.0.1", "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-1.0.1.tgz", "integrity": "sha1-nWPBMnbAZZGNV/ACpX9AobZD+wI=", + "dev": true, "requires": { "find-up": "^1.0.0", "read-pkg": "^1.0.0" @@ -11527,6 +16362,7 @@ "version": "1.1.2", "resolved": "https://registry.npmjs.org/find-up/-/find-up-1.1.2.tgz", "integrity": "sha1-ay6YIrGizgpgq2TWEOzK1TyyTQ8=", + "dev": true, "requires": { "path-exists": "^2.0.0", "pinkie-promise": "^2.0.0" @@ -11536,6 +16372,7 @@ "version": "2.1.0", "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-2.1.0.tgz", "integrity": 
"sha1-D+tsZPD8UY2adU3V77YscCJ2H0s=", + "dev": true, "requires": { "pinkie-promise": "^2.0.0" } @@ -11546,6 +16383,7 @@ "version": "2.3.6", "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz", "integrity": "sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw==", + "dev": true, "requires": { "core-util-is": "~1.0.0", "inherits": "~2.0.3", @@ -11560,6 +16398,7 @@ "version": "2.2.1", "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-2.2.1.tgz", "integrity": "sha512-1JU/8q+VgFZyxwrJ+SVIOsh+KywWGpds3NTqikiKpDMZWScmAYyKIgqkO+ARvNWJfXeXR1zxz7aHF4u4CyH6vQ==", + "dev": true, "requires": { "graceful-fs": "^4.1.11", "micromatch": "^3.1.10", @@ -11567,9 +16406,10 @@ } }, "realpath-native": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/realpath-native/-/realpath-native-1.0.2.tgz", - "integrity": "sha512-+S3zTvVt9yTntFrBpm7TQmQ3tzpCrnA1a/y+3cUHAc9ZR6aIjG0WNLR+Rj79QpJktY+VeW/TQtFlQ1bzsehI8g==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/realpath-native/-/realpath-native-1.1.0.tgz", + "integrity": "sha512-wlgPA6cCIIg9gKz0fgAPjnzh4yR/LnXovwuo9hvyGvx3h8nX4+/iLZplfUWasXpqD8BdnGnP5njOFjkUwPzvjA==", + "dev": true, "requires": { "util.promisify": "^1.0.0" } @@ -11595,27 +16435,19 @@ } }, "recursive-readdir": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/recursive-readdir/-/recursive-readdir-2.2.1.tgz", - "integrity": "sha1-kO8jHQd4xc4JPJpI105cVCLROpk=", + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/recursive-readdir/-/recursive-readdir-2.2.2.tgz", + "integrity": "sha512-nRCcW9Sj7NuZwa2XvH9co8NPeXUBhZP7CRKJtU+cS6PW9FpCIFoI5ib0NT1ZrbNuPoRy0ylyCaUL8Gih4LSyFg==", + "dev": true, "requires": { - "minimatch": "3.0.3" - }, - "dependencies": { - "minimatch": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.3.tgz", - "integrity": "sha1-Kk5AkLlrLbBqnX3wEFWmKnfJt3Q=", - "requires": { - "brace-expansion": "^1.0.0" - } - } + "minimatch": "3.0.4" } }, "redent": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/redent/-/redent-1.0.0.tgz", "integrity": "sha1-z5Fqsf1fHxbfsggi3W7H9zDCr94=", + "dev": true, "requires": { "indent-string": "^2.1.0", "strip-indent": "^1.0.1" @@ -11630,28 +16462,26 @@ "esprima": "~4.0.0" } }, - "reduce-css-calc": { - "version": "1.3.0", - "resolved": "http://registry.npmjs.org/reduce-css-calc/-/reduce-css-calc-1.3.0.tgz", - "integrity": "sha1-dHyRTgSWFKTJz7umKYca0dKSdxY=", - "requires": { - "balanced-match": "^0.4.2", - "math-expression-evaluator": "^1.2.14", - "reduce-function-call": "^1.0.1" - } - }, - "reduce-function-call": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/reduce-function-call/-/reduce-function-call-1.0.2.tgz", - "integrity": "sha1-WiAL+S4ON3UXUv5FsKszD9S2vpk=", - "requires": { - "balanced-match": "^0.4.2" - } + "reflect.ownkeys": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/reflect.ownkeys/-/reflect.ownkeys-0.2.0.tgz", + "integrity": "sha1-dJrO7H8/34tj+SegSAnpDFwLNGA=", + "dev": true }, "regenerate": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/regenerate/-/regenerate-1.4.0.tgz", - "integrity": "sha512-1G6jJVDWrt0rK99kBjvEtziZNCICAuvIPkSiUFIQxVP06RCVpq3dmDo2oi6ABpYaDYaTRr67BEhL8r1wgEZZKg==" + "integrity": "sha512-1G6jJVDWrt0rK99kBjvEtziZNCICAuvIPkSiUFIQxVP06RCVpq3dmDo2oi6ABpYaDYaTRr67BEhL8r1wgEZZKg==", + "dev": true + }, + "regenerate-unicode-properties": { + "version": "8.2.0", + "resolved": 
"https://registry.npmjs.org/regenerate-unicode-properties/-/regenerate-unicode-properties-8.2.0.tgz", + "integrity": "sha512-F9DjY1vKLo/tPePDycuH3dn9H1OTPIkVD9Kz4LODu+F2C75mgjAJ7x/gwy6ZcSNRAAkhNlJSOHRe8k3p+K9WhA==", + "dev": true, + "requires": { + "regenerate": "^1.4.0" + } }, "regenerator-runtime": { "version": "0.12.1", @@ -11659,21 +16489,30 @@ "integrity": "sha512-odxIc1/vDlo4iZcfXqRYFj0vpXFNoGdKMAUieAlFYO6m/nl5e9KR/beGf41z4a1FI+aQgtjhuaSlDxQ0hmkrHg==" }, "regenerator-transform": { - "version": "0.10.1", - "resolved": "https://registry.npmjs.org/regenerator-transform/-/regenerator-transform-0.10.1.tgz", - "integrity": "sha512-PJepbvDbuK1xgIgnau7Y90cwaAmO/LCLMI2mPvaXq2heGMR3aWW5/BQvYrhJ8jgmQjXewXvBjzfqKcVOmhjZ6Q==", - "requires": { - "babel-runtime": "^6.18.0", - "babel-types": "^6.19.0", - "private": "^0.1.6" - } - }, - "regex-cache": { - "version": "0.4.4", - "resolved": "https://registry.npmjs.org/regex-cache/-/regex-cache-0.4.4.tgz", - "integrity": "sha512-nVIZwtCjkC9YgvWkpM55B5rBhBYRZhAaJbgcFYXXsHnbZ9UZI9nnVWYZpBlCqv9ho2eZryPnWrZGsOdPwVWXWQ==", + "version": "0.14.4", + "resolved": "https://registry.npmjs.org/regenerator-transform/-/regenerator-transform-0.14.4.tgz", + "integrity": "sha512-EaJaKPBI9GvKpvUz2mz4fhx7WPgvwRLY9v3hlNHWmAuJHI13T4nwKnNvm5RWJzEdnI5g5UwtOww+S8IdoUC2bw==", + "dev": true, "requires": { - "is-equal-shallow": "^0.1.3" + "@babel/runtime": "^7.8.4", + "private": "^0.1.8" + }, + "dependencies": { + "@babel/runtime": { + "version": "7.9.2", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.9.2.tgz", + "integrity": "sha512-NE2DtOdufG7R5vnfQUTehdTfNycfUANEtCa9PssN9O/xmTzP4E08UI797ixaei6hBEVL9BI/PsdJS5x7mWoB9Q==", + "dev": true, + "requires": { + "regenerator-runtime": "^0.13.4" + } + }, + "regenerator-runtime": { + "version": "0.13.5", + "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.5.tgz", + "integrity": "sha512-ZS5w8CpKFinUzOwW3c83oPeVXoNsrLsaCoLtJvAClH135j/R77RuymhiSErhm2lKcwSCIpmvIWSbDkIfAqKQlA==", + "dev": true + } } }, "regex-not": { @@ -11685,42 +16524,140 @@ "safe-regex": "^1.1.0" } }, - "regexpu-core": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/regexpu-core/-/regexpu-core-2.0.0.tgz", - "integrity": "sha1-SdA4g3uNz4v6W5pCE5k45uoq4kA=", - "requires": { - "regenerate": "^1.2.1", - "regjsgen": "^0.2.0", - "regjsparser": "^0.1.4" - } + "regex-parser": { + "version": "2.2.10", + "resolved": "https://registry.npmjs.org/regex-parser/-/regex-parser-2.2.10.tgz", + "integrity": "sha512-8t6074A68gHfU8Neftl0Le6KTDwfGAj7IyjPIMSfikI2wJUTHDMaIq42bUsfVnj8mhx0R+45rdUXHGpN164avA==", + "dev": true }, - "registry-auth-token": { - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/registry-auth-token/-/registry-auth-token-3.3.2.tgz", - "integrity": "sha512-JL39c60XlzCVgNrO+qq68FoNb56w/m7JYvGR2jT5iR1xBrUA3Mfx5Twk5rqTThPmQKMWydGmq8oFtDlxfrmxnQ==", + "regexp.prototype.flags": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.3.0.tgz", + "integrity": "sha512-2+Q0C5g951OlYlJz6yu5/M33IcsESLlLfsyIaLJaG4FA2r4yP8MvVMJUUP/fVBkSpbbbZlS5gynbEWLipiiXiQ==", + "dev": true, "requires": { - "rc": "^1.1.6", - "safe-buffer": "^5.0.1" + "define-properties": "^1.1.3", + "es-abstract": "^1.17.0-next.1" + }, + "dependencies": { + "es-abstract": { + "version": "1.17.5", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.17.5.tgz", + "integrity": 
"sha512-BR9auzDbySxOcfog0tLECW8l28eRGpDpU3Dm3Hp4q/N+VtLTmyj4EUN088XZWQDW/hzj6sYRDXeOFsaAODKvpg==", + "dev": true, + "requires": { + "es-to-primitive": "^1.2.1", + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.1", + "is-callable": "^1.1.5", + "is-regex": "^1.0.5", + "object-inspect": "^1.7.0", + "object-keys": "^1.1.1", + "object.assign": "^4.1.0", + "string.prototype.trimleft": "^2.1.1", + "string.prototype.trimright": "^2.1.1" + } + }, + "es-to-primitive": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz", + "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==", + "dev": true, + "requires": { + "is-callable": "^1.1.4", + "is-date-object": "^1.0.1", + "is-symbol": "^1.0.2" + } + }, + "has-symbols": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.1.tgz", + "integrity": "sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg==", + "dev": true + }, + "is-callable": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.1.5.tgz", + "integrity": "sha512-ESKv5sMCJB2jnHTWZ3O5itG+O128Hsus4K4Qh1h2/cgn2vbgnLSVqfV46AeJA9D5EeeLa9w81KUXMtn34zhX+Q==", + "dev": true + }, + "is-regex": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.0.5.tgz", + "integrity": "sha512-vlKW17SNq44owv5AQR3Cq0bQPEb8+kF3UKZ2fiZNOWtztYE5i0CzCZxFDwO58qAOWtxdBRVO/V5Qin1wjCqFYQ==", + "dev": true, + "requires": { + "has": "^1.0.3" + } + }, + "object-inspect": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.7.0.tgz", + "integrity": "sha512-a7pEHdh1xKIAgTySUGgLMx/xwDZskN1Ud6egYYN3EdRW4ZMPNEDUTF+hwy2LUC+Bl+SyLXANnwz/jyh/qutKUw==", + "dev": true + }, + "object-keys": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", + "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==", + "dev": true + }, + "string.prototype.trimleft": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/string.prototype.trimleft/-/string.prototype.trimleft-2.1.2.tgz", + "integrity": "sha512-gCA0tza1JBvqr3bfAIFJGqfdRTyPae82+KTnm3coDXkZN9wnuW3HjGgN386D7hfv5CHQYCI022/rJPVlqXyHSw==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.5", + "string.prototype.trimstart": "^1.0.0" + } + }, + "string.prototype.trimright": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/string.prototype.trimright/-/string.prototype.trimright-2.1.2.tgz", + "integrity": "sha512-ZNRQ7sY3KroTaYjRS6EbNiiHrOkjihL9aQE/8gfQ4DtAC/aEBRHFJa44OmoWxGGqXuJlfKkZW4WcXErGr+9ZFg==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.5", + "string.prototype.trimend": "^1.0.0" + } + } } }, - "registry-url": { + "regexpp": { "version": "3.1.0", - "resolved": "https://registry.npmjs.org/registry-url/-/registry-url-3.1.0.tgz", - "integrity": "sha1-PU74cPc93h138M+aOBQyRE4XSUI=", + "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-3.1.0.tgz", + "integrity": "sha512-ZOIzd8yVsQQA7j8GCSlPGXwg5PfmA1mrq0JP4nGhh54LaKN3xdai/vHUDu74pKwV8OxseMS65u2NImosQcSD0Q==", + "dev": true + }, + "regexpu-core": { + "version": "4.7.0", + "resolved": "https://registry.npmjs.org/regexpu-core/-/regexpu-core-4.7.0.tgz", + "integrity": 
"sha512-TQ4KXRnIn6tz6tjnrXEkD/sshygKH/j5KzK86X8MkeHyZ8qst/LZ89j3X4/8HEIfHANTFIP/AbXakeRhWIl5YQ==", + "dev": true, "requires": { - "rc": "^1.0.1" + "regenerate": "^1.4.0", + "regenerate-unicode-properties": "^8.2.0", + "regjsgen": "^0.5.1", + "regjsparser": "^0.6.4", + "unicode-match-property-ecmascript": "^1.0.4", + "unicode-match-property-value-ecmascript": "^1.2.0" } }, "regjsgen": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/regjsgen/-/regjsgen-0.2.0.tgz", - "integrity": "sha1-bAFq3qxVT3WCP+N6wFuS1aTtsfc=" + "version": "0.5.1", + "resolved": "https://registry.npmjs.org/regjsgen/-/regjsgen-0.5.1.tgz", + "integrity": "sha512-5qxzGZjDs9w4tzT3TPhCJqWdCc3RLYwy9J2NB0nm5Lz+S273lvWcpjaTGHsT1dc6Hhfq41uSEOw8wBmxrKOuyg==", + "dev": true }, "regjsparser": { - "version": "0.1.5", - "resolved": "https://registry.npmjs.org/regjsparser/-/regjsparser-0.1.5.tgz", - "integrity": "sha1-fuj4Tcb6eS0/0K4ijSS9lJ6tIFw=", + "version": "0.6.4", + "resolved": "https://registry.npmjs.org/regjsparser/-/regjsparser-0.6.4.tgz", + "integrity": "sha512-64O87/dPDgfk8/RQqC4gkZoGyyWFIEUTTh80CU6CWuK5vkCGyekIx+oKcEIYtP/RAxSQltCZHCNu/mdd7fqlJw==", + "dev": true, "requires": { "jsesc": "~0.5.0" }, @@ -11728,30 +16665,51 @@ "jsesc": { "version": "0.5.0", "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-0.5.0.tgz", - "integrity": "sha1-597mbjXW/Bb3EP6R1c9p9w8IkR0=" + "integrity": "sha1-597mbjXW/Bb3EP6R1c9p9w8IkR0=", + "dev": true } } }, "relateurl": { "version": "0.2.7", "resolved": "https://registry.npmjs.org/relateurl/-/relateurl-0.2.7.tgz", - "integrity": "sha1-VNvzd+UUQKypCkzSdGANP/LYiKk=" + "integrity": "sha1-VNvzd+UUQKypCkzSdGANP/LYiKk=", + "dev": true }, "remove-trailing-separator": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz", - "integrity": "sha1-wkvOKig62tW8P1jg1IJJuSN52O8=" + "integrity": "sha1-wkvOKig62tW8P1jg1IJJuSN52O8=", + "dev": true }, "renderkid": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/renderkid/-/renderkid-2.0.2.tgz", - "integrity": "sha512-FsygIxevi1jSiPY9h7vZmBFUbAOcbYm9UwyiLNdVsLRs/5We9Ob5NMPbGYUTWiLq5L+ezlVdE0A8bbME5CWTpg==", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/renderkid/-/renderkid-2.0.3.tgz", + "integrity": "sha512-z8CLQp7EZBPCwCnncgf9C4XAi3WR0dv+uWu/PjIyhhAb5d6IJ/QZqlHFprHeKT+59//V6BNUsLbvN8+2LarxGA==", + "dev": true, "requires": { "css-select": "^1.1.0", - "dom-converter": "~0.2", - "htmlparser2": "~3.3.0", + "dom-converter": "^0.2", + "htmlparser2": "^3.3.0", "strip-ansi": "^3.0.0", "utila": "^0.4.0" + }, + "dependencies": { + "ansi-regex": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", + "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=", + "dev": true + }, + "strip-ansi": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", + "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", + "dev": true, + "requires": { + "ansi-regex": "^2.0.0" + } + } } }, "repeat-element": { @@ -11768,6 +16726,7 @@ "version": "2.0.1", "resolved": "https://registry.npmjs.org/repeating/-/repeating-2.0.1.tgz", "integrity": "sha1-UhTFOpJtNVJwdSf7q0FdvAjQbdo=", + "dev": true, "requires": { "is-finite": "^1.0.0" } @@ -11776,6 +16735,7 @@ "version": "2.88.0", "resolved": "https://registry.npmjs.org/request/-/request-2.88.0.tgz", "integrity": "sha512-NAqBSrijGLZdM0WZNsInLJpkJokL72XYjUpnB0iwsRgxh7dB6COrHnTBNwN0E+lHDAJzu7kLAkDeY08z2/A0hg==", + "dev": true, "requires": { 
"aws-sign2": "~0.7.0", "aws4": "^1.8.0", @@ -11802,12 +16762,14 @@ "punycode": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz", - "integrity": "sha1-wNWmOycYgArY4esPpSachN1BhF4=" + "integrity": "sha1-wNWmOycYgArY4esPpSachN1BhF4=", + "dev": true }, "tough-cookie": { "version": "2.4.3", "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.4.3.tgz", "integrity": "sha512-Q5srk/4vDM54WJsJio3XNn6K2sCG+CQ8G5Wz6bZhRZoAe/+TxjWB/GlFAnYEbkYVlON9FMk/fE3h2RLpPXo4lQ==", + "dev": true, "requires": { "psl": "^1.1.24", "punycode": "^1.4.1" @@ -11815,19 +16777,11 @@ } } }, - "request-progress": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/request-progress/-/request-progress-2.0.1.tgz", - "integrity": "sha1-XTa7V5YcZzqlt4jbyBQf3yO0Tgg=", - "dev": true, - "requires": { - "throttleit": "^1.0.0" - } - }, "request-promise-core": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/request-promise-core/-/request-promise-core-1.1.1.tgz", "integrity": "sha1-Pu4AssWqgyOc+wTFcA2jb4HNCLY=", + "dev": true, "requires": { "lodash": "^4.13.1" } @@ -11836,6 +16790,7 @@ "version": "1.0.5", "resolved": "https://registry.npmjs.org/request-promise-native/-/request-promise-native-1.0.5.tgz", "integrity": "sha1-UoF3D2jgyXGeUWP9P6tIIhX0/aU=", + "dev": true, "requires": { "request-promise-core": "1.1.1", "stealthy-require": "^1.1.0", @@ -11845,17 +16800,14 @@ "require-directory": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", - "integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I=" - }, - "require-from-string": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-1.2.1.tgz", - "integrity": "sha1-UpyczvJzgK3+yaL5ZbZJu+5jZBg=" + "integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I=", + "dev": true }, "require-main-filename": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-1.0.1.tgz", - "integrity": "sha1-l/cXtp1IeE9fUmpsWqj/3aBVpNE=" + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz", + "integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==", + "dev": true }, "requires-port": { "version": "1.0.0", @@ -11866,6 +16818,7 @@ "version": "1.6.0", "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.6.0.tgz", "integrity": "sha512-mw7JQNu5ExIkcw4LPih0owX/TZXjD/ZUF/ZQ/pDnkw3ZKhDcZZw5klmBlj6gVMwjQ3Pz5Jgu7F3d0jcDVuEWdw==", + "dev": true, "requires": { "path-parse": "^1.0.5" } @@ -11874,23 +16827,16 @@ "version": "2.0.0", "resolved": "https://registry.npmjs.org/resolve-cwd/-/resolve-cwd-2.0.0.tgz", "integrity": "sha1-AKn3OHVW4nA46uIyyqNypqWbZlo=", + "dev": true, "requires": { "resolve-from": "^3.0.0" } }, - "resolve-dir": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/resolve-dir/-/resolve-dir-1.0.1.tgz", - "integrity": "sha1-eaQGRMNivoLybv/nOcm7U4IEb0M=", - "requires": { - "expand-tilde": "^2.0.0", - "global-modules": "^1.0.0" - } - }, "resolve-from": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-3.0.0.tgz", - "integrity": "sha1-six699nWiBvItuZTM17rywoYh0g=" + "integrity": "sha1-six699nWiBvItuZTM17rywoYh0g=", + "dev": true }, "resolve-pathname": { "version": "2.2.0", @@ -11902,12 +16848,59 @@ "resolved": "https://registry.npmjs.org/resolve-url/-/resolve-url-0.2.1.tgz", "integrity": 
"sha1-LGN/53yJOv0qZj/iGqkIAGjiBSo=" }, + "resolve-url-loader": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/resolve-url-loader/-/resolve-url-loader-3.1.1.tgz", + "integrity": "sha512-K1N5xUjj7v0l2j/3Sgs5b8CjrrgtC70SmdCuZiJ8tSyb5J+uk3FoeZ4b7yTnH6j7ngI+Bc5bldHJIa8hYdu2gQ==", + "dev": true, + "requires": { + "adjust-sourcemap-loader": "2.0.0", + "camelcase": "5.3.1", + "compose-function": "3.0.3", + "convert-source-map": "1.7.0", + "es6-iterator": "2.0.3", + "loader-utils": "1.2.3", + "postcss": "7.0.21", + "rework": "1.0.1", + "rework-visit": "1.0.0", + "source-map": "0.6.1" + }, + "dependencies": { + "postcss": { + "version": "7.0.21", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-7.0.21.tgz", + "integrity": "sha512-uIFtJElxJo29QC753JzhidoAhvp/e/Exezkdhfmt8AymWT6/5B7W1WmponYWkHk2eg6sONyTch0A3nkMPun3SQ==", + "dev": true, + "requires": { + "chalk": "^2.4.2", + "source-map": "^0.6.1", + "supports-color": "^6.1.0" + } + }, + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true + }, + "supports-color": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-6.1.0.tgz", + "integrity": "sha512-qe1jfm1Mg7Nq/NSh6XE24gPXROEVsWHxC1LIx//XNlD9iw7YZQGjZNjYN7xGaEG6iKdA8EtNFW6R0gjnVXp+wQ==", + "dev": true, + "requires": { + "has-flag": "^3.0.0" + } + } + } + }, "restore-cursor": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-2.0.0.tgz", - "integrity": "sha1-n37ih/gv0ybU/RYpI9YhKe7g368=", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-3.1.0.tgz", + "integrity": "sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA==", + "dev": true, "requires": { - "onetime": "^2.0.0", + "onetime": "^5.1.0", "signal-exit": "^3.0.2" } }, @@ -11916,18 +16909,89 @@ "resolved": "https://registry.npmjs.org/ret/-/ret-0.1.15.tgz", "integrity": "sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg==" }, - "right-align": { - "version": "0.1.3", - "resolved": "https://registry.npmjs.org/right-align/-/right-align-0.1.3.tgz", - "integrity": "sha1-YTObci/mo1FWiSENJOFMlhSGE+8=", + "retry": { + "version": "0.12.0", + "resolved": "https://registry.npmjs.org/retry/-/retry-0.12.0.tgz", + "integrity": "sha1-G0KmJmoh8HQh0bC1S33BZ7AcATs=", + "dev": true + }, + "retry-request": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/retry-request/-/retry-request-4.1.1.tgz", + "integrity": "sha512-BINDzVtLI2BDukjWmjAIRZ0oglnCAkpP2vQjM3jdLhmT62h0xnQgciPwBRDAvHqpkPT2Wo1XuUyLyn6nbGrZQQ==", + "dev": true, + "requires": { + "debug": "^4.1.1", + "through2": "^3.0.1" + }, + "dependencies": { + "debug": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", + "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", + "dev": true, + "requires": { + "ms": "^2.1.1" + } + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, + "through2": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/through2/-/through2-3.0.1.tgz", + 
"integrity": "sha512-M96dvTalPT3YbYLaKaCuwu+j06D/8Jfib0o/PxbVt6Amhv3dUAtW6rTV1jPgJSBG83I/e04Y6xkVdVhSRhi0ww==", + "dev": true, + "requires": { + "readable-stream": "2 || 3" + } + } + } + }, + "rework": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/rework/-/rework-1.0.1.tgz", + "integrity": "sha1-MIBqhBNCtUUQqkEQhQzUhTQUSqc=", + "dev": true, "requires": { - "align-text": "^0.1.1" + "convert-source-map": "^0.3.3", + "css": "^2.0.0" + }, + "dependencies": { + "convert-source-map": { + "version": "0.3.5", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-0.3.5.tgz", + "integrity": "sha1-8dgClQr33SYxof6+BZZVDIarMZA=", + "dev": true + } } }, + "rework-visit": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/rework-visit/-/rework-visit-1.0.0.tgz", + "integrity": "sha1-mUWygD8hni96ygCtuLyfZA+ELJo=", + "dev": true + }, + "rgb-regex": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/rgb-regex/-/rgb-regex-1.0.1.tgz", + "integrity": "sha1-wODWiC3w4jviVKR16O3UGRX+rrE=", + "dev": true + }, + "rgba-regex": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/rgba-regex/-/rgba-regex-1.0.0.tgz", + "integrity": "sha1-QzdOLiyglosO8VI0YLfXMP8i7rM=", + "dev": true + }, "rimraf": { "version": "2.6.3", "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.6.3.tgz", "integrity": "sha512-mwqeW5XsA2qAejG46gYdENaxXjx9onRNCfn7L0duuP4hCuTIi/QO7PDK07KJfp1d+izWPrzEJDcSqBa0OZQriA==", + "dev": true, "requires": { "glob": "^7.1.3" } @@ -11936,6 +17000,7 @@ "version": "2.0.2", "resolved": "https://registry.npmjs.org/ripemd160/-/ripemd160-2.0.2.tgz", "integrity": "sha512-ii4iagi25WusVoiC4B4lq7pbXfAp3D9v5CwfkY33vffw2+pkDjY1D8GaN7spsxvCSx8dkPqOZCEZyfxcmJG2IA==", + "dev": true, "requires": { "hash-base": "^3.0.0", "inherits": "^2.0.1" @@ -11952,14 +17017,16 @@ } }, "rsvp": { - "version": "3.6.2", - "resolved": "https://registry.npmjs.org/rsvp/-/rsvp-3.6.2.tgz", - "integrity": "sha512-OfWGQTb9vnwRjwtA2QwpG2ICclHC3pgXZO5xt8H2EfgDquO0qVdSb5T88L4qJVAEugbS56pAuV4XZM58UX8ulw==" + "version": "4.8.5", + "resolved": "https://registry.npmjs.org/rsvp/-/rsvp-4.8.5.tgz", + "integrity": "sha512-nfMOlASu9OnRJo1mbEk2cz0D56a1MBNrJ7orjRZQG10XDyuvwksKbuXNp6qa+kbn839HwjwhBzhFmdsaEAfauA==", + "dev": true }, "run-async": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/run-async/-/run-async-2.3.0.tgz", - "integrity": "sha1-A3GrSuC91yDUFm19/aZP96RFpsA=", + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/run-async/-/run-async-2.4.0.tgz", + "integrity": "sha512-xJTbh/d7Lm7SBhc1tNvTpeCHaEzoyxPrqNlvSdMfBTYwaY++UJFyXUOxAtsRUXjlqOfj8luNaR9vjCh4KeV+pg==", + "dev": true, "requires": { "is-promise": "^2.1.0" } @@ -11968,6 +17035,7 @@ "version": "1.0.3", "resolved": "https://registry.npmjs.org/run-queue/-/run-queue-1.0.3.tgz", "integrity": "sha1-6Eg5bwV9Ij8kOGkkYY4laUFh7Ec=", + "dev": true, "requires": { "aproba": "^1.1.1" } @@ -11977,23 +17045,20 @@ "resolved": "https://registry.npmjs.org/rw/-/rw-1.3.3.tgz", "integrity": "sha1-P4Yt+pGrdmsUiF700BEkv9oHT7Q=" }, - "rx-lite": { - "version": "4.0.8", - "resolved": "https://registry.npmjs.org/rx-lite/-/rx-lite-4.0.8.tgz", - "integrity": "sha1-Cx4Rr4vESDbwSmQH6S2kJGe3lEQ=" - }, - "rx-lite-aggregates": { - "version": "4.0.8", - "resolved": "https://registry.npmjs.org/rx-lite-aggregates/-/rx-lite-aggregates-4.0.8.tgz", - "integrity": "sha1-dTuHqJoRyVRnxKwWJsTvxOBcZ74=", + "rxjs": { + "version": "6.5.5", + "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-6.5.5.tgz", + "integrity": 
"sha512-WfQI+1gohdf0Dai/Bbmk5L5ItH5tYqm3ki2c5GdWhKjalzjg93N3avFjVStyZZz+A2Em+ZxKH5bNghw9UeylGQ==", + "dev": true, "requires": { - "rx-lite": "*" + "tslib": "^1.9.0" } }, "safe-buffer": { "version": "5.1.2", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", - "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "dev": true }, "safe-regex": { "version": "1.1.0", @@ -12009,28 +17074,87 @@ "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" }, "sane": { - "version": "2.5.2", - "resolved": "https://registry.npmjs.org/sane/-/sane-2.5.2.tgz", - "integrity": "sha1-tNwYYcIbQn6SlQej51HiosuKs/o=", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/sane/-/sane-4.1.0.tgz", + "integrity": "sha512-hhbzAgTIX8O7SHfp2c8/kREfEn4qO/9q8C9beyY6+tvZ87EpoZ3i1RIEvp27YBswnNbY9mWd6paKVmKbAgLfZA==", + "dev": true, "requires": { + "@cnakazawa/watch": "^1.0.3", "anymatch": "^2.0.0", - "capture-exit": "^1.2.0", - "exec-sh": "^0.2.0", + "capture-exit": "^2.0.0", + "exec-sh": "^0.3.2", + "execa": "^1.0.0", "fb-watchman": "^2.0.0", - "fsevents": "^1.2.3", "micromatch": "^3.1.4", "minimist": "^1.1.1", - "walker": "~1.0.5", - "watch": "~0.18.0" + "walker": "~1.0.5" + } + }, + "sanitize.css": { + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/sanitize.css/-/sanitize.css-10.0.0.tgz", + "integrity": "sha512-vTxrZz4dX5W86M6oVWVdOVe72ZiPs41Oi7Z6Km4W5Turyz28mrXSJhhEBZoRtzJWIv3833WKVwLSDWWkEfupMg==", + "dev": true + }, + "sass-loader": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/sass-loader/-/sass-loader-8.0.2.tgz", + "integrity": "sha512-7o4dbSK8/Ol2KflEmSco4jTjQoV988bM82P9CZdmo9hR3RLnvNc0ufMNdMrB0caq38JQ/FgF4/7RcbcfKzxoFQ==", + "dev": true, + "requires": { + "clone-deep": "^4.0.1", + "loader-utils": "^1.2.3", + "neo-async": "^2.6.1", + "schema-utils": "^2.6.1", + "semver": "^6.3.0" }, "dependencies": { - "anymatch": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-2.0.0.tgz", - "integrity": "sha512-5teOsQWABXHHBFP9y3skS5P3d/WfWXpv3FUpy+LorMrNYaT9pI4oLMQX7jzQ2KklNpGpWHzdCXTDT2Y3XGlZBw==", + "ajv-keywords": { + "version": "3.4.1", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.4.1.tgz", + "integrity": "sha512-RO1ibKvd27e6FEShVFfPALuHI3WjSVNeK5FIsmme/LYRNxjKuNj+Dt7bucLa6NdSv3JcVTyMlm9kGR84z1XpaQ==", + "dev": true + }, + "clone-deep": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/clone-deep/-/clone-deep-4.0.1.tgz", + "integrity": "sha512-neHB9xuzh/wk0dIHweyAXv2aPGZIVk3pLMe+/RNzINf17fe0OG96QroktYAUm7SM1PBnzTabaLboqqxDyMU+SQ==", + "dev": true, "requires": { - "micromatch": "^3.1.4", - "normalize-path": "^2.1.1" + "is-plain-object": "^2.0.4", + "kind-of": "^6.0.2", + "shallow-clone": "^3.0.0" + } + }, + "neo-async": { + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/neo-async/-/neo-async-2.6.1.tgz", + "integrity": "sha512-iyam8fBuCUpWeKPGpaNMetEocMt364qkCsfL9JuhjXX6dRnguRVOfk2GZaDpPjcOKiiXCPINZC1GczQ7iTq3Zw==", + "dev": true + }, + "schema-utils": { + "version": "2.6.5", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-2.6.5.tgz", + "integrity": "sha512-5KXuwKziQrTVHh8j/Uxz+QUbxkaLW9X/86NBlx/gnKgtsZA2GIVMUn17qWhRFwF8jdYb3Dig5hRO/W5mZqy6SQ==", + "dev": true, + "requires": { + "ajv": "^6.12.0", + "ajv-keywords": 
"^3.4.1" + } + }, + "semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "dev": true + }, + "shallow-clone": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/shallow-clone/-/shallow-clone-3.0.1.tgz", + "integrity": "sha512-/6KqX+GVUdqPuPPd2LxDDxzX6CAbjJehAAOKlNpqqUpAqPM6HeL8f+o3a+JsyGjn2lv0WY8UsTgUJjU9Ok55NA==", + "dev": true, + "requires": { + "kind-of": "^6.0.2" } } } @@ -12038,55 +17162,65 @@ "sax": { "version": "1.2.4", "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz", - "integrity": "sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw==" + "integrity": "sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw==", + "dev": true + }, + "saxes": { + "version": "3.1.11", + "resolved": "https://registry.npmjs.org/saxes/-/saxes-3.1.11.tgz", + "integrity": "sha512-Ydydq3zC+WYDJK1+gRxRapLIED9PWeSuuS41wqyoRmzvhhh9nc+QQrVMKJYzJFULazeGhzSV0QleN2wD3boh2g==", + "dev": true, + "requires": { + "xmlchars": "^2.1.1" + } }, "scheduler": { "version": "0.12.0", "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.12.0.tgz", "integrity": "sha512-t7MBR28Akcp4Jm+QoR63XgAi9YgCUmgvDHqf5otgAj4QvdoBE4ImCX0ffehefePPG+aitiYHp0g/mW6s4Tp+dw==", + "dev": true, "requires": { "loose-envify": "^1.1.0", "object-assign": "^4.1.1" } }, "schema-utils": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-0.3.0.tgz", - "integrity": "sha1-9YdyIs4+kx7a4DnxfrNxbnE3+M8=", + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-1.0.0.tgz", + "integrity": "sha512-i27Mic4KovM/lnGsy8whRCHhc7VicJajAjTrYg11K9zfZXnYIt4k5F+kZkwjnrhKzLic/HLU4j11mjsz2G/75g==", + "dev": true, "requires": { - "ajv": "^5.0.0" + "ajv": "^6.1.0", + "ajv-errors": "^1.0.0", + "ajv-keywords": "^3.1.0" } }, "select-hose": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/select-hose/-/select-hose-2.0.0.tgz", - "integrity": "sha1-Yl2GWPhlr0Psliv8N2o3NZpJlMo=" + "integrity": "sha1-Yl2GWPhlr0Psliv8N2o3NZpJlMo=", + "dev": true }, "selfsigned": { - "version": "1.10.4", - "resolved": "https://registry.npmjs.org/selfsigned/-/selfsigned-1.10.4.tgz", - "integrity": "sha512-9AukTiDmHXGXWtWjembZ5NDmVvP2695EtpgbCsxCa68w3c88B+alqbmZ4O3hZ4VWGXeGWzEVdvqgAJD8DQPCDw==", + "version": "1.10.7", + "resolved": "https://registry.npmjs.org/selfsigned/-/selfsigned-1.10.7.tgz", + "integrity": "sha512-8M3wBCzeWIJnQfl43IKwOmC4H/RAp50S8DF60znzjW5GVqTcSe2vWclt7hmYVPkKPlHWOu5EaWOMZ2Y6W8ZXTA==", + "dev": true, "requires": { - "node-forge": "0.7.5" + "node-forge": "0.9.0" } }, "semver": { "version": "5.6.0", "resolved": "https://registry.npmjs.org/semver/-/semver-5.6.0.tgz", - "integrity": "sha512-RS9R6R35NYgQn++fkDWaOmqGoj4Ek9gGs+DPxNUZKuwE183xjJroKvyo1IzVFeXvUrvmALy6FWD5xrdJT25gMg==" - }, - "semver-diff": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/semver-diff/-/semver-diff-2.1.0.tgz", - "integrity": "sha1-S7uEN8jTfksM8aaP1ybsbWRdbTY=", - "requires": { - "semver": "^5.0.3" - } + "integrity": "sha512-RS9R6R35NYgQn++fkDWaOmqGoj4Ek9gGs+DPxNUZKuwE183xjJroKvyo1IzVFeXvUrvmALy6FWD5xrdJT25gMg==", + "dev": true }, "send": { - "version": "0.16.2", - "resolved": "https://registry.npmjs.org/send/-/send-0.16.2.tgz", - "integrity": 
"sha512-E64YFPUssFHEFBvpbbjr44NCLtI1AohxQ8ZSiJjQLskAdKuriYEP6VyGEsRDH8ScozGpkaX1BGvhanqCwkcEZw==", + "version": "0.17.1", + "resolved": "https://registry.npmjs.org/send/-/send-0.17.1.tgz", + "integrity": "sha512-BsVKsiGcQMFwT8UxypobUKyv7irCNRHk1T0G680vk88yf6LBByGcZJOTJCrTP2xVN6yI+XjPJcNuE3V4fT9sAg==", + "dev": true, "requires": { "debug": "2.6.9", "depd": "~1.1.2", @@ -12095,26 +17229,67 @@ "escape-html": "~1.0.3", "etag": "~1.8.1", "fresh": "0.5.2", - "http-errors": "~1.6.2", - "mime": "1.4.1", - "ms": "2.0.0", + "http-errors": "~1.7.2", + "mime": "1.6.0", + "ms": "2.1.1", "on-finished": "~2.3.0", - "range-parser": "~1.2.0", - "statuses": "~1.4.0" + "range-parser": "~1.2.1", + "statuses": "~1.5.0" }, "dependencies": { "debug": { "version": "2.6.9", "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dev": true, + "requires": { + "ms": "2.0.0" + }, + "dependencies": { + "ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=", + "dev": true + } + } + }, + "http-errors": { + "version": "1.7.3", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.7.3.tgz", + "integrity": "sha512-ZTTX0MWrsQ2ZAhA1cejAwDLycFsd7I7nVtnkT3Ol0aqodaKW+0CTZDQ1uBv5whptCnc8e8HeRRJxRs0kmm/Qfw==", + "dev": true, "requires": { - "ms": "2.0.0" + "depd": "~1.1.2", + "inherits": "2.0.4", + "setprototypeof": "1.1.1", + "statuses": ">= 1.5.0 < 2", + "toidentifier": "1.0.0" } }, - "mime": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/mime/-/mime-1.4.1.tgz", - "integrity": "sha512-KI1+qOZu5DcW6wayYHSzR/tXKCDC5Om4s1z2QJjDULzLcmf3DvzS7oluY4HCTrc+9FiKmWUgeNLg7W3uIQvxtQ==" + "inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "dev": true + }, + "ms": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.1.tgz", + "integrity": "sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg==", + "dev": true + }, + "setprototypeof": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.1.1.tgz", + "integrity": "sha512-JvdAWfbXeIGaZ9cILp38HntZSFSo3mWg6xGcJJsd+d4aRMOqauag1C63dJfDw7OaMYwEbHMOxEZ1lqVRYP2OAw==", + "dev": true + }, + "statuses": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz", + "integrity": "sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow=", + "dev": true } } }, @@ -12129,14 +17304,16 @@ } }, "serialize-javascript": { - "version": "1.6.1", - "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-1.6.1.tgz", - "integrity": "sha512-A5MOagrPFga4YaKQSWHryl7AXvbQkEqpw4NNYMTNYUNV51bA8ABHgYFpqKx+YFFrw59xMV1qGH1R4AgoNIVgCw==" + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-2.1.2.tgz", + "integrity": "sha512-rs9OggEUF0V4jUSecXazOYsLfu7OGK2qIn3c7IPBiffz32XniEp/TX9Xmc9LQfK2nQ2QKHvZ2oygKUGU0lG4jQ==", + "dev": true }, "serve-index": { "version": "1.9.1", "resolved": "https://registry.npmjs.org/serve-index/-/serve-index-1.9.1.tgz", "integrity": "sha1-03aNabHn2C5c4FD/9bRTvqEqkjk=", + "dev": true, "requires": { "accepts": "~1.3.4", "batch": "0.6.1", @@ -12151,6 +17328,7 @@ "version": "2.6.9", 
"resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dev": true, "requires": { "ms": "2.0.0" } @@ -12158,30 +17336,35 @@ } }, "serve-static": { - "version": "1.13.2", - "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.13.2.tgz", - "integrity": "sha512-p/tdJrO4U387R9oMjb1oj7qSMaMfmOyd4j9hOFoxZe2baQszgHcSWjuya/CiT5kgZZKRudHNOA0pYXOl8rQ5nw==", + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.14.1.tgz", + "integrity": "sha512-JMrvUwE54emCYWlTI+hGrGv5I8dEwmco/00EvkzIIsR7MqrHonbD9pO2MOfFnpFntl7ecpZs+3mW+XbQZu9QCg==", + "dev": true, "requires": { "encodeurl": "~1.0.2", "escape-html": "~1.0.3", - "parseurl": "~1.3.2", - "send": "0.16.2" + "parseurl": "~1.3.3", + "send": "0.17.1" + }, + "dependencies": { + "parseurl": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", + "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==", + "dev": true + } } }, - "serviceworker-cache-polyfill": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/serviceworker-cache-polyfill/-/serviceworker-cache-polyfill-4.0.0.tgz", - "integrity": "sha1-3hnuc77yGrPAdAo3sz22JGS6ves=" - }, "set-blocking": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", - "integrity": "sha1-BF+XgtARrppoA93TgrJDkrPYkPc=" + "integrity": "sha1-BF+XgtARrppoA93TgrJDkrPYkPc=", + "dev": true }, "set-value": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/set-value/-/set-value-2.0.0.tgz", - "integrity": "sha512-hw0yxk9GT/Hr5yJEYnHNKYXkIA8mVJgd9ditYZCe16ZczcaELYYcfvaXesNACk2O8O0nTiPQcQhGUQj8JLzeeg==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/set-value/-/set-value-2.0.1.tgz", + "integrity": "sha512-JxHc1weCN68wRY0fhCoXpyK55m/XPHafOmK4UWD7m2CI14GMcFypt4w/0+NV5f/ZMby2F6S2wwA7fgynh9gWSw==", "requires": { "extend-shallow": "^2.0.1", "is-extendable": "^0.1.1", @@ -12207,21 +17390,53 @@ "setprototypeof": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.1.0.tgz", - "integrity": "sha512-BvE/TwpZX4FXExxOxZyRGQQv651MSwmWKZGqvmPcRIjDqWub67kTKuIMx43cZZrS/cBBzwBcNDWoFxt2XEFIpQ==" + "integrity": "sha512-BvE/TwpZX4FXExxOxZyRGQQv651MSwmWKZGqvmPcRIjDqWub67kTKuIMx43cZZrS/cBBzwBcNDWoFxt2XEFIpQ==", + "dev": true }, "sha.js": { "version": "2.4.11", "resolved": "https://registry.npmjs.org/sha.js/-/sha.js-2.4.11.tgz", "integrity": "sha512-QMEp5B7cftE7APOjk5Y6xgrbWu+WkLVQwk8JNjZ8nKRciZaByEW6MubieAiToS7+dwvrjGhH8jRXz3MVd0AYqQ==", + "dev": true, "requires": { "inherits": "^2.0.1", "safe-buffer": "^5.0.1" } }, + "shallow-clone": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/shallow-clone/-/shallow-clone-0.1.2.tgz", + "integrity": "sha1-WQnodLp3EG1zrEFM/sH/yofZcGA=", + "dev": true, + "requires": { + "is-extendable": "^0.1.1", + "kind-of": "^2.0.1", + "lazy-cache": "^0.2.3", + "mixin-object": "^2.0.1" + }, + "dependencies": { + "kind-of": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-2.0.1.tgz", + "integrity": "sha1-AY7HpM5+OobLkUG+UZ0kyPqpgbU=", + "dev": true, + "requires": { + "is-buffer": "^1.0.2" + } + }, + "lazy-cache": { + "version": "0.2.7", + "resolved": "https://registry.npmjs.org/lazy-cache/-/lazy-cache-0.2.7.tgz", + "integrity": "sha1-f+3fLctu23fRHvHRF6tf/fCrG2U=", + "dev": 
true + } + } + }, "shebang-command": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", "integrity": "sha1-RKrGW2lbAzmJaMOfNj/uXer98eo=", + "dev": true, "requires": { "shebang-regex": "^1.0.0" } @@ -12229,33 +17444,171 @@ "shebang-regex": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", - "integrity": "sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM=" + "integrity": "sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM=", + "dev": true }, "shell-quote": { - "version": "1.6.1", - "resolved": "https://registry.npmjs.org/shell-quote/-/shell-quote-1.6.1.tgz", - "integrity": "sha1-9HgZSczkAmlxJ0MOo7PFR29IF2c=", - "requires": { - "array-filter": "~0.0.0", - "array-map": "~0.0.0", - "array-reduce": "~0.0.0", - "jsonify": "~0.0.0" - } + "version": "1.7.2", + "resolved": "https://registry.npmjs.org/shell-quote/-/shell-quote-1.7.2.tgz", + "integrity": "sha512-mRz/m/JVscCrkMyPqHc/bczi3OQHkLTqXHEFu0zDhK/qfv3UcOA4SVmRCLmos4bhjr9ekVQubj/R7waKapmiQg==", + "dev": true }, "shellwords": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/shellwords/-/shellwords-0.1.1.tgz", - "integrity": "sha512-vFwSUfQvqybiICwZY5+DAWIPLKsWO31Q91JSKl3UYv+K5c2QRPzn0qzec6QPu1Qc9eHYItiP3NdJqNVqetYAww==" + "integrity": "sha512-vFwSUfQvqybiICwZY5+DAWIPLKsWO31Q91JSKl3UYv+K5c2QRPzn0qzec6QPu1Qc9eHYItiP3NdJqNVqetYAww==", + "dev": true + }, + "side-channel": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.2.tgz", + "integrity": "sha512-7rL9YlPHg7Ancea1S96Pa8/QWb4BtXL/TZvS6B8XFetGBeuhAsfmUspK6DokBeZ64+Kj9TCNRD/30pVz1BvQNA==", + "dev": true, + "requires": { + "es-abstract": "^1.17.0-next.1", + "object-inspect": "^1.7.0" + }, + "dependencies": { + "es-abstract": { + "version": "1.17.5", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.17.5.tgz", + "integrity": "sha512-BR9auzDbySxOcfog0tLECW8l28eRGpDpU3Dm3Hp4q/N+VtLTmyj4EUN088XZWQDW/hzj6sYRDXeOFsaAODKvpg==", + "dev": true, + "requires": { + "es-to-primitive": "^1.2.1", + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.1", + "is-callable": "^1.1.5", + "is-regex": "^1.0.5", + "object-inspect": "^1.7.0", + "object-keys": "^1.1.1", + "object.assign": "^4.1.0", + "string.prototype.trimleft": "^2.1.1", + "string.prototype.trimright": "^2.1.1" + } + }, + "es-to-primitive": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz", + "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==", + "dev": true, + "requires": { + "is-callable": "^1.1.4", + "is-date-object": "^1.0.1", + "is-symbol": "^1.0.2" + } + }, + "has-symbols": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.1.tgz", + "integrity": "sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg==", + "dev": true + }, + "is-callable": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.1.5.tgz", + "integrity": "sha512-ESKv5sMCJB2jnHTWZ3O5itG+O128Hsus4K4Qh1h2/cgn2vbgnLSVqfV46AeJA9D5EeeLa9w81KUXMtn34zhX+Q==", + "dev": true + }, + "is-regex": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.0.5.tgz", + "integrity": "sha512-vlKW17SNq44owv5AQR3Cq0bQPEb8+kF3UKZ2fiZNOWtztYE5i0CzCZxFDwO58qAOWtxdBRVO/V5Qin1wjCqFYQ==", + "dev": true, + "requires": { + "has": "^1.0.3" + } + }, + "object-inspect": { + 
"version": "1.7.0", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.7.0.tgz", + "integrity": "sha512-a7pEHdh1xKIAgTySUGgLMx/xwDZskN1Ud6egYYN3EdRW4ZMPNEDUTF+hwy2LUC+Bl+SyLXANnwz/jyh/qutKUw==", + "dev": true + }, + "object-keys": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", + "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==", + "dev": true + }, + "string.prototype.trimleft": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/string.prototype.trimleft/-/string.prototype.trimleft-2.1.2.tgz", + "integrity": "sha512-gCA0tza1JBvqr3bfAIFJGqfdRTyPae82+KTnm3coDXkZN9wnuW3HjGgN386D7hfv5CHQYCI022/rJPVlqXyHSw==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.5", + "string.prototype.trimstart": "^1.0.0" + } + }, + "string.prototype.trimright": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/string.prototype.trimright/-/string.prototype.trimright-2.1.2.tgz", + "integrity": "sha512-ZNRQ7sY3KroTaYjRS6EbNiiHrOkjihL9aQE/8gfQ4DtAC/aEBRHFJa44OmoWxGGqXuJlfKkZW4WcXErGr+9ZFg==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.5", + "string.prototype.trimend": "^1.0.0" + } + } + } }, "signal-exit": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.2.tgz", - "integrity": "sha1-tf3AjxKH6hF4Yo5BXiUTK3NkbG0=" + "integrity": "sha1-tf3AjxKH6hF4Yo5BXiUTK3NkbG0=", + "dev": true + }, + "simple-swizzle": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/simple-swizzle/-/simple-swizzle-0.2.2.tgz", + "integrity": "sha1-pNprY1/8zMoz9w0Xy5JZLeleVXo=", + "dev": true, + "requires": { + "is-arrayish": "^0.3.1" + }, + "dependencies": { + "is-arrayish": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.3.2.tgz", + "integrity": "sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ==", + "dev": true + } + } + }, + "sisteransi": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/sisteransi/-/sisteransi-1.0.5.tgz", + "integrity": "sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==", + "dev": true }, "slash": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/slash/-/slash-1.0.0.tgz", - "integrity": "sha1-xB8vbDn8FtHNF61LXYlhFK5HDVU=" + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-2.0.0.tgz", + "integrity": "sha512-ZYKh3Wh2z1PpEXWr0MpSBZ0V6mZHAQfYevttO11c51CaWjGTaadiKZ+wVt1PbMlDV5qhMFslpZCemhwOK7C89A==", + "dev": true + }, + "slice-ansi": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-2.1.0.tgz", + "integrity": "sha512-Qu+VC3EwYLldKa1fCxuuvULvSJOKEgk9pi8dZeCVK7TqBfUNTH4sFkk4joj8afVSfAYgJoSOetjx9QWOJ5mYoQ==", + "dev": true, + "requires": { + "ansi-styles": "^3.2.0", + "astral-regex": "^1.0.0", + "is-fullwidth-code-point": "^2.0.0" + }, + "dependencies": { + "is-fullwidth-code-point": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", + "dev": true + } + } }, "snake-case": { "version": "2.1.0", @@ -12266,6 +17619,12 @@ "no-case": "^2.2.0" } }, + "snakeize": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/snakeize/-/snakeize-0.1.0.tgz", + "integrity": 
"sha1-EMCI2LWOsHazIpu1oE4jLOEmQi0=", + "dev": true + }, "snapdragon": { "version": "0.8.2", "resolved": "https://registry.npmjs.org/snapdragon/-/snapdragon-0.8.2.tgz", @@ -12371,50 +17730,232 @@ } } }, - "sockjs": { - "version": "0.3.18", - "resolved": "https://registry.npmjs.org/sockjs/-/sockjs-0.3.18.tgz", - "integrity": "sha1-2bKJMWyn33dZXvKZ4HXw+TfrQgc=", + "snapshot-diff": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/snapshot-diff/-/snapshot-diff-0.6.1.tgz", + "integrity": "sha512-wWt3x4fb7FJIcV05Ng9NceVSTvQYE493sIqebzUoQbQlRG6rIR03KaRt8o/7W7znaYjUbP0eOq1iK+DfpZXaeQ==", + "dev": true, "requires": { - "faye-websocket": "^0.10.0", - "uuid": "^2.0.2" + "jest-diff": "^24.0.0", + "jest-snapshot": "^24.0.0", + "pretty-format": "^24.0.0", + "strip-ansi": "^5.0.0" }, "dependencies": { - "faye-websocket": { - "version": "0.10.0", - "resolved": "https://registry.npmjs.org/faye-websocket/-/faye-websocket-0.10.0.tgz", - "integrity": "sha1-TkkvjQTftviQA1B/btvy1QHnxvQ=", + "ansi-regex": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", + "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", + "dev": true + }, + "chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dev": true, "requires": { - "websocket-driver": ">=0.5.1" + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + } + }, + "expect": { + "version": "24.9.0", + "resolved": "https://registry.npmjs.org/expect/-/expect-24.9.0.tgz", + "integrity": "sha512-wvVAx8XIol3Z5m9zvZXiyZOQ+sRJqNTIm6sGjdWlaZIeupQGO3WbYI+15D/AmEwZywL6wtJkbAbJtzkOfBuR0Q==", + "dev": true, + "requires": { + "@jest/types": "^24.9.0", + "ansi-styles": "^3.2.0", + "jest-get-type": "^24.9.0", + "jest-matcher-utils": "^24.9.0", + "jest-message-util": "^24.9.0", + "jest-regex-util": "^24.9.0" + } + }, + "jest-diff": { + "version": "24.9.0", + "resolved": "https://registry.npmjs.org/jest-diff/-/jest-diff-24.9.0.tgz", + "integrity": "sha512-qMfrTs8AdJE2iqrTp0hzh7kTd2PQWrsFyj9tORoKmu32xjPjeE4NyjVRDz8ybYwqS2ik8N4hsIpiVTyFeo2lBQ==", + "dev": true, + "requires": { + "chalk": "^2.0.1", + "diff-sequences": "^24.9.0", + "jest-get-type": "^24.9.0", + "pretty-format": "^24.9.0" + } + }, + "jest-get-type": { + "version": "24.9.0", + "resolved": "https://registry.npmjs.org/jest-get-type/-/jest-get-type-24.9.0.tgz", + "integrity": "sha512-lUseMzAley4LhIcpSP9Jf+fTrQ4a1yHQwLNeeVa2cEmbCGeoZAtYPOIv8JaxLD/sUpKxetKGP+gsHl8f8TSj8Q==", + "dev": true + }, + "jest-matcher-utils": { + "version": "24.9.0", + "resolved": "https://registry.npmjs.org/jest-matcher-utils/-/jest-matcher-utils-24.9.0.tgz", + "integrity": "sha512-OZz2IXsu6eaiMAwe67c1T+5tUAtQyQx27/EMEkbFAGiw52tB9em+uGbzpcgYVpA8wl0hlxKPZxrly4CXU/GjHA==", + "dev": true, + "requires": { + "chalk": "^2.0.1", + "jest-diff": "^24.9.0", + "jest-get-type": "^24.9.0", + "pretty-format": "^24.9.0" + } + }, + "jest-message-util": { + "version": "24.9.0", + "resolved": "https://registry.npmjs.org/jest-message-util/-/jest-message-util-24.9.0.tgz", + "integrity": "sha512-oCj8FiZ3U0hTP4aSui87P4L4jC37BtQwUMqk+zk/b11FR19BJDeZsZAvIHutWnmtw7r85UmR3CEWZ0HWU2mAlw==", + "dev": true, + "requires": { + "@babel/code-frame": "^7.0.0", + "@jest/test-result": "^24.9.0", + "@jest/types": "^24.9.0", + "@types/stack-utils": "^1.0.1", + "chalk": 
"^2.0.1", + "micromatch": "^3.1.10", + "slash": "^2.0.0", + "stack-utils": "^1.0.1" + } + }, + "jest-regex-util": { + "version": "24.9.0", + "resolved": "https://registry.npmjs.org/jest-regex-util/-/jest-regex-util-24.9.0.tgz", + "integrity": "sha512-05Cmb6CuxaA+Ys6fjr3PhvV3bGQmO+2p2La4hFbU+W5uOc479f7FdLXUWXw4pYMAhhSZIuKHwSXSu6CsSBAXQA==", + "dev": true + }, + "jest-resolve": { + "version": "24.9.0", + "resolved": "https://registry.npmjs.org/jest-resolve/-/jest-resolve-24.9.0.tgz", + "integrity": "sha512-TaLeLVL1l08YFZAt3zaPtjiVvyy4oSA6CRe+0AFPPVX3Q/VI0giIWWoAvoS5L96vj9Dqxj4fB5p2qrHCmTU/MQ==", + "dev": true, + "requires": { + "@jest/types": "^24.9.0", + "browser-resolve": "^1.11.3", + "chalk": "^2.0.1", + "jest-pnp-resolver": "^1.2.1", + "realpath-native": "^1.1.0" + } + }, + "jest-snapshot": { + "version": "24.9.0", + "resolved": "https://registry.npmjs.org/jest-snapshot/-/jest-snapshot-24.9.0.tgz", + "integrity": "sha512-uI/rszGSs73xCM0l+up7O7a40o90cnrk429LOiK3aeTvfC0HHmldbd81/B7Ix81KSFe1lwkbl7GnBGG4UfuDew==", + "dev": true, + "requires": { + "@babel/types": "^7.0.0", + "@jest/types": "^24.9.0", + "chalk": "^2.0.1", + "expect": "^24.9.0", + "jest-diff": "^24.9.0", + "jest-get-type": "^24.9.0", + "jest-matcher-utils": "^24.9.0", + "jest-message-util": "^24.9.0", + "jest-resolve": "^24.9.0", + "mkdirp": "^0.5.1", + "natural-compare": "^1.4.0", + "pretty-format": "^24.9.0", + "semver": "^6.2.0" + } + }, + "pretty-format": { + "version": "24.9.0", + "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-24.9.0.tgz", + "integrity": "sha512-00ZMZUiHaJrNfk33guavqgvfJS30sLYf0f8+Srklv0AMPodGGHcoHgksZ3OThYnIvOd+8yMCn0YiEOogjlgsnA==", + "dev": true, + "requires": { + "@jest/types": "^24.9.0", + "ansi-regex": "^4.0.0", + "ansi-styles": "^3.2.0", + "react-is": "^16.8.4" + } + }, + "react-is": { + "version": "16.12.0", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.12.0.tgz", + "integrity": "sha512-rPCkf/mWBtKc97aLL9/txD8DZdemK0vkA3JMLShjlJB3Pj3s+lpf1KaBzMfQrAmhMQB0n1cU/SUGgKKBCe837Q==", + "dev": true + }, + "realpath-native": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/realpath-native/-/realpath-native-1.1.0.tgz", + "integrity": "sha512-wlgPA6cCIIg9gKz0fgAPjnzh4yR/LnXovwuo9hvyGvx3h8nX4+/iLZplfUWasXpqD8BdnGnP5njOFjkUwPzvjA==", + "dev": true, + "requires": { + "util.promisify": "^1.0.0" } }, - "uuid": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-2.0.3.tgz", - "integrity": "sha1-Z+LoY3lyFVMN/zGOW/nc6/1Hsho=" + "semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "dev": true + }, + "slash": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-2.0.0.tgz", + "integrity": "sha512-ZYKh3Wh2z1PpEXWr0MpSBZ0V6mZHAQfYevttO11c51CaWjGTaadiKZ+wVt1PbMlDV5qhMFslpZCemhwOK7C89A==", + "dev": true + }, + "strip-ansi": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "dev": true, + "requires": { + "ansi-regex": "^4.1.0" + } } } }, + "sockjs": { + "version": "0.3.19", + "resolved": "https://registry.npmjs.org/sockjs/-/sockjs-0.3.19.tgz", + "integrity": "sha512-V48klKZl8T6MzatbLlzzRNhMepEys9Y4oGFpypBFFn1gLI/QQ9HtLLyWJNbPlwGLelOVOEijUbTTJeLLI59jLw==", + "dev": true, + "requires": { + 
"faye-websocket": "^0.10.0", + "uuid": "^3.0.1" + } + }, "sockjs-client": { - "version": "1.1.5", - "resolved": "https://registry.npmjs.org/sockjs-client/-/sockjs-client-1.1.5.tgz", - "integrity": "sha1-G7fA9yIsQPQq3xT0RCy9Eml3GoM=", + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/sockjs-client/-/sockjs-client-1.4.0.tgz", + "integrity": "sha512-5zaLyO8/nri5cua0VtOrFXBPK1jbL4+1cebT/mmKA1E1ZXOvJrII75bPu0l0k843G/+iAbhEqzyKr0w/eCCj7g==", + "dev": true, "requires": { - "debug": "^2.6.6", - "eventsource": "0.1.6", - "faye-websocket": "~0.11.0", - "inherits": "^2.0.1", + "debug": "^3.2.5", + "eventsource": "^1.0.7", + "faye-websocket": "~0.11.1", + "inherits": "^2.0.3", "json3": "^3.3.2", - "url-parse": "^1.1.8" + "url-parse": "^1.4.3" }, "dependencies": { "debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "version": "3.2.6", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.6.tgz", + "integrity": "sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==", + "dev": true, "requires": { - "ms": "2.0.0" + "ms": "^2.1.1" + } + }, + "faye-websocket": { + "version": "0.11.3", + "resolved": "https://registry.npmjs.org/faye-websocket/-/faye-websocket-0.11.3.tgz", + "integrity": "sha512-D2y4bovYpzziGgbHYtGCMjlJM36vAl/y+xUyn1C+FVx8szd1E+86KwVw6XvYSzOP8iMpm1X0I4xJD+QtUb36OA==", + "dev": true, + "requires": { + "websocket-driver": ">=0.5.1" } + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true } } }, @@ -12422,6 +17963,7 @@ "version": "1.1.2", "resolved": "https://registry.npmjs.org/sort-keys/-/sort-keys-1.1.2.tgz", "integrity": "sha1-RBttTTRnmPG05J6JIK37oOVD+a0=", + "dev": true, "requires": { "is-plain-obj": "^1.0.0" } @@ -12429,22 +17971,14 @@ "source-list-map": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/source-list-map/-/source-list-map-2.0.1.tgz", - "integrity": "sha512-qnQ7gVMxGNxsiL4lEuJwe/To8UnK7fAnmbGEEH8RpLouuKbeEm0lhbQVFIrNSuB+G7tVrAlVsZgETT5nljf+Iw==" + "integrity": "sha512-qnQ7gVMxGNxsiL4lEuJwe/To8UnK7fAnmbGEEH8RpLouuKbeEm0lhbQVFIrNSuB+G7tVrAlVsZgETT5nljf+Iw==", + "dev": true }, "source-map": { "version": "0.5.7", "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", "integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=" }, - "source-map-loader": { - "version": "0.2.4", - "resolved": "https://registry.npmjs.org/source-map-loader/-/source-map-loader-0.2.4.tgz", - "integrity": "sha512-OU6UJUty+i2JDpTItnizPrlpOIBLmQbWMuBg9q5bVtnHACqw1tn9nNwqJLbv0/00JjnJb/Ee5g5WS5vrRv7zIQ==", - "requires": { - "async": "^2.5.0", - "loader-utils": "^1.1.0" - } - }, "source-map-resolve": { "version": "0.5.2", "resolved": "https://registry.npmjs.org/source-map-resolve/-/source-map-resolve-0.5.2.tgz", @@ -12461,6 +17995,7 @@ "version": "0.5.9", "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.9.tgz", "integrity": "sha512-gR6Rw4MvUlYy83vP0vxoVNzM6t8MUXqNuRsuBmBHQDu1Fh6X015FrLdgoDKcNdkwGubozq0P4N0Q37UyFVr1EA==", + "dev": true, "requires": { "buffer-from": "^1.0.0", "source-map": "^0.6.0" @@ -12469,7 +18004,8 @@ "source-map": { "version": "0.6.1", "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": 
"sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==" + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true } } }, @@ -12482,6 +18018,7 @@ "version": "3.1.0", "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.1.0.tgz", "integrity": "sha512-lr2EZCctC2BNR7j7WzJ2FpDznxky1sjfxvvYEyzxNyb6lZXHODmEoJeFu4JupYlkfha1KZpJyoqiJ7pgA1qq8Q==", + "dev": true, "requires": { "spdx-expression-parse": "^3.0.0", "spdx-license-ids": "^3.0.0" @@ -12490,12 +18027,14 @@ "spdx-exceptions": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.2.0.tgz", - "integrity": "sha512-2XQACfElKi9SlVb1CYadKDXvoajPgBVPn/gOQLrTvHdElaVhr7ZEbqJaRnJLVNeaI4cMEAgVCeBMKF6MWRDCRA==" + "integrity": "sha512-2XQACfElKi9SlVb1CYadKDXvoajPgBVPn/gOQLrTvHdElaVhr7ZEbqJaRnJLVNeaI4cMEAgVCeBMKF6MWRDCRA==", + "dev": true }, "spdx-expression-parse": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.0.tgz", "integrity": "sha512-Yg6D3XpRD4kkOmTpdgbUiEJFKghJH03fiC1OPll5h/0sO6neh2jqRDVHOQ4o/LMea0tgCkbMgea5ip/e+MkWyg==", + "dev": true, "requires": { "spdx-exceptions": "^2.1.0", "spdx-license-ids": "^3.0.0" @@ -12504,51 +18043,77 @@ "spdx-license-ids": { "version": "3.0.3", "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.3.tgz", - "integrity": "sha512-uBIcIl3Ih6Phe3XHK1NqboJLdGfwr1UN3k6wSD1dZpmPsIkb8AGNbZYJ1fOBk834+Gxy8rpfDxrS6XLEMZMY2g==" + "integrity": "sha512-uBIcIl3Ih6Phe3XHK1NqboJLdGfwr1UN3k6wSD1dZpmPsIkb8AGNbZYJ1fOBk834+Gxy8rpfDxrS6XLEMZMY2g==", + "dev": true }, "spdy": { - "version": "3.4.7", - "resolved": "https://registry.npmjs.org/spdy/-/spdy-3.4.7.tgz", - "integrity": "sha1-Qv9B7OXMD5mjpsKKq7c/XDsDrLw=", + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/spdy/-/spdy-4.0.2.tgz", + "integrity": "sha512-r46gZQZQV+Kl9oItvl1JZZqJKGr+oEkB08A6BzkiR7593/7IbtuncXHd2YoYeTsG4157ZssMu9KYvUHLcjcDoA==", + "dev": true, "requires": { - "debug": "^2.6.8", - "handle-thing": "^1.2.5", + "debug": "^4.1.0", + "handle-thing": "^2.0.0", "http-deceiver": "^1.2.7", - "safe-buffer": "^5.0.1", "select-hose": "^2.0.0", - "spdy-transport": "^2.0.18" + "spdy-transport": "^3.0.0" }, "dependencies": { "debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", + "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", + "dev": true, "requires": { - "ms": "2.0.0" + "ms": "^2.1.1" } + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true } } }, "spdy-transport": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/spdy-transport/-/spdy-transport-2.1.1.tgz", - "integrity": "sha512-q7D8c148escoB3Z7ySCASadkegMmUZW8Wb/Q1u0/XBgDKMO880rLQDj8Twiew/tYi7ghemKUi/whSYOwE17f5Q==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/spdy-transport/-/spdy-transport-3.0.0.tgz", + "integrity": "sha512-hsLVFE5SjA6TCisWeJXFKniGGOpBgMLmerfO2aCyCU5s7nJ/rpAepqmFifv/GCbSbueEeAJJnmSQ2rKC/g8Fcw==", + "dev": true, "requires": { - 
"debug": "^2.6.8", - "detect-node": "^2.0.3", + "debug": "^4.1.0", + "detect-node": "^2.0.4", "hpack.js": "^2.1.6", - "obuf": "^1.1.1", - "readable-stream": "^2.2.9", - "safe-buffer": "^5.0.1", - "wbuf": "^1.7.2" + "obuf": "^1.1.2", + "readable-stream": "^3.0.6", + "wbuf": "^1.7.3" }, "dependencies": { "debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", + "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", + "dev": true, "requires": { - "ms": "2.0.0" + "ms": "^2.1.1" + } + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, + "readable-stream": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "dev": true, + "requires": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" } } } @@ -12570,6 +18135,7 @@ "version": "1.16.0", "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.16.0.tgz", "integrity": "sha512-Zhev35/y7hRMcID/upReIvRse+I9SVhyVre/KTJSJQWMz3C3+G+HpO7m1wK/yckEtujKZ7dS4hkVxAnmHaIGVQ==", + "dev": true, "requires": { "asn1": "~0.2.3", "assert-plus": "^1.0.0", @@ -12583,17 +18149,25 @@ } }, "ssri": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/ssri/-/ssri-5.3.0.tgz", - "integrity": "sha512-XRSIPqLij52MtgoQavH/x/dU1qVKtWUAAZeOHsR9c2Ddi4XerFy3mc1alf+dLJKl9EUIm/Ht+EowFkTUOA6GAQ==", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/ssri/-/ssri-6.0.1.tgz", + "integrity": "sha512-3Wge10hNcT1Kur4PDFwEieXSCMCJs/7WvSACcrMYrNp+b8kDL1/0wJch5Ni2WrtwEa2IO8OsVfeKIciKCDx/QA==", + "dev": true, "requires": { - "safe-buffer": "^5.1.1" + "figgy-pudding": "^3.5.1" } }, + "stable": { + "version": "0.1.8", + "resolved": "https://registry.npmjs.org/stable/-/stable-0.1.8.tgz", + "integrity": "sha512-ji9qxRnOVfcuLDySj9qzhGSEFVobyt1kIOSkj1qZzYLzq7Tos/oUUWvotUPQLlrsidqsK6tBH89Bc9kL5zHA6w==", + "dev": true + }, "stack-utils": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/stack-utils/-/stack-utils-1.0.2.tgz", - "integrity": "sha512-MTX+MeG5U994cazkjd/9KNAapsHnibjMLnfXodlkXw76JEea0UiNzrqidzo1emMwk7w5Qhc9jd4Bn9TBb1MFwA==" + "integrity": "sha512-MTX+MeG5U994cazkjd/9KNAapsHnibjMLnfXodlkXw76JEea0UiNzrqidzo1emMwk7w5Qhc9jd4Bn9TBb1MFwA==", + "dev": true }, "static-extend": { "version": "0.1.2", @@ -12615,19 +18189,22 @@ } }, "statuses": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.4.0.tgz", - "integrity": "sha512-zhSCtt8v2NDrRlPQpCNtw/heZLtfUDqxBM1udqikb/Hbk52LK4nQSwr10u77iopCW5LsyHpuXS0GnEc48mLeew==" + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz", + "integrity": "sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow=", + "dev": true }, "stealthy-require": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/stealthy-require/-/stealthy-require-1.1.1.tgz", - "integrity": "sha1-NbCYdbT/SfJqd35QmzCQoyJr8ks=" + "integrity": "sha1-NbCYdbT/SfJqd35QmzCQoyJr8ks=", + "dev": true }, "stream-browserify": { 
"version": "2.0.1", "resolved": "https://registry.npmjs.org/stream-browserify/-/stream-browserify-2.0.1.tgz", "integrity": "sha1-ZiZu5fm9uZQKTkUUyvtDu3Hlyds=", + "dev": true, "requires": { "inherits": "~2.0.1", "readable-stream": "^2.0.2" @@ -12637,15 +18214,26 @@ "version": "1.2.3", "resolved": "https://registry.npmjs.org/stream-each/-/stream-each-1.2.3.tgz", "integrity": "sha512-vlMC2f8I2u/bZGqkdfLQW/13Zihpej/7PmSiMQsbYddxuTsJp8vRe2x2FvVExZg7FaOds43ROAuFJwPR4MTZLw==", + "dev": true, "requires": { "end-of-stream": "^1.1.0", "stream-shift": "^1.0.0" } }, + "stream-events": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/stream-events/-/stream-events-1.0.5.tgz", + "integrity": "sha512-E1GUzBSgvct8Jsb3v2X15pjzN1tYebtbLaMg+eBOUOAxgbLoSbT2NS91ckc5lJD1KfLjId+jXJRgo0qnV5Nerg==", + "dev": true, + "requires": { + "stubs": "^3.0.0" + } + }, "stream-http": { "version": "2.8.3", "resolved": "https://registry.npmjs.org/stream-http/-/stream-http-2.8.3.tgz", "integrity": "sha512-+TSkfINHDo4J+ZobQLWiMouQYB+UVYFttRA94FpEzzJ7ZdqcL4uUUQ7WkdkI4DSozGmgBUE/a47L+38PenXhUw==", + "dev": true, "requires": { "builtin-status-codes": "^3.0.0", "inherits": "^2.0.1", @@ -12657,32 +18245,20 @@ "stream-shift": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.0.tgz", - "integrity": "sha1-1cdSgl5TZ+eG944Y5EXqIjoVWVI=" - }, - "stream-to": { - "version": "0.2.2", - "resolved": "https://registry.npmjs.org/stream-to/-/stream-to-0.2.2.tgz", - "integrity": "sha1-hDBgmNhf25kLn6MAsbPM9V6O8B0=", + "integrity": "sha1-1cdSgl5TZ+eG944Y5EXqIjoVWVI=", "dev": true }, - "stream-to-buffer": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/stream-to-buffer/-/stream-to-buffer-0.1.0.tgz", - "integrity": "sha1-JnmdkDqyAlyb1VCsRxcbAPjdgKk=", - "dev": true, - "requires": { - "stream-to": "~0.2.0" - } - }, "strict-uri-encode": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/strict-uri-encode/-/strict-uri-encode-1.1.0.tgz", - "integrity": "sha1-J5siXfHVgrH1TmWt3UNS4Y+qBxM=" + "integrity": "sha1-J5siXfHVgrH1TmWt3UNS4Y+qBxM=", + "dev": true }, "string-length": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/string-length/-/string-length-2.0.0.tgz", "integrity": "sha1-1A27aGo6zpYMHP/KVivyxF+DY+0=", + "dev": true, "requires": { "astral-regex": "^1.0.0", "strip-ansi": "^4.0.0" @@ -12691,12 +18267,14 @@ "ansi-regex": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", - "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=" + "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=", + "dev": true }, "strip-ansi": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", + "dev": true, "requires": { "ansi-regex": "^3.0.0" } @@ -12704,73 +18282,428 @@ } }, "string-width": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", - "integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==", + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.0.tgz", + "integrity": "sha512-zUz5JD+tgqtuDjMhwIg5uFVV3dtqZ9yQJlZVfq4I01/K5Paj5UHj7VyrQOJvzawSVlKpObApbfD0Ed6yJc+1eg==", + "dev": true, "requires": { - "is-fullwidth-code-point": "^2.0.0", - "strip-ansi": "^4.0.0" + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.0" }, "dependencies": { - "ansi-regex": { - 
"version": "3.0.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", - "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=" - }, "strip-ansi": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", - "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", + "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", + "dev": true, "requires": { - "ansi-regex": "^3.0.0" + "ansi-regex": "^5.0.0" + } + } + } + }, + "string.prototype.matchall": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/string.prototype.matchall/-/string.prototype.matchall-4.0.2.tgz", + "integrity": "sha512-N/jp6O5fMf9os0JU3E72Qhf590RSRZU/ungsL/qJUYVTNv7hTG0P/dbPjxINVN9jpscu3nzYwKESU3P3RY5tOg==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.0", + "has-symbols": "^1.0.1", + "internal-slot": "^1.0.2", + "regexp.prototype.flags": "^1.3.0", + "side-channel": "^1.0.2" + }, + "dependencies": { + "es-abstract": { + "version": "1.17.5", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.17.5.tgz", + "integrity": "sha512-BR9auzDbySxOcfog0tLECW8l28eRGpDpU3Dm3Hp4q/N+VtLTmyj4EUN088XZWQDW/hzj6sYRDXeOFsaAODKvpg==", + "dev": true, + "requires": { + "es-to-primitive": "^1.2.1", + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.1", + "is-callable": "^1.1.5", + "is-regex": "^1.0.5", + "object-inspect": "^1.7.0", + "object-keys": "^1.1.1", + "object.assign": "^4.1.0", + "string.prototype.trimleft": "^2.1.1", + "string.prototype.trimright": "^2.1.1" + } + }, + "es-to-primitive": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz", + "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==", + "dev": true, + "requires": { + "is-callable": "^1.1.4", + "is-date-object": "^1.0.1", + "is-symbol": "^1.0.2" + } + }, + "has-symbols": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.1.tgz", + "integrity": "sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg==", + "dev": true + }, + "is-callable": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.1.5.tgz", + "integrity": "sha512-ESKv5sMCJB2jnHTWZ3O5itG+O128Hsus4K4Qh1h2/cgn2vbgnLSVqfV46AeJA9D5EeeLa9w81KUXMtn34zhX+Q==", + "dev": true + }, + "is-regex": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.0.5.tgz", + "integrity": "sha512-vlKW17SNq44owv5AQR3Cq0bQPEb8+kF3UKZ2fiZNOWtztYE5i0CzCZxFDwO58qAOWtxdBRVO/V5Qin1wjCqFYQ==", + "dev": true, + "requires": { + "has": "^1.0.3" + } + }, + "object-inspect": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.7.0.tgz", + "integrity": "sha512-a7pEHdh1xKIAgTySUGgLMx/xwDZskN1Ud6egYYN3EdRW4ZMPNEDUTF+hwy2LUC+Bl+SyLXANnwz/jyh/qutKUw==", + "dev": true + }, + "object-keys": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", + "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==", + "dev": true + }, + "string.prototype.trimleft": { + "version": "2.1.2", + "resolved": 
"https://registry.npmjs.org/string.prototype.trimleft/-/string.prototype.trimleft-2.1.2.tgz", + "integrity": "sha512-gCA0tza1JBvqr3bfAIFJGqfdRTyPae82+KTnm3coDXkZN9wnuW3HjGgN386D7hfv5CHQYCI022/rJPVlqXyHSw==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.5", + "string.prototype.trimstart": "^1.0.0" + } + }, + "string.prototype.trimright": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/string.prototype.trimright/-/string.prototype.trimright-2.1.2.tgz", + "integrity": "sha512-ZNRQ7sY3KroTaYjRS6EbNiiHrOkjihL9aQE/8gfQ4DtAC/aEBRHFJa44OmoWxGGqXuJlfKkZW4WcXErGr+9ZFg==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.5", + "string.prototype.trimend": "^1.0.0" } } } }, "string.prototype.trim": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.1.2.tgz", - "integrity": "sha1-0E3iyJ4Tf019IG8Ia17S+ua+jOo=", + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.0.tgz", + "integrity": "sha512-9EIjYD/WdlvLpn987+ctkLf0FfvBefOCuiEr2henD8X+7jfwPnyvTdmW8OJhj5p+M0/96mBdynLWkxUr+rHlpg==", "dev": true, "requires": { - "define-properties": "^1.1.2", - "es-abstract": "^1.5.0", - "function-bind": "^1.0.2" + "define-properties": "^1.1.3", + "es-abstract": "^1.13.0", + "function-bind": "^1.1.1" + } + }, + "string.prototype.trimend": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.0.tgz", + "integrity": "sha512-EEJnGqa/xNfIg05SxiPSqRS7S9qwDhYts1TSLR1BQfYUfPe1stofgGKvwERK9+9yf+PpfBMlpBaCHucXGPQfUA==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.5" + }, + "dependencies": { + "es-abstract": { + "version": "1.17.5", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.17.5.tgz", + "integrity": "sha512-BR9auzDbySxOcfog0tLECW8l28eRGpDpU3Dm3Hp4q/N+VtLTmyj4EUN088XZWQDW/hzj6sYRDXeOFsaAODKvpg==", + "dev": true, + "requires": { + "es-to-primitive": "^1.2.1", + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.1", + "is-callable": "^1.1.5", + "is-regex": "^1.0.5", + "object-inspect": "^1.7.0", + "object-keys": "^1.1.1", + "object.assign": "^4.1.0", + "string.prototype.trimleft": "^2.1.1", + "string.prototype.trimright": "^2.1.1" + } + }, + "es-to-primitive": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz", + "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==", + "dev": true, + "requires": { + "is-callable": "^1.1.4", + "is-date-object": "^1.0.1", + "is-symbol": "^1.0.2" + } + }, + "has-symbols": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.1.tgz", + "integrity": "sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg==", + "dev": true + }, + "is-callable": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.1.5.tgz", + "integrity": "sha512-ESKv5sMCJB2jnHTWZ3O5itG+O128Hsus4K4Qh1h2/cgn2vbgnLSVqfV46AeJA9D5EeeLa9w81KUXMtn34zhX+Q==", + "dev": true + }, + "is-regex": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.0.5.tgz", + "integrity": "sha512-vlKW17SNq44owv5AQR3Cq0bQPEb8+kF3UKZ2fiZNOWtztYE5i0CzCZxFDwO58qAOWtxdBRVO/V5Qin1wjCqFYQ==", + "dev": true, + "requires": { + "has": 
"^1.0.3" + } + }, + "object-inspect": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.7.0.tgz", + "integrity": "sha512-a7pEHdh1xKIAgTySUGgLMx/xwDZskN1Ud6egYYN3EdRW4ZMPNEDUTF+hwy2LUC+Bl+SyLXANnwz/jyh/qutKUw==", + "dev": true + }, + "object-keys": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", + "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==", + "dev": true + }, + "string.prototype.trimleft": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/string.prototype.trimleft/-/string.prototype.trimleft-2.1.2.tgz", + "integrity": "sha512-gCA0tza1JBvqr3bfAIFJGqfdRTyPae82+KTnm3coDXkZN9wnuW3HjGgN386D7hfv5CHQYCI022/rJPVlqXyHSw==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.5", + "string.prototype.trimstart": "^1.0.0" + } + }, + "string.prototype.trimright": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/string.prototype.trimright/-/string.prototype.trimright-2.1.2.tgz", + "integrity": "sha512-ZNRQ7sY3KroTaYjRS6EbNiiHrOkjihL9aQE/8gfQ4DtAC/aEBRHFJa44OmoWxGGqXuJlfKkZW4WcXErGr+9ZFg==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.5", + "string.prototype.trimend": "^1.0.0" + } + } + } + }, + "string.prototype.trimleft": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/string.prototype.trimleft/-/string.prototype.trimleft-2.1.0.tgz", + "integrity": "sha512-FJ6b7EgdKxxbDxc79cOlok6Afd++TTs5szo+zJTUyow3ycrRfJVE2pq3vcN53XexvKZu/DJMDfeI/qMiZTrjTw==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "function-bind": "^1.1.1" + } + }, + "string.prototype.trimright": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/string.prototype.trimright/-/string.prototype.trimright-2.1.0.tgz", + "integrity": "sha512-fXZTSV55dNBwv16uw+hh5jkghxSnc5oHq+5K/gXgizHwAvMetdAJlHqqoFC1FSDVPYWLkAKl2cxpUT41sV7nSg==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "function-bind": "^1.1.1" + } + }, + "string.prototype.trimstart": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.0.tgz", + "integrity": "sha512-iCP8g01NFYiiBOnwG1Xc3WZLyoo+RuBymwIlWncShXDDJYWN6DbnM3odslBJdgCdRlq94B5s63NWAZlcn2CS4w==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.5" + }, + "dependencies": { + "es-abstract": { + "version": "1.17.5", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.17.5.tgz", + "integrity": "sha512-BR9auzDbySxOcfog0tLECW8l28eRGpDpU3Dm3Hp4q/N+VtLTmyj4EUN088XZWQDW/hzj6sYRDXeOFsaAODKvpg==", + "dev": true, + "requires": { + "es-to-primitive": "^1.2.1", + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.1", + "is-callable": "^1.1.5", + "is-regex": "^1.0.5", + "object-inspect": "^1.7.0", + "object-keys": "^1.1.1", + "object.assign": "^4.1.0", + "string.prototype.trimleft": "^2.1.1", + "string.prototype.trimright": "^2.1.1" + } + }, + "es-to-primitive": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz", + "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==", + "dev": true, + "requires": { + "is-callable": "^1.1.4", + "is-date-object": "^1.0.1", + "is-symbol": "^1.0.2" + } + }, + "has-symbols": { + "version": "1.0.1", + 
"resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.1.tgz", + "integrity": "sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg==", + "dev": true + }, + "is-callable": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.1.5.tgz", + "integrity": "sha512-ESKv5sMCJB2jnHTWZ3O5itG+O128Hsus4K4Qh1h2/cgn2vbgnLSVqfV46AeJA9D5EeeLa9w81KUXMtn34zhX+Q==", + "dev": true + }, + "is-regex": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.0.5.tgz", + "integrity": "sha512-vlKW17SNq44owv5AQR3Cq0bQPEb8+kF3UKZ2fiZNOWtztYE5i0CzCZxFDwO58qAOWtxdBRVO/V5Qin1wjCqFYQ==", + "dev": true, + "requires": { + "has": "^1.0.3" + } + }, + "object-inspect": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.7.0.tgz", + "integrity": "sha512-a7pEHdh1xKIAgTySUGgLMx/xwDZskN1Ud6egYYN3EdRW4ZMPNEDUTF+hwy2LUC+Bl+SyLXANnwz/jyh/qutKUw==", + "dev": true + }, + "object-keys": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", + "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==", + "dev": true + }, + "string.prototype.trimleft": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/string.prototype.trimleft/-/string.prototype.trimleft-2.1.2.tgz", + "integrity": "sha512-gCA0tza1JBvqr3bfAIFJGqfdRTyPae82+KTnm3coDXkZN9wnuW3HjGgN386D7hfv5CHQYCI022/rJPVlqXyHSw==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.5", + "string.prototype.trimstart": "^1.0.0" + } + }, + "string.prototype.trimright": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/string.prototype.trimright/-/string.prototype.trimright-2.1.2.tgz", + "integrity": "sha512-ZNRQ7sY3KroTaYjRS6EbNiiHrOkjihL9aQE/8gfQ4DtAC/aEBRHFJa44OmoWxGGqXuJlfKkZW4WcXErGr+9ZFg==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.5", + "string.prototype.trimend": "^1.0.0" + } + } } }, "string_decoder": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dev": true, "requires": { "safe-buffer": "~5.1.0" } }, + "stringify-object": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/stringify-object/-/stringify-object-3.3.0.tgz", + "integrity": "sha512-rHqiFh1elqCQ9WPLIC8I0Q/g/wj5J1eMkyoiD6eoQApWHP0FtlK7rqnhmabL5VUY9JQCcqwwvlOaSuutekgyrw==", + "dev": true, + "requires": { + "get-own-enumerable-property-symbols": "^3.0.0", + "is-obj": "^1.0.1", + "is-regexp": "^1.0.0" + }, + "dependencies": { + "is-obj": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-1.0.1.tgz", + "integrity": "sha1-PkcprB9f3gJc19g6iW2rn09n2w8=", + "dev": true + } + } + }, "strip-ansi": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", - "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "dev": true, "requires": { - "ansi-regex": "^2.0.0" + "ansi-regex": "^4.1.0" + }, + "dependencies": { + "ansi-regex": { + "version": "4.1.0", + "resolved": 
"https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", + "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", + "dev": true + } } }, "strip-bom": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-2.0.0.tgz", "integrity": "sha1-YhmoVhZSBJHzV4i9vxRHqZx+aw4=", + "dev": true, "requires": { "is-utf8": "^0.2.0" } }, + "strip-comments": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/strip-comments/-/strip-comments-1.0.2.tgz", + "integrity": "sha512-kL97alc47hoyIQSV165tTt9rG5dn4w1dNnBhOQ3bOU1Nc1hel09jnXANaHJ7vzHLd4Ju8kseDGzlev96pghLFw==", + "dev": true, + "requires": { + "babel-extract-comments": "^1.0.0", + "babel-plugin-transform-object-rest-spread": "^6.26.0" + } + }, "strip-eof": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/strip-eof/-/strip-eof-1.0.0.tgz", - "integrity": "sha1-u0P/VZim6wXYm1n80SnJgzE2Br8=" + "integrity": "sha1-u0P/VZim6wXYm1n80SnJgzE2Br8=", + "dev": true }, "strip-indent": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/strip-indent/-/strip-indent-1.0.1.tgz", "integrity": "sha1-DHlipq3vp7vUrDZkYKY4VSrhoKI=", + "dev": true, "requires": { "get-stdin": "^4.0.1" } @@ -12778,108 +18711,182 @@ "strip-json-comments": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", - "integrity": "sha1-PFMZQukIwml8DsNEhYwobHygpgo=" + "integrity": "sha1-PFMZQukIwml8DsNEhYwobHygpgo=", + "dev": true + }, + "stubs": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/stubs/-/stubs-3.0.0.tgz", + "integrity": "sha1-6NK6H6nJBXAwPAMLaQD31fiavls=", + "dev": true }, "style-loader": { - "version": "0.19.0", - "resolved": "https://registry.npmjs.org/style-loader/-/style-loader-0.19.0.tgz", - "integrity": "sha512-9mx9sC9nX1dgP96MZOODpGC6l1RzQBITI2D5WJhu+wnbrSYVKLGuy14XJSLVQih/0GFrPpjelt+s//VcZQ2Evw==", + "version": "0.23.1", + "resolved": "https://registry.npmjs.org/style-loader/-/style-loader-0.23.1.tgz", + "integrity": "sha512-XK+uv9kWwhZMZ1y7mysB+zoihsEj4wneFWAS5qoiLwzW0WzSqMrrsIy+a3zkQJq0ipFtBpX5W3MqyRIBF/WFGg==", + "dev": true, "requires": { - "loader-utils": "^1.0.2", - "schema-utils": "^0.3.0" + "loader-utils": "^1.1.0", + "schema-utils": "^1.0.0" } }, - "subarg": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/subarg/-/subarg-1.0.0.tgz", - "integrity": "sha1-9izxdYHplrSPyWVpn1TAauJouNI=", - "requires": { - "minimist": "^1.1.0" + "styled-components": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/styled-components/-/styled-components-2.4.1.tgz", + "integrity": "sha1-ZjvQSF1LarRvlGIQ3APSOY0a3nQ=", + "requires": { + "buffer": "^5.0.3", + "css-to-react-native": "^2.0.3", + "fbjs": "^0.8.9", + "hoist-non-react-statics": "^1.2.0", + "is-plain-object": "^2.0.1", + "prop-types": "^15.5.4", + "stylis": "^3.4.0", + "supports-color": "^3.2.3" + }, + "dependencies": { + "buffer": { + "version": "5.6.0", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.6.0.tgz", + "integrity": "sha512-/gDYp/UtU0eA1ys8bOs9J6a+E/KWIY+DZ+Q2WESNUA0jFRsJOc0SNUO6xJ5SGA1xueg3NL65W6s+NY5l9cunuw==", + "requires": { + "base64-js": "^1.0.2", + "ieee754": "^1.1.4" + } + }, + "has-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-1.0.0.tgz", + "integrity": "sha1-nZ55MWXOAXoA8AQYxD+UKnsdEfo=" + }, + "hoist-non-react-statics": { + "version": "1.2.0", + "resolved": 
"https://registry.npmjs.org/hoist-non-react-statics/-/hoist-non-react-statics-1.2.0.tgz", + "integrity": "sha1-qkSM8JhtVcxAdzsXF0t90GbLfPs=" + }, + "supports-color": { + "version": "3.2.3", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-3.2.3.tgz", + "integrity": "sha1-ZawFBLOVQXHYpklGsq48u4pfVPY=", + "requires": { + "has-flag": "^1.0.0" + } + } } }, - "super-simple-web-server": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/super-simple-web-server/-/super-simple-web-server-1.1.0.tgz", - "integrity": "sha512-k1YYwsJ4hHnUTCX/5gJqLlkx3c2W71jVJsQuXGptpgRigVUZMHkyg/QZIuA9zuyYPpynZePMN1hwUUtL1yLPfA==", + "stylehacks": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/stylehacks/-/stylehacks-4.0.3.tgz", + "integrity": "sha512-7GlLk9JwlElY4Y6a/rmbH2MhVlTyVmiJd1PfTCqFaIBEGMYNsrO/v3SeGTdhBThLg4Z+NbOk/qFMwCa+J+3p/g==", "dev": true, "requires": { - "express": "^4.16.3" + "browserslist": "^4.0.0", + "postcss": "^7.0.0", + "postcss-selector-parser": "^3.0.0" + }, + "dependencies": { + "postcss-selector-parser": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-3.1.2.tgz", + "integrity": "sha512-h7fJ/5uWuRVyOtkO45pnt1Ih40CEleeyCHzipqAZO2e5H20g25Y48uYnFUiShvY4rZWNJ/Bib/KVPmanaCtOhA==", + "dev": true, + "requires": { + "dot-prop": "^5.2.0", + "indexes-of": "^1.0.1", + "uniq": "^1.0.1" + } + } } }, + "stylis": { + "version": "3.5.4", + "resolved": "https://registry.npmjs.org/stylis/-/stylis-3.5.4.tgz", + "integrity": "sha512-8/3pSmthWM7lsPBKv7NXkzn2Uc9W7NotcwGNpJaa3k7WMM1XDCA4MgT5k/8BIexd5ydZdboXtU90XH9Ec4Bv/Q==" + }, "supports-color": { "version": "5.5.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dev": true, "requires": { "has-flag": "^3.0.0" } }, + "supports-hyperlinks": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/supports-hyperlinks/-/supports-hyperlinks-1.0.1.tgz", + "integrity": "sha512-HHi5kVSefKaJkGYXbDuKbUGRVxqnWGn3J2e39CYcNJEfWciGq2zYtOhXLTlvrOZW1QU7VX67w7fMmWafHX9Pfw==", + "dev": true, + "requires": { + "has-flag": "^2.0.0", + "supports-color": "^5.0.0" + }, + "dependencies": { + "has-flag": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-2.0.0.tgz", + "integrity": "sha1-6CB68cx7MNRGzHC3NLXovhj4jVE=", + "dev": true + } + } + }, + "svg-parser": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/svg-parser/-/svg-parser-2.0.4.tgz", + "integrity": "sha512-e4hG1hRwoOdRb37cIMSgzNsxyzKfayW6VOflrwvR+/bzrkyxY/31WkbgnQpgtrNp1SdpJvpUAGTa/ZoiPNDuRQ==", + "dev": true + }, "svgo": { - "version": "0.7.2", - "resolved": "https://registry.npmjs.org/svgo/-/svgo-0.7.2.tgz", - "integrity": "sha1-n1dyQTlSE1xv779Ar+ak+qiLS7U=", - "requires": { - "coa": "~1.0.1", - "colors": "~1.1.2", - "csso": "~2.3.1", - "js-yaml": "~3.7.0", + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/svgo/-/svgo-1.3.2.tgz", + "integrity": "sha512-yhy/sQYxR5BkC98CY7o31VGsg014AKLEPxdfhora76l36hD9Rdy5NZA/Ocn6yayNPgSamYdtX2rFJdcv07AYVw==", + "dev": true, + "requires": { + "chalk": "^2.4.1", + "coa": "^2.0.2", + "css-select": "^2.0.0", + "css-select-base-adapter": "^0.1.1", + "css-tree": "1.0.0-alpha.37", + "csso": "^4.0.2", + "js-yaml": "^3.13.1", "mkdirp": "~0.5.1", - "sax": "~1.2.1", - "whet.extend": "~0.9.9" + "object.values": "^1.1.0", + "sax": "~1.2.4", + "stable": "^0.1.8", 
+ "unquote": "~1.1.1", + "util.promisify": "~1.0.0" }, "dependencies": { - "esprima": { - "version": "2.7.3", - "resolved": "https://registry.npmjs.org/esprima/-/esprima-2.7.3.tgz", - "integrity": "sha1-luO3DVd59q1JzQMmc9HDEnZ7pYE=" + "css-select": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/css-select/-/css-select-2.1.0.tgz", + "integrity": "sha512-Dqk7LQKpwLoH3VovzZnkzegqNSuAziQyNZUcrdDM401iY+R5NkGBXGmtO05/yaXQziALuPogeG0b7UAgjnTJTQ==", + "dev": true, + "requires": { + "boolbase": "^1.0.0", + "css-what": "^3.2.1", + "domutils": "^1.7.0", + "nth-check": "^1.0.2" + } + }, + "css-what": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/css-what/-/css-what-3.2.1.tgz", + "integrity": "sha512-WwOrosiQTvyms+Ti5ZC5vGEK0Vod3FTt1ca+payZqvKuGJF+dq7bG63DstxtN0dpm6FxY27a/zS3Wten+gEtGw==", + "dev": true }, - "js-yaml": { - "version": "3.7.0", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.7.0.tgz", - "integrity": "sha1-XJZ93YN6m/3KXy3oQlOr6KHAO4A=", + "domutils": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/domutils/-/domutils-1.7.0.tgz", + "integrity": "sha512-Lgd2XcJ/NjEw+7tFvfKxOzCYKZsdct5lczQ2ZaQY8Djz7pfAD3Gbp8ySJWtreII/vDlMVmxwa6pHmdxIYgttDg==", + "dev": true, "requires": { - "argparse": "^1.0.7", - "esprima": "^2.6.0" + "dom-serializer": "0", + "domelementtype": "1" } } } }, - "sw-precache": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/sw-precache/-/sw-precache-5.2.1.tgz", - "integrity": "sha512-8FAy+BP/FXE+ILfiVTt+GQJ6UEf4CVHD9OfhzH0JX+3zoy2uFk7Vn9EfXASOtVmmIVbL3jE/W8Z66VgPSZcMhw==", - "requires": { - "dom-urls": "^1.1.0", - "es6-promise": "^4.0.5", - "glob": "^7.1.1", - "lodash.defaults": "^4.2.0", - "lodash.template": "^4.4.0", - "meow": "^3.7.0", - "mkdirp": "^0.5.1", - "pretty-bytes": "^4.0.2", - "sw-toolbox": "^3.4.0", - "update-notifier": "^2.3.0" - } - }, - "sw-precache-webpack-plugin": { - "version": "0.11.4", - "resolved": "https://registry.npmjs.org/sw-precache-webpack-plugin/-/sw-precache-webpack-plugin-0.11.4.tgz", - "integrity": "sha1-ppUBflTu1XVVFJOlGdwdqNotxeA=", - "requires": { - "del": "^2.2.2", - "sw-precache": "^5.1.1", - "uglify-js": "^3.0.13" - } - }, - "sw-toolbox": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/sw-toolbox/-/sw-toolbox-3.6.0.tgz", - "integrity": "sha1-Jt8dHHA0hljk3qKIQxkUm3sxg7U=", - "requires": { - "path-to-regexp": "^1.0.1", - "serviceworker-cache-polyfill": "^4.0.0" - } - }, "swagger-schema-official": { "version": "2.0.0-bab6bed", "resolved": "https://registry.npmjs.org/swagger-schema-official/-/swagger-schema-official-2.0.0-bab6bed.tgz", @@ -12917,192 +18924,518 @@ "integrity": "sha512-e900nM8RRtGhlV36KGEU9k65K3mPb1WV70OdjfxlG2EAuM1noi/E/BaW/uMhL7bPEssK8QV57vN3esixjUvcXQ==" }, "symbol-tree": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/symbol-tree/-/symbol-tree-3.2.2.tgz", - "integrity": "sha1-rifbOPZgp64uHDt9G8KQgZuFGeY=" + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/symbol-tree/-/symbol-tree-3.2.4.tgz", + "integrity": "sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw==", + "dev": true + }, + "table": { + "version": "5.4.6", + "resolved": "https://registry.npmjs.org/table/-/table-5.4.6.tgz", + "integrity": "sha512-wmEc8m4fjnob4gt5riFRtTu/6+4rSe12TpAELNSqHMfF3IqnA+CH37USM6/YR3qRZv7e56kAEAtd6nKZaxe0Ug==", + "dev": true, + "requires": { + "ajv": "^6.10.2", + "lodash": "^4.17.14", + "slice-ansi": "^2.1.0", + "string-width": "^3.0.0" + }, + "dependencies": { + 
"emoji-regex": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", + "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==", + "dev": true + }, + "is-fullwidth-code-point": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", + "dev": true + }, + "string-width": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", + "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", + "dev": true, + "requires": { + "emoji-regex": "^7.0.1", + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^5.1.0" + } + } + } }, "tapable": { - "version": "0.2.9", - "resolved": "https://registry.npmjs.org/tapable/-/tapable-0.2.9.tgz", - "integrity": "sha512-2wsvQ+4GwBvLPLWsNfLCDYGsW6xb7aeC6utq2Qh0PFwgEy7K7dsma9Jsmb2zSQj7GvYAyUGSntLtsv++GmgL1A==" + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/tapable/-/tapable-1.1.3.tgz", + "integrity": "sha512-4WK/bYZmj8xLr+HUCODHGF1ZFzsYffasLUgEiMBY4fgtltdO6B4WJtlSbPaDTLpYTcGVwM2qLnFTICEcNxs3kA==", + "dev": true }, - "temp": { - "version": "0.8.3", - "resolved": "https://registry.npmjs.org/temp/-/temp-0.8.3.tgz", - "integrity": "sha1-4Ma8TSa5AxJEEOT+2BEDAU38H1k=", + "teeny-request": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-5.3.1.tgz", + "integrity": "sha512-hnUeun3xryzv92FbrnprltcdeDfSVaGFBlFPRvKJ2fO/ioQx9N0aSUbbXSfTO+ArRXine1gSWdWFWcgfrggWXw==", "dev": true, "requires": { - "os-tmpdir": "^1.0.0", - "rimraf": "~2.2.6" + "http-proxy-agent": "^2.1.0", + "https-proxy-agent": "^3.0.0", + "node-fetch": "^2.2.0", + "stream-events": "^1.0.5", + "uuid": "^3.3.2" }, "dependencies": { - "rimraf": { - "version": "2.2.8", - "resolved": "http://registry.npmjs.org/rimraf/-/rimraf-2.2.8.tgz", - "integrity": "sha1-5Dm+Kq7jJzIZUnMPmaiSnk/FBYI=", + "agent-base": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-4.3.0.tgz", + "integrity": "sha512-salcGninV0nPrwpGNn4VTXBb1SOuXQBiqbrNXoeizJsHrsL6ERFM2Ne3JUSBWRE6aeNJI2ROP/WEEIDUiDe3cg==", + "dev": true, + "requires": { + "es6-promisify": "^5.0.0" + } + }, + "https-proxy-agent": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-3.0.1.tgz", + "integrity": "sha512-+ML2Rbh6DAuee7d07tYGEKOEi2voWPUGan+ExdPbPW6Z3svq+JCqr0v8WmKPOkz1vOVykPCBSuobe7G8GJUtVg==", + "dev": true, + "requires": { + "agent-base": "^4.3.0", + "debug": "^3.1.0" + } + }, + "node-fetch": { + "version": "2.6.0", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.0.tgz", + "integrity": "sha512-8dG4H5ujfvFiqDmVu9fQ5bOHUC15JMjMY/Zumv26oOvvVJjM67KF8koCWIabKQ1GJIa9r2mMZscBq/TbdOcmNA==", "dev": true } } }, - "term-size": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/term-size/-/term-size-1.2.0.tgz", - "integrity": "sha1-RYuDiH8oj8Vtb/+/rSYuJmOO+mk=", + "terser": { + "version": "4.6.3", + "resolved": "https://registry.npmjs.org/terser/-/terser-4.6.3.tgz", + "integrity": "sha512-Lw+ieAXmY69d09IIc/yqeBqXpEQIpDGZqT34ui1QWXIUpR2RjbqEkT8X7Lgex19hslSqcWM5iMN2kM11eMsESQ==", + "dev": true, + "requires": { + "commander": "^2.20.0", + "source-map": "~0.6.1", + "source-map-support": "~0.5.12" + }, + "dependencies": { + "commander": { + "version": "2.20.3", + 
"resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", + "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==", + "dev": true + }, + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true + }, + "source-map-support": { + "version": "0.5.16", + "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.16.tgz", + "integrity": "sha512-efyLRJDr68D9hBBNIPWFjhpFzURh+KJykQwvMyW5UiZzYwoF6l4YMMDIJJEyFWxWCqfyxLzz6tSfUFR+kXXsVQ==", + "dev": true, + "requires": { + "buffer-from": "^1.0.0", + "source-map": "^0.6.0" + } + } + } + }, + "terser-webpack-plugin": { + "version": "2.3.5", + "resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-2.3.5.tgz", + "integrity": "sha512-WlWksUoq+E4+JlJ+h+U+QUzXpcsMSSNXkDy9lBVkSqDn1w23Gg29L/ary9GeJVYCGiNJJX7LnVc4bwL1N3/g1w==", + "dev": true, "requires": { - "execa": "^0.7.0" + "cacache": "^13.0.1", + "find-cache-dir": "^3.2.0", + "jest-worker": "^25.1.0", + "p-limit": "^2.2.2", + "schema-utils": "^2.6.4", + "serialize-javascript": "^2.1.2", + "source-map": "^0.6.1", + "terser": "^4.4.3", + "webpack-sources": "^1.4.3" + }, + "dependencies": { + "ajv-keywords": { + "version": "3.4.1", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.4.1.tgz", + "integrity": "sha512-RO1ibKvd27e6FEShVFfPALuHI3WjSVNeK5FIsmme/LYRNxjKuNj+Dt7bucLa6NdSv3JcVTyMlm9kGR84z1XpaQ==", + "dev": true + }, + "cacache": { + "version": "13.0.1", + "resolved": "https://registry.npmjs.org/cacache/-/cacache-13.0.1.tgz", + "integrity": "sha512-5ZvAxd05HDDU+y9BVvcqYu2LLXmPnQ0hW62h32g4xBTgL/MppR4/04NHfj/ycM2y6lmTnbw6HVi+1eN0Psba6w==", + "dev": true, + "requires": { + "chownr": "^1.1.2", + "figgy-pudding": "^3.5.1", + "fs-minipass": "^2.0.0", + "glob": "^7.1.4", + "graceful-fs": "^4.2.2", + "infer-owner": "^1.0.4", + "lru-cache": "^5.1.1", + "minipass": "^3.0.0", + "minipass-collect": "^1.0.2", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.2", + "mkdirp": "^0.5.1", + "move-concurrently": "^1.0.1", + "p-map": "^3.0.0", + "promise-inflight": "^1.0.1", + "rimraf": "^2.7.1", + "ssri": "^7.0.0", + "unique-filename": "^1.1.1" + } + }, + "chownr": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/chownr/-/chownr-1.1.4.tgz", + "integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==", + "dev": true + }, + "find-cache-dir": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-3.3.1.tgz", + "integrity": "sha512-t2GDMt3oGC/v+BMwzmllWDuJF/xcDtE5j/fCGbqDD7OLuJkj0cfh1YSA5VKPvwMeLFLNDBkwOKZ2X85jGLVftQ==", + "dev": true, + "requires": { + "commondir": "^1.0.1", + "make-dir": "^3.0.2", + "pkg-dir": "^4.1.0" + } + }, + "find-up": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "dev": true, + "requires": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + } + }, + "glob": { + "version": "7.1.6", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", + "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", + "dev": true, + "requires": 
{ + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + }, + "graceful-fs": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.3.tgz", + "integrity": "sha512-a30VEBm4PEdx1dRB7MFK7BejejvCvBronbLjht+sHuGYj8PHs7M/5Z+rt5lw551vZ7yfTCj4Vuyy3mSJytDWRQ==", + "dev": true + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "jest-worker": { + "version": "25.2.6", + "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-25.2.6.tgz", + "integrity": "sha512-FJn9XDUSxcOR4cwDzRfL1z56rUofNTFs539FGASpd50RHdb6EVkhxQqktodW2mI49l+W3H+tFJDotCHUQF6dmA==", + "dev": true, + "requires": { + "merge-stream": "^2.0.0", + "supports-color": "^7.0.0" + } + }, + "locate-path": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "dev": true, + "requires": { + "p-locate": "^4.1.0" + } + }, + "make-dir": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.0.2.tgz", + "integrity": "sha512-rYKABKutXa6vXTXhoV18cBE7PaewPXHe/Bdq4v+ZLMhxbWApkFFplT0LcbMW+6BbjnQXzZ/sAvSE/JdguApG5w==", + "dev": true, + "requires": { + "semver": "^6.0.0" + } + }, + "p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "requires": { + "p-try": "^2.0.0" + } + }, + "p-locate": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "dev": true, + "requires": { + "p-limit": "^2.2.0" + } + }, + "p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "dev": true + }, + "path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true + }, + "pkg-dir": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-4.2.0.tgz", + "integrity": "sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==", + "dev": true, + "requires": { + "find-up": "^4.0.0" + } + }, + "rimraf": { + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz", + "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==", + "dev": true, + "requires": { + "glob": "^7.1.3" + } + }, + "schema-utils": { + "version": "2.6.5", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-2.6.5.tgz", + "integrity": "sha512-5KXuwKziQrTVHh8j/Uxz+QUbxkaLW9X/86NBlx/gnKgtsZA2GIVMUn17qWhRFwF8jdYb3Dig5hRO/W5mZqy6SQ==", + "dev": true, + "requires": { + "ajv": "^6.12.0", + "ajv-keywords": "^3.4.1" + } + }, + "semver": { + "version": 
"6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "dev": true + }, + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true + }, + "ssri": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/ssri/-/ssri-7.1.0.tgz", + "integrity": "sha512-77/WrDZUWocK0mvA5NTRQyveUf+wsrIc6vyrxpS8tVvYBcX215QbafrJR3KtkpskIzoFLqqNuuYQvxaMjXJ/0g==", + "dev": true, + "requires": { + "figgy-pudding": "^3.5.1", + "minipass": "^3.1.1" + } + }, + "supports-color": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.1.0.tgz", + "integrity": "sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + } } }, "test-exclude": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-4.2.3.tgz", - "integrity": "sha512-SYbXgY64PT+4GAL2ocI3HwPa4Q4TBKm0cwAVeKOt/Aoc0gSpNRjJX8w0pA1LMKZ3LBmd8pYBqApFNQLII9kavA==", + "version": "5.2.3", + "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-5.2.3.tgz", + "integrity": "sha512-M+oxtseCFO3EDtAaGH7iiej3CBkzXqFMbzqYAACdzKui4eZA+pq3tZEwChvOdNfa7xxy8BfbmgJSIr43cC/+2g==", + "dev": true, "requires": { - "arrify": "^1.0.1", - "micromatch": "^2.3.11", - "object-assign": "^4.1.0", - "read-pkg-up": "^1.0.1", - "require-main-filename": "^1.0.1" + "glob": "^7.1.3", + "minimatch": "^3.0.4", + "read-pkg-up": "^4.0.0", + "require-main-filename": "^2.0.0" }, "dependencies": { - "arr-diff": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/arr-diff/-/arr-diff-2.0.0.tgz", - "integrity": "sha1-jzuCf5Vai9ZpaX5KQlasPOrjVs8=", + "find-up": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "dev": true, + "requires": { + "locate-path": "^3.0.0" + } + }, + "load-json-file": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-4.0.0.tgz", + "integrity": "sha1-L19Fq5HjMhYjT9U62rZo607AmTs=", + "dev": true, + "requires": { + "graceful-fs": "^4.1.2", + "parse-json": "^4.0.0", + "pify": "^3.0.0", + "strip-bom": "^3.0.0" + } + }, + "locate-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "dev": true, "requires": { - "arr-flatten": "^1.0.1" + "p-locate": "^3.0.0", + "path-exists": "^3.0.0" } }, - "array-unique": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/array-unique/-/array-unique-0.2.1.tgz", - "integrity": "sha1-odl8yvy8JiXMcPrc6zalDFiwGlM=" - }, - "braces": { - "version": "1.8.5", - "resolved": "https://registry.npmjs.org/braces/-/braces-1.8.5.tgz", - "integrity": "sha1-uneWLhLf+WnWt2cR6RS3N4V79qc=", + "p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, "requires": { - 
"expand-range": "^1.8.1", - "preserve": "^0.2.0", - "repeat-element": "^1.1.2" + "p-try": "^2.0.0" } }, - "expand-brackets": { - "version": "0.1.5", - "resolved": "https://registry.npmjs.org/expand-brackets/-/expand-brackets-0.1.5.tgz", - "integrity": "sha1-3wcoTjQqgHzXM6xa9yQR5YHRF3s=", + "p-locate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "dev": true, "requires": { - "is-posix-bracket": "^0.1.0" + "p-limit": "^2.0.0" } }, - "extglob": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/extglob/-/extglob-0.3.2.tgz", - "integrity": "sha1-Lhj/PS9JqydlzskCPwEdqo2DSaE=", + "p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "dev": true + }, + "parse-json": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-4.0.0.tgz", + "integrity": "sha1-vjX1Qlvh9/bHRxhPmKeIy5lHfuA=", + "dev": true, "requires": { - "is-extglob": "^1.0.0" + "error-ex": "^1.3.1", + "json-parse-better-errors": "^1.0.1" } }, - "is-extglob": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-1.0.0.tgz", - "integrity": "sha1-rEaBd8SUNAWgkvyPKXYMb/xiBsA=" - }, - "is-glob": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-2.0.1.tgz", - "integrity": "sha1-0Jb5JqPe1WAPP9/ZEZjLCIjC2GM=", + "path-type": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-3.0.0.tgz", + "integrity": "sha512-T2ZUsdZFHgA3u4e5PfPbjd7HDDpxPnQb5jN0SrDsjNSuVXHJqtwTnWqG0B1jZrgmJ/7lj1EmVIByWt1gxGkWvg==", + "dev": true, "requires": { - "is-extglob": "^1.0.0" + "pify": "^3.0.0" } }, - "kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", + "pify": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", + "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=", + "dev": true + }, + "read-pkg": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-3.0.0.tgz", + "integrity": "sha1-nLxoaXj+5l0WwA4rGcI3/Pbjg4k=", + "dev": true, "requires": { - "is-buffer": "^1.1.5" + "load-json-file": "^4.0.0", + "normalize-package-data": "^2.3.2", + "path-type": "^3.0.0" } }, - "micromatch": { - "version": "2.3.11", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-2.3.11.tgz", - "integrity": "sha1-hmd8l9FyCzY0MdBNDRUpO9OMFWU=", + "read-pkg-up": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-4.0.0.tgz", + "integrity": "sha512-6etQSH7nJGsK0RbG/2TeDzZFa8shjQ1um+SwQQ5cwKy0dhSXdOncEhb1CPpvQG4h7FyOV6EB6YlV0yJvZQNAkA==", + "dev": true, "requires": { - "arr-diff": "^2.0.0", - "array-unique": "^0.2.1", - "braces": "^1.8.2", - "expand-brackets": "^0.1.4", - "extglob": "^0.3.1", - "filename-regex": "^2.0.0", - "is-extglob": "^1.0.0", - "is-glob": "^2.0.1", - "kind-of": "^3.0.2", - "normalize-path": "^2.0.1", - "object.omit": "^2.0.0", - "parse-glob": "^3.0.4", - "regex-cache": "^0.4.2" + "find-up": "^3.0.0", + "read-pkg": "^3.0.0" } + }, + "strip-bom": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", + "integrity": 
"sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM=", + "dev": true } } }, "text-table": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", - "integrity": "sha1-f17oI66AUgfACvLfSoTsP8+lcLQ=" + "integrity": "sha1-f17oI66AUgfACvLfSoTsP8+lcLQ=", + "dev": true }, "throat": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/throat/-/throat-4.1.0.tgz", - "integrity": "sha1-iQN8vJLFarGJJua6TLsgDhVnKmo=" - }, - "throttleit": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/throttleit/-/throttleit-1.0.0.tgz", - "integrity": "sha1-nnhYNtr0Z0MUWlmEtiaNgoUorGw=", + "integrity": "sha1-iQN8vJLFarGJJua6TLsgDhVnKmo=", "dev": true }, "through": { "version": "2.3.8", "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz", - "integrity": "sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU=" + "integrity": "sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU=", + "dev": true }, "through2": { "version": "2.0.5", "resolved": "https://registry.npmjs.org/through2/-/through2-2.0.5.tgz", "integrity": "sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ==", + "dev": true, "requires": { "readable-stream": "~2.3.6", "xtend": "~4.0.1" } }, "thunky": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/thunky/-/thunky-1.0.3.tgz", - "integrity": "sha512-YwT8pjmNcAXBZqrubu22P4FYsh2D4dxRmnWBOL8Jk8bUcRUtc5326kx32tuTmFDAZtLOGEVNl8POAR8j896Iow==" - }, - "time-stamp": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/time-stamp/-/time-stamp-2.2.0.tgz", - "integrity": "sha512-zxke8goJQpBeEgD82CXABeMh0LSJcj7CXEd0OHOg45HgcofF7pxNwZm9+RknpxpDhwN4gFpySkApKfFYfRQnUA==" - }, - "timed-out": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/timed-out/-/timed-out-4.0.1.tgz", - "integrity": "sha1-8y6srFoXW+ol1/q1Zas+2HQe9W8=" + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/thunky/-/thunky-1.1.0.tgz", + "integrity": "sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA==", + "dev": true }, "timers-browserify": { "version": "2.0.10", "resolved": "https://registry.npmjs.org/timers-browserify/-/timers-browserify-2.0.10.tgz", "integrity": "sha512-YvC1SV1XdOUaL6gx5CoGroT3Gu49pK9+TZ38ErPldOWW4j49GI1HKs9DV+KGq/w6y+LZ72W1c8cKz2vzY+qpzg==", + "dev": true, "requires": { "setimmediate": "^1.0.4" } }, - "tinycolor2": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/tinycolor2/-/tinycolor2-1.4.1.tgz", - "integrity": "sha1-9PrTM0R7wLB9TcjpIJ2POaisd+g=", + "timsort": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/timsort/-/timsort-0.3.0.tgz", + "integrity": "sha1-QFQRqOfmM5/mTbmiNN4R3DHgK9Q=", "dev": true }, "title-case": { @@ -13119,6 +19452,7 @@ "version": "0.0.33", "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.33.tgz", "integrity": "sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==", + "dev": true, "requires": { "os-tmpdir": "~1.0.2" } @@ -13126,17 +19460,20 @@ "tmpl": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/tmpl/-/tmpl-1.0.4.tgz", - "integrity": "sha1-I2QN17QtAEM5ERQIIOXPRA5SHdE=" + "integrity": "sha1-I2QN17QtAEM5ERQIIOXPRA5SHdE=", + "dev": true }, "to-arraybuffer": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/to-arraybuffer/-/to-arraybuffer-1.0.1.tgz", - "integrity": "sha1-fSKbH8xjfkZsoIEYCDanqr/4P0M=" + "integrity": "sha1-fSKbH8xjfkZsoIEYCDanqr/4P0M=", + "dev": true }, "to-fast-properties": { - "version": "1.0.3", - "resolved": 
"https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-1.0.3.tgz", - "integrity": "sha1-uDVx+k2MJbguIxsG46MFXeTKGkc=" + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz", + "integrity": "sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4=", + "dev": true }, "to-object-path": { "version": "0.3.0", @@ -13176,15 +19513,17 @@ "repeat-string": "^1.6.1" } }, - "toposort": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/toposort/-/toposort-1.0.7.tgz", - "integrity": "sha1-LmhELZ9k7HILjMieZEOsbKqVACk=" + "toidentifier": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.0.tgz", + "integrity": "sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw==", + "dev": true }, "tough-cookie": { "version": "2.5.0", "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.5.0.tgz", "integrity": "sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g==", + "dev": true, "requires": { "psl": "^1.1.28", "punycode": "^2.1.1" @@ -13194,25 +19533,16 @@ "version": "1.0.1", "resolved": "https://registry.npmjs.org/tr46/-/tr46-1.0.1.tgz", "integrity": "sha1-qLE/1r/SSJUZZ0zN5VujaTtwbQk=", + "dev": true, "requires": { "punycode": "^2.1.0" } }, - "trim": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/trim/-/trim-0.0.1.tgz", - "integrity": "sha1-WFhUf2spB1fulczMZm+1AITEYN0=", - "dev": true - }, "trim-newlines": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/trim-newlines/-/trim-newlines-1.0.0.tgz", - "integrity": "sha1-WIeWa7WCpFA6QetST301ARgVphM=" - }, - "trim-right": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/trim-right/-/trim-right-1.0.1.tgz", - "integrity": "sha1-yy4SAwZ+DI3h9hQJS5/kVwTqYAM=" + "integrity": "sha1-WIeWa7WCpFA6QetST301ARgVphM=", + "dev": true }, "tryer": { "version": "1.0.1", @@ -13220,66 +19550,6 @@ "integrity": "sha512-c3zayb8/kWWpycWYg87P71E1S1ZL6b6IJxfb5fvsUgsf0S2MVGaDhDXXjDMpdCpfWXqptc+4mXwmiy1ypXqRAA==", "dev": true }, - "ts-jest": { - "version": "22.0.1", - "resolved": "https://registry.npmjs.org/ts-jest/-/ts-jest-22.0.1.tgz", - "integrity": "sha512-bc781gViU95lRZF0kzkHiincwmVu96jbC8MFk2SXUCrSj3Zx8sMC6c6gJnIluVQkm8yYaBl5ucqLnwHNRl5l0Q==", - "requires": { - "babel-core": "^6.24.1", - "babel-plugin-istanbul": "^4.1.4", - "babel-plugin-transform-es2015-modules-commonjs": "^6.24.1", - "babel-preset-jest": "^22.0.1", - "cpx": "^1.5.0", - "fs-extra": "4.0.3", - "jest-config": "^22.0.1", - "pkg-dir": "^2.0.0", - "source-map-support": "^0.5.0", - "yargs": "^10.0.3" - }, - "dependencies": { - "fs-extra": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-4.0.3.tgz", - "integrity": "sha512-q6rbdDd1o2mAnQreO7YADIxf/Whx4AHBiRf6d+/cVT8h44ss+lHgxf1FemcqDnQt9X3ct4McHr+JMGlYSsK7Cg==", - "requires": { - "graceful-fs": "^4.1.2", - "jsonfile": "^4.0.0", - "universalify": "^0.1.0" - } - }, - "jsonfile": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-4.0.0.tgz", - "integrity": "sha1-h3Gq4HmbZAdrdmQPygWPnBDjPss=", - "requires": { - "graceful-fs": "^4.1.6" - } - } - } - }, - "ts-loader": { - "version": "2.3.7", - "resolved": "https://registry.npmjs.org/ts-loader/-/ts-loader-2.3.7.tgz", - "integrity": "sha512-8t3bu2FcEkXb+D4L+Cn8qiK2E2C6Ms4/GQChvz6IMbVurcFHLXrhW4EMtfaol1a1ASQACZGDUGit4NHnX9g7hQ==", - "requires": { - "chalk": "^2.0.1", - "enhanced-resolve": "^3.0.0", - 
"loader-utils": "^1.0.2", - "semver": "^5.0.1" - }, - "dependencies": { - "chalk": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.1.tgz", - "integrity": "sha512-ObN6h1v2fTJSmUXoS3nMQ92LbDK9be4TV+6G+omQlGJFdcUX5heKi1LZ1YnRMIgwTLEj3E24bT6tYni50rlCfQ==", - "requires": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" - } - } - } - }, "ts-node": { "version": "7.0.1", "resolved": "https://registry.npmjs.org/ts-node/-/ts-node-7.0.1.tgz", @@ -13331,20 +19601,11 @@ } } }, - "ts-protoc-gen": { - "version": "0.10.0", - "resolved": "https://registry.npmjs.org/ts-protoc-gen/-/ts-protoc-gen-0.10.0.tgz", - "integrity": "sha512-EEbgDWNHK3CvcNhmib94I4HMO23qLddjLRdXW8EUE11VJxbi3n5J0l2DiX/L1pijOaPTkbEoRK+zQinKgKGqsw==", - "requires": { - "google-protobuf": "^3.6.1" - }, - "dependencies": { - "google-protobuf": { - "version": "3.9.1", - "resolved": "https://registry.npmjs.org/google-protobuf/-/google-protobuf-3.9.1.tgz", - "integrity": "sha512-tkz7SVwBktFbqFK3teXFUY/VM57+mbUgV9bSD+sZH1ocHJ7uk7BfEWMRdU24dd0ciUDokreA7ghH2fYFIczQdw==" - } - } + "ts-pnp": { + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/ts-pnp/-/ts-pnp-1.1.6.tgz", + "integrity": "sha512-CrG5GqAAzMT7144Cl+UIFP7mz/iIhiy+xQ6GGcnjTezhALT02uPMRw7tgDSESgB5MsfKt55+GPWw4ir1kVtMIQ==", + "dev": true }, "tsconfig": { "version": "7.0.0", @@ -13366,104 +19627,23 @@ } } }, - "tsconfig-paths": { - "version": "3.7.0", - "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.7.0.tgz", - "integrity": "sha512-7iE+Q/2E1lgvxD+c0Ot+GFFmgmfIjt/zCayyruXkXQ84BLT85gHXy0WSoQSiuFX9+d+keE/jiON7notV74ZY+A==", - "requires": { - "@types/json5": "^0.0.29", - "deepmerge": "^2.0.1", - "json5": "^1.0.1", - "minimist": "^1.2.0", - "strip-bom": "^3.0.0" - }, - "dependencies": { - "deepmerge": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-2.2.1.tgz", - "integrity": "sha512-R9hc1Xa/NOBi9WRVUWg19rl1UB7Tt4kuPd+thNJgFZoxXsTz7ncaPaeIm+40oSGuP33DfMb4sZt1QIGiJzC4EA==" - }, - "strip-bom": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", - "integrity": "sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM=" - } - } - }, - "tsconfig-paths-webpack-plugin": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/tsconfig-paths-webpack-plugin/-/tsconfig-paths-webpack-plugin-2.0.0.tgz", - "integrity": "sha512-reAnVEGP7mNwOcXXYxQpsH7uY8blNJM/xgN2KYttVX+qkwfqA+nhRPpA7Fnomnlhm5Jz0EoSVwk4rtQu8hC54g==", - "requires": { - "chalk": "^2.3.0", - "tsconfig-paths": "^3.1.1" - }, - "dependencies": { - "chalk": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.1.tgz", - "integrity": "sha512-ObN6h1v2fTJSmUXoS3nMQ92LbDK9be4TV+6G+omQlGJFdcUX5heKi1LZ1YnRMIgwTLEj3E24bT6tYni50rlCfQ==", - "requires": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" - } - } - } - }, "tslib": { "version": "1.9.3", "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.9.3.tgz", - "integrity": "sha512-4krF8scpejhaOgqzBEcGM7yDIEfi0/8+8zDRZhNZZ2kjmHJ4hv3zCbQWxoJGz1iw5U0Jl0nma13xzHXcncMavQ==" - }, - "tslint": { - "version": "5.12.0", - "resolved": "https://registry.npmjs.org/tslint/-/tslint-5.12.0.tgz", - "integrity": "sha512-CKEcH1MHUBhoV43SA/Jmy1l24HJJgI0eyLbBNSRyFlsQvb9v6Zdq+Nz2vEOH00nC5SUx4SneJ59PZUS/ARcokQ==", - "requires": { - "babel-code-frame": "^6.22.0", - "builtin-modules": "^1.1.1", - "chalk": "^2.3.0", - "commander": "^2.12.1", - "diff": "^3.2.0", - 
"glob": "^7.1.1", - "js-yaml": "^3.7.0", - "minimatch": "^3.0.4", - "resolve": "^1.3.2", - "semver": "^5.3.0", - "tslib": "^1.8.0", - "tsutils": "^2.27.2" - }, - "dependencies": { - "chalk": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.1.tgz", - "integrity": "sha512-ObN6h1v2fTJSmUXoS3nMQ92LbDK9be4TV+6G+omQlGJFdcUX5heKi1LZ1YnRMIgwTLEj3E24bT6tYni50rlCfQ==", - "requires": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" - } - } - } + "integrity": "sha512-4krF8scpejhaOgqzBEcGM7yDIEfi0/8+8zDRZhNZZ2kjmHJ4hv3zCbQWxoJGz1iw5U0Jl0nma13xzHXcncMavQ==", + "dev": true }, "tslint-config-prettier": { - "version": "1.17.0", - "resolved": "https://registry.npmjs.org/tslint-config-prettier/-/tslint-config-prettier-1.17.0.tgz", - "integrity": "sha512-NKWNkThwqE4Snn4Cm6SZB7lV5RMDDFsBwz6fWUkTxOKGjMx8ycOHnjIbhn7dZd5XmssW3CwqUjlANR6EhP9YQw==" - }, - "tslint-react": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/tslint-react/-/tslint-react-3.6.0.tgz", - "integrity": "sha512-AIv1QcsSnj7e9pFir6cJ6vIncTqxfqeFF3Lzh8SuuBljueYzEAtByuB6zMaD27BL0xhMEqsZ9s5eHuCONydjBw==", - "requires": { - "tsutils": "^2.13.1" - } + "version": "1.18.0", + "resolved": "https://registry.npmjs.org/tslint-config-prettier/-/tslint-config-prettier-1.18.0.tgz", + "integrity": "sha512-xPw9PgNPLG3iKRxmK7DWr+Ea/SzrvfHtjFt5LBl61gk2UBG/DB9kCXRjv+xyIU1rUtnayLeMUVJBcMX8Z17nDg==", + "dev": true }, "tsutils": { - "version": "2.29.0", - "resolved": "https://registry.npmjs.org/tsutils/-/tsutils-2.29.0.tgz", - "integrity": "sha512-g5JVHCIJwzfISaXpXE1qvNalca5Jwob6FjI4AoPlqMusJ6ftFE7IkkFoMhVLRgK+4Kx3gkzb8UZK5t5yTTvEmA==", + "version": "3.17.1", + "resolved": "https://registry.npmjs.org/tsutils/-/tsutils-3.17.1.tgz", + "integrity": "sha512-kzeQ5B8H3w60nFY2g8cJIuH7JDpsALXySGtwGJ0p2LSjLgay3NdIpqq5SoOBe46bKDW2iq25irHCr8wjomUS2g==", + "dev": true, "requires": { "tslib": "^1.8.1" } @@ -13471,12 +19651,14 @@ "tty-browserify": { "version": "0.0.0", "resolved": "https://registry.npmjs.org/tty-browserify/-/tty-browserify-0.0.0.tgz", - "integrity": "sha1-oVe6QC2iTpv5V/mqadUk7tQpAaY=" + "integrity": "sha1-oVe6QC2iTpv5V/mqadUk7tQpAaY=", + "dev": true }, "tunnel-agent": { "version": "0.6.0", "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", "integrity": "sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0=", + "dev": true, "requires": { "safe-buffer": "^5.0.1" } @@ -13484,43 +19666,85 @@ "tweetnacl": { "version": "0.14.5", "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", - "integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q=" + "integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q=", + "dev": true + }, + "type": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/type/-/type-1.2.0.tgz", + "integrity": "sha512-+5nt5AAniqsCnu2cEQQdpzCAh33kVx8n0VoFidKpB1dVVLAN/F+bgVOqOJqOnEnrhp222clB5p3vUlD+1QAnfg==", + "dev": true }, "type-check": { "version": "0.3.2", "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.3.2.tgz", "integrity": "sha1-WITKtRLPHTVeP7eE8wgEsrUg23I=", + "dev": true, "requires": { "prelude-ls": "~1.1.2" } }, + "type-fest": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.8.1.tgz", + "integrity": "sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==", + "dev": true + }, "type-is": { - "version": "1.6.16", - "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.16.tgz", - "integrity": 
"sha512-HRkVv/5qY2G6I8iab9cI7v1bOIdhm94dVjQCPFElW9W+3GeDOSHmy2EBYe4VTApuzolPcmgFTN3ftVJRKR2J9Q==", + "version": "1.6.18", + "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", + "integrity": "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==", + "dev": true, "requires": { "media-typer": "0.3.0", - "mime-types": "~2.1.18" + "mime-types": "~2.1.24" + }, + "dependencies": { + "mime-db": { + "version": "1.43.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.43.0.tgz", + "integrity": "sha512-+5dsGEEovYbT8UY9yD7eE4XTc4UwJ1jBYlgaQQF38ENsKR3wj/8q8RFZrF9WIZpB2V1ArTVFUva8sAul1NzRzQ==", + "dev": true + }, + "mime-types": { + "version": "2.1.26", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.26.tgz", + "integrity": "sha512-01paPWYgLrkqAyrlDorC1uDwl2p3qZT7yl806vW7DvDoxwXi46jsjFbg+WdwotBIk6/MbEhO/dh5aZ5sNj/dWQ==", + "dev": true, + "requires": { + "mime-db": "1.43.0" + } + } } }, "typedarray": { "version": "0.0.6", "resolved": "https://registry.npmjs.org/typedarray/-/typedarray-0.0.6.tgz", - "integrity": "sha1-hnrHTjhkGHsdPUfZlqeOxciDB3c=" + "integrity": "sha1-hnrHTjhkGHsdPUfZlqeOxciDB3c=", + "dev": true + }, + "typedarray-to-buffer": { + "version": "3.1.5", + "resolved": "https://registry.npmjs.org/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz", + "integrity": "sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q==", + "dev": true, + "requires": { + "is-typedarray": "^1.0.0" + } }, "typescript": { - "version": "3.3.1", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.3.1.tgz", - "integrity": "sha512-cTmIDFW7O0IHbn1DPYjkiebHxwtCMU+eTy30ZtJNBPF9j2O1ITu5XH2YnBeVRKWHqF+3JQwWJv0Q0aUgX8W7IA==", + "version": "3.7.5", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.7.5.tgz", + "integrity": "sha512-/P5lkRXkWHNAbcJIiHPfRoKqyd7bsyCma1hZNUGfn20qm64T6ZBlrzprymeu918H+mB/0rIg2gGK/BXkhhYgBw==", "dev": true }, "typestyle": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/typestyle/-/typestyle-2.0.1.tgz", - "integrity": "sha512-3Mv5ZZbYJ3y3G6rX3iRLrgYibAlafK2nsc9VlTsYcEaK8w+9vtNDx0T2TJsznI5FIh+WoBnjJ5F0/26WaGRxXQ==", + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/typestyle/-/typestyle-2.0.4.tgz", + "integrity": "sha512-+57eGqcEjiAc51hB/zXnZFoVuzwuxb9WbPpb1VT2zPJPIo88wGXod7dHa0IJ1Ue+sncHj2WZMZEPJRAqwVraoA==", "requires": { "csstype": "^2.4.0", - "free-style": "2.5.1" + "free-style": "2.6.1" } }, "ua-parser-js": { @@ -13528,156 +19752,89 @@ "resolved": "https://registry.npmjs.org/ua-parser-js/-/ua-parser-js-0.7.19.tgz", "integrity": "sha512-T3PVJ6uz8i0HzPxOF9SWzWAlfN/DavlpQqepn22xgve/5QecC+XMCAtmUNnY7C9StehaV6exjUCI801lOI7QlQ==" }, - "uc.micro": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/uc.micro/-/uc.micro-1.0.6.tgz", - "integrity": "sha512-8Y75pvTYkLJW2hWQHXxoqRgV7qb9B+9vFEtidML+7koHUFapnVJAZ6cKs+Qjz5Aw3aZWHMC6u0wJE3At+nSGwA==" - }, "uglify-js": { - "version": "3.4.9", - "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.4.9.tgz", - "integrity": "sha512-8CJsbKOtEbnJsTyv6LE6m6ZKniqMiFWmm9sRbopbkGs3gMPPfd3Fh8iIA4Ykv5MgaTbqHr4BaoGLJLZNhsrW1Q==", + "version": "3.8.1", + "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.8.1.tgz", + "integrity": "sha512-W7KxyzeaQmZvUFbGj4+YFshhVrMBGSg2IbcYAjGWGvx8DHvJMclbTDMpffdxFUGPBHjIytk7KJUR/KUXstUGDw==", + "dev": true, + "optional": true, "requires": { - "commander": "~2.17.1", + "commander": "~2.20.3", "source-map": 
"~0.6.1" }, "dependencies": { "commander": { - "version": "2.17.1", - "resolved": "https://registry.npmjs.org/commander/-/commander-2.17.1.tgz", - "integrity": "sha512-wPMUt6FnH2yzG95SA6mzjQOEKUU3aLaDEmzs1ti+1E9h+CsrZghRlqEM/EJ4KscsQVG8uNN4uVreUeT8+drlgg==" + "version": "2.20.3", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", + "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==", + "dev": true, + "optional": true }, "source-map": { "version": "0.6.1", "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==" + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, + "optional": true } } }, - "uglify-to-browserify": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/uglify-to-browserify/-/uglify-to-browserify-1.0.2.tgz", - "integrity": "sha1-bgkk1r2mta/jSeOabWMoUKD4grc=", - "optional": true + "unicode-canonical-property-names-ecmascript": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-1.0.4.tgz", + "integrity": "sha512-jDrNnXWHd4oHiTZnx/ZG7gtUTVp+gCcTTKr8L0HjlwphROEW3+Him+IpvC+xcJEFegapiMZyZe02CyuOnRmbnQ==", + "dev": true }, - "uglifyjs-webpack-plugin": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/uglifyjs-webpack-plugin/-/uglifyjs-webpack-plugin-1.3.0.tgz", - "integrity": "sha512-ovHIch0AMlxjD/97j9AYovZxG5wnHOPkL7T1GKochBADp/Zwc44pEWNqpKl1Loupp1WhFg7SlYmHZRUfdAacgw==", + "unicode-match-property-ecmascript": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-1.0.4.tgz", + "integrity": "sha512-L4Qoh15vTfntsn4P1zqnHulG0LdXgjSO035fEpdtp6YxXhMT51Q6vgM5lYdG/5X3MjS+k/Y9Xw4SFCY9IkR0rg==", + "dev": true, "requires": { - "cacache": "^10.0.4", - "find-cache-dir": "^1.0.0", - "schema-utils": "^0.4.5", - "serialize-javascript": "^1.4.0", - "source-map": "^0.6.1", - "uglify-es": "^3.3.4", - "webpack-sources": "^1.1.0", - "worker-farm": "^1.5.2" - }, - "dependencies": { - "ajv": { - "version": "6.6.2", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.6.2.tgz", - "integrity": "sha512-FBHEW6Jf5TB9MGBgUUA9XHkTbjXYfAUjY43ACMfmdMRHniyoMHjHjzD50OK8LGDWQwp4rWEsIq5kEqq7rvIM1g==", - "requires": { - "fast-deep-equal": "^2.0.1", - "fast-json-stable-stringify": "^2.0.0", - "json-schema-traverse": "^0.4.1", - "uri-js": "^4.2.2" - } - }, - "commander": { - "version": "2.13.0", - "resolved": "https://registry.npmjs.org/commander/-/commander-2.13.0.tgz", - "integrity": "sha512-MVuS359B+YzaWqjCL/c+22gfryv+mCBPHAv3zyVI2GN8EY6IRP8VwtasXn8jyyhvvq84R4ImN1OKRtcbIasjYA==" - }, - "fast-deep-equal": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-2.0.1.tgz", - "integrity": "sha1-ewUhjd+WZ79/Nwv3/bLLFf3Qqkk=" - }, - "json-schema-traverse": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", - "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==" - }, - "schema-utils": { - "version": "0.4.7", - "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-0.4.7.tgz", - "integrity": 
"sha512-v/iwU6wvwGK8HbU9yi3/nhGzP0yGSuhQMzL6ySiec1FSrZZDkhm4noOSWzrNFo/jEc+SJY6jRTwuwbSXJPDUnQ==", - "requires": { - "ajv": "^6.1.0", - "ajv-keywords": "^3.1.0" - } - }, - "source-map": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==" - }, - "uglify-es": { - "version": "3.3.9", - "resolved": "https://registry.npmjs.org/uglify-es/-/uglify-es-3.3.9.tgz", - "integrity": "sha512-r+MU0rfv4L/0eeW3xZrd16t4NZfK8Ld4SWVglYBb7ez5uXFWHuVRs6xCTrf1yirs9a4j4Y27nn7SRfO6v67XsQ==", - "requires": { - "commander": "~2.13.0", - "source-map": "~0.6.1" - } - } + "unicode-canonical-property-names-ecmascript": "^1.0.4", + "unicode-property-aliases-ecmascript": "^1.0.4" } }, - "ultron": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/ultron/-/ultron-1.1.1.tgz", - "integrity": "sha512-UIEXBNeYmKptWH6z8ZnqTeS8fV74zG0/eRU9VGkpzz+LIJNs8W/zM/L+7ctCkRrgbNnnR0xxw4bKOr0cW0N0Og==", + "unicode-match-property-value-ecmascript": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-1.2.0.tgz", + "integrity": "sha512-wjuQHGQVofmSJv1uVISKLE5zO2rNGzM/KCYZch/QQvez7C1hUhBIuZ701fYXExuufJFMPhv2SyL8CyoIfMLbIQ==", + "dev": true + }, + "unicode-property-aliases-ecmascript": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-1.1.0.tgz", + "integrity": "sha512-PqSoPh/pWetQ2phoj5RLiaqIk4kCNwoV3CI+LfGmWLKI3rE3kl1h59XpX2BjgDrmbxD9ARtQobPGU1SguCYuQg==", "dev": true }, "union-value": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/union-value/-/union-value-1.0.0.tgz", - "integrity": "sha1-XHHDTLW61dzr4+oM0IIHulqhrqQ=", + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/union-value/-/union-value-1.0.1.tgz", + "integrity": "sha512-tJfXmxMeWYnczCVs7XAEvIV7ieppALdyepWMkHkwciRpZraG/xwT+s2JN8+pr1+8jCRf80FFzvr+MpQeeoF4Xg==", "requires": { "arr-union": "^3.1.0", "get-value": "^2.0.6", "is-extendable": "^0.1.1", - "set-value": "^0.4.3" - }, - "dependencies": { - "extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", - "requires": { - "is-extendable": "^0.1.0" - } - }, - "set-value": { - "version": "0.4.3", - "resolved": "https://registry.npmjs.org/set-value/-/set-value-0.4.3.tgz", - "integrity": "sha1-fbCPnT0i3H945Trzw79GZuzfzPE=", - "requires": { - "extend-shallow": "^2.0.1", - "is-extendable": "^0.1.1", - "is-plain-object": "^2.0.1", - "to-object-path": "^0.3.0" - } - } + "set-value": "^2.0.1" } }, "uniq": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/uniq/-/uniq-1.0.1.tgz", - "integrity": "sha1-sxxa6CVIRKOoKBVBzisEuGWnNP8=" + "integrity": "sha1-sxxa6CVIRKOoKBVBzisEuGWnNP8=", + "dev": true }, "uniqs": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/uniqs/-/uniqs-2.0.0.tgz", - "integrity": "sha1-/+3ks2slKQaW5uFl1KWe25mOawI=" + "integrity": "sha1-/+3ks2slKQaW5uFl1KWe25mOawI=", + "dev": true }, "unique-filename": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/unique-filename/-/unique-filename-1.1.1.tgz", "integrity": "sha512-Vmp0jIp2ln35UTXuryvjzkjGdRyf9b2lTXuSYUiPmzRcl3FDtYqAwOnTJkAngD9SWhnoJzDbTKwaOrZ+STtxNQ==", + "dev": true, "requires": { "unique-slug": "^2.0.0" } @@ -13686,27 +19843,27 @@ 
"version": "2.0.1", "resolved": "https://registry.npmjs.org/unique-slug/-/unique-slug-2.0.1.tgz", "integrity": "sha512-n9cU6+gITaVu7VGj1Z8feKMmfAjEAQGhwD9fE3zvpRRa0wEIx8ODYkVGfSc94M2OX00tUFV8wH3zYbm1I8mxFg==", + "dev": true, "requires": { "imurmurhash": "^0.1.4" } }, - "unique-string": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/unique-string/-/unique-string-1.0.0.tgz", - "integrity": "sha1-nhBXzKhRq7kzmPizOuGHuZyuwRo=", - "requires": { - "crypto-random-string": "^1.0.0" - } - }, "universalify": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz", - "integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==" + "integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==", + "dev": true }, "unpipe": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", - "integrity": "sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw=" + "integrity": "sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw=", + "dev": true + }, + "unquote": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/unquote/-/unquote-1.1.1.tgz", + "integrity": "sha1-j97XMk7G6IoP+LkF58CYzcCG1UQ=" }, "unset-value": { "version": "1.0.0", @@ -13744,49 +19901,17 @@ } } }, - "unzip-response": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/unzip-response/-/unzip-response-2.0.1.tgz", - "integrity": "sha1-0vD3N9FrBhXnKmk17QQhRXLVb5c=" - }, "upath": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/upath/-/upath-1.1.0.tgz", - "integrity": "sha512-bzpH/oBhoS/QI/YtbkqCg6VEiPYjSZtrHQM6/QnJS6OL9pKUFLqb3aFh4Scvwm45+7iAgiMkLhSbaZxUqmrprw==" - }, - "update-notifier": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/update-notifier/-/update-notifier-2.5.0.tgz", - "integrity": "sha512-gwMdhgJHGuj/+wHJJs9e6PcCszpxR1b236igrOkUofGhqJuG+amlIKwApH1IW1WWl7ovZxsX49lMBWLxSdm5Dw==", - "requires": { - "boxen": "^1.2.1", - "chalk": "^2.0.1", - "configstore": "^3.0.0", - "import-lazy": "^2.1.0", - "is-ci": "^1.0.10", - "is-installed-globally": "^0.1.0", - "is-npm": "^1.0.0", - "latest-version": "^3.0.0", - "semver-diff": "^2.0.0", - "xdg-basedir": "^3.0.0" - }, - "dependencies": { - "chalk": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.1.tgz", - "integrity": "sha512-ObN6h1v2fTJSmUXoS3nMQ92LbDK9be4TV+6G+omQlGJFdcUX5heKi1LZ1YnRMIgwTLEj3E24bT6tYni50rlCfQ==", - "requires": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" - } - } - } + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/upath/-/upath-1.2.0.tgz", + "integrity": "sha512-aZwGpamFO61g3OlfT7OQCHqhGnW43ieH9WZeP7QxN/G/jS4jfqUkZxoryvJgVPEcrl5NL/ggHsSmLMHuH64Lhg==", + "dev": true }, "upper-case": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/upper-case/-/upper-case-1.1.3.tgz", - "integrity": "sha1-9rRQHC7EzdJrp4vnIilh3ndiFZg=" + "integrity": "sha1-9rRQHC7EzdJrp4vnIilh3ndiFZg=", + "dev": true }, "upper-case-first": { "version": "1.1.2", @@ -13801,15 +19926,11 @@ "version": "4.2.2", "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.2.2.tgz", "integrity": "sha512-KY9Frmirql91X2Qgjry0Wd4Y+YTdrdZheS8TFwvkbLWf/G5KNJDCh6pKL5OZctEW4+0Baa5idK2ZQuELRwPznQ==", + "dev": true, "requires": { "punycode": "^2.1.0" } }, - "urijs": { - "version": "1.19.1", - "resolved": "https://registry.npmjs.org/urijs/-/urijs-1.19.1.tgz", - "integrity": 
"sha512-xVrGVi94ueCJNrBSTjWqjvtgvl3cyOTThp2zaMaFNGp3F542TR6sM3f2o8RqZl+AwteClSVmoCyt0ka4RjQOQg==" - }, "urix": { "version": "0.1.0", "resolved": "https://registry.npmjs.org/urix/-/urix-0.1.0.tgz", @@ -13819,6 +19940,7 @@ "version": "0.11.0", "resolved": "https://registry.npmjs.org/url/-/url-0.11.0.tgz", "integrity": "sha1-ODjpfPxgUh63PFJajlW/3Z4uKPE=", + "dev": true, "requires": { "punycode": "1.3.2", "querystring": "0.2.0" @@ -13827,44 +19949,54 @@ "punycode": { "version": "1.3.2", "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz", - "integrity": "sha1-llOgNvt8HuQjQvIyXM7v6jkmxI0=" + "integrity": "sha1-llOgNvt8HuQjQvIyXM7v6jkmxI0=", + "dev": true } } }, "url-loader": { - "version": "0.6.2", - "resolved": "https://registry.npmjs.org/url-loader/-/url-loader-0.6.2.tgz", - "integrity": "sha512-h3qf9TNn53BpuXTTcpC+UehiRrl0Cv45Yr/xWayApjw6G8Bg2dGke7rIwDQ39piciWCWrC+WiqLjOh3SUp9n0Q==", + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/url-loader/-/url-loader-2.3.0.tgz", + "integrity": "sha512-goSdg8VY+7nPZKUEChZSEtW5gjbS66USIGCeSJ1OVOJ7Yfuh/36YxCwMi5HVEJh6mqUYOoy3NJ0vlOMrWsSHog==", + "dev": true, "requires": { - "loader-utils": "^1.0.2", - "mime": "^1.4.1", - "schema-utils": "^0.3.0" + "loader-utils": "^1.2.3", + "mime": "^2.4.4", + "schema-utils": "^2.5.0" + }, + "dependencies": { + "ajv-keywords": { + "version": "3.4.1", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.4.1.tgz", + "integrity": "sha512-RO1ibKvd27e6FEShVFfPALuHI3WjSVNeK5FIsmme/LYRNxjKuNj+Dt7bucLa6NdSv3JcVTyMlm9kGR84z1XpaQ==", + "dev": true + }, + "mime": { + "version": "2.4.4", + "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.4.tgz", + "integrity": "sha512-LRxmNwziLPT828z+4YkNzloCFC2YM4wrB99k+AV5ZbEyfGNWfG8SO1FUXLmLDBSo89NrJZ4DIWeLjy1CHGhMGA==", + "dev": true + }, + "schema-utils": { + "version": "2.6.5", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-2.6.5.tgz", + "integrity": "sha512-5KXuwKziQrTVHh8j/Uxz+QUbxkaLW9X/86NBlx/gnKgtsZA2GIVMUn17qWhRFwF8jdYb3Dig5hRO/W5mZqy6SQ==", + "dev": true, + "requires": { + "ajv": "^6.12.0", + "ajv-keywords": "^3.4.1" + } + } } }, "url-parse": { - "version": "1.4.4", - "resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.4.4.tgz", - "integrity": "sha512-/92DTTorg4JjktLNLe6GPS2/RvAd/RGr6LuktmWSMLEOa6rjnlrFXNgSbSmkNvCoL2T028A0a1JaJLzRMlFoHg==", - "requires": { - "querystringify": "^2.0.0", - "requires-port": "^1.0.0" - } - }, - "url-parse-lax": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/url-parse-lax/-/url-parse-lax-1.0.0.tgz", - "integrity": "sha1-evjzA2Rem9eaJy56FKxovAYJ2nM=", - "requires": { - "prepend-http": "^1.0.1" - } - }, - "url-regex": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/url-regex/-/url-regex-3.2.0.tgz", - "integrity": "sha1-260eDJ4p4QXdCx8J9oYvf9tIJyQ=", + "version": "1.4.7", + "resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.4.7.tgz", + "integrity": "sha512-d3uaVyzDB9tQoSXFvuSUNFibTd9zxd2bkVrDRvF5TmvWWQwqE4lgYJ5m+x1DbecWkw+LK4RNl2CU1hHuOKPVlg==", "dev": true, "requires": { - "ip-regex": "^1.0.1" + "querystringify": "^2.1.1", + "requires-port": "^1.0.0" } }, "use": { @@ -13873,9 +20005,10 @@ "integrity": "sha512-cwESVXlO3url9YWlFW/TA9cshCEhtu7IKJ/p5soJ/gGpj7vbvFrAY/eIioQ6Dw23KjZhYgiIo8HOs1nQ2vr/oQ==" }, "util": { - "version": "0.10.4", - "resolved": "https://registry.npmjs.org/util/-/util-0.10.4.tgz", - "integrity": "sha512-0Pm9hTQ3se5ll1XihRic3FDIku70C+iHUdT/W926rSgHV5QgXsYbKZN8MSC3tJtSkhuROzvsQjAaFENRXr+19A==", + 
"version": "0.11.1", + "resolved": "https://registry.npmjs.org/util/-/util-0.11.1.tgz", + "integrity": "sha512-HShAsny+zS2TZfaXxD9tYj4HQGlBezXZMZuM/S5PKLLoZkShZiGk9o5CzukI1LVHZvjdvZ2Sj1aW/Ndn2NB/HQ==", + "dev": true, "requires": { "inherits": "2.0.3" } @@ -13883,12 +20016,14 @@ "util-deprecate": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", - "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=" + "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=", + "dev": true }, "util.promisify": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/util.promisify/-/util.promisify-1.0.0.tgz", "integrity": "sha512-i+6qA2MPhvoKLuxnJNpXAGhg7HphQOSUq2LKMZD0m15EiskXUkMvKdF4Uui0WYeCUGea+o2cw/ZuwehtfsrNkA==", + "dev": true, "requires": { "define-properties": "^1.1.2", "object.getownpropertydescriptors": "^2.0.3" @@ -13897,22 +20032,32 @@ "utila": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/utila/-/utila-0.4.0.tgz", - "integrity": "sha1-ihagXURWV6Oupe7MWxKk+lN5dyw=" + "integrity": "sha1-ihagXURWV6Oupe7MWxKk+lN5dyw=", + "dev": true }, "utils-merge": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", - "integrity": "sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM=" + "integrity": "sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM=", + "dev": true }, "uuid": { "version": "3.3.2", "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz", - "integrity": "sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA==" + "integrity": "sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA==", + "dev": true + }, + "v8-compile-cache": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.1.0.tgz", + "integrity": "sha512-usZBT3PW+LOjM25wbqIlZwPeJV+3OSz3M1k1Ws8snlW39dZyYL9lOGC5FgPVHfk0jKmjiDV8Z0mIbVQPiwFs7g==", + "dev": true }, "validate-npm-package-license": { "version": "3.0.4", "resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz", "integrity": "sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==", + "dev": true, "requires": { "spdx-correct": "^3.0.0", "spdx-expression-parse": "^3.0.0" @@ -13926,17 +20071,20 @@ "vary": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", - "integrity": "sha1-IpnwLG3tMNSllhsLn3RSShj2NPw=" + "integrity": "sha1-IpnwLG3tMNSllhsLn3RSShj2NPw=", + "dev": true }, "vendors": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/vendors/-/vendors-1.0.2.tgz", - "integrity": "sha512-w/hry/368nO21AN9QljsaIhb9ZiZtZARoVH5f3CsFbawdLdayCgKRPup7CggujvySMxx0I91NOyxdVENohprLQ==" + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/vendors/-/vendors-1.0.4.tgz", + "integrity": "sha512-/juG65kTL4Cy2su4P8HjtkTxk6VmJDiOPBufWniqQ6wknac6jNiXS9vU+hO3wgusiyqWlzTbVHi0dyJqRONg3w==", + "dev": true }, "verror": { "version": "1.10.0", "resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz", "integrity": "sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA=", + "dev": true, "requires": { "assert-plus": "^1.0.0", "core-util-is": "1.0.2", @@ -13944,25 +20092,42 @@ } }, "vm-browserify": { - "version": "0.0.4", - "resolved": "https://registry.npmjs.org/vm-browserify/-/vm-browserify-0.0.4.tgz", - "integrity": "sha1-XX6kW7755Kb/ZflUOOCofDV9WnM=", + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/vm-browserify/-/vm-browserify-1.1.2.tgz", + 
"integrity": "sha512-2ham8XPWTONajOR0ohOKOHXkm3+gaBmGut3SRuu75xLd/RRaY6vqgh8NBYYk7+RW3u5AtzPQZG8F10LHkl0lAQ==", + "dev": true + }, + "w3c-hr-time": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/w3c-hr-time/-/w3c-hr-time-1.0.2.tgz", + "integrity": "sha512-z8P5DvDNjKDoFIHK7q8r8lackT6l+jo/Ye3HOle7l9nICP9lf1Ci25fy9vHd0JOWewkIFzXIEig3TdKT7JQ5fQ==", + "dev": true, "requires": { - "indexof": "0.0.1" + "browser-process-hrtime": "^1.0.0" } }, - "w3c-hr-time": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/w3c-hr-time/-/w3c-hr-time-1.0.1.tgz", - "integrity": "sha1-gqwr/2PZUOqeMYmlimViX+3xkEU=", + "w3c-xmlserializer": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/w3c-xmlserializer/-/w3c-xmlserializer-1.1.2.tgz", + "integrity": "sha512-p10l/ayESzrBMYWRID6xbuCKh2Fp77+sA0doRuGn4tTIMrrZVeqfpKjXHY+oDh3K4nLdPgNwMTVP6Vp4pvqbNg==", + "dev": true, "requires": { - "browser-process-hrtime": "^0.1.2" + "domexception": "^1.0.1", + "webidl-conversions": "^4.0.2", + "xml-name-validator": "^3.0.0" } }, + "wait-for-expect": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/wait-for-expect/-/wait-for-expect-3.0.2.tgz", + "integrity": "sha512-cfS1+DZxuav1aBYbaO/kE06EOS8yRw7qOFoD3XtjTkYvCvh3zUvNST8DXK/nPaeqIzIv3P3kL3lRJn8iwOiSag==", + "dev": true + }, "walker": { "version": "1.0.7", "resolved": "https://registry.npmjs.org/walker/-/walker-1.0.7.tgz", "integrity": "sha1-L3+bj9ENZ3JisYqITijRlhjgKPs=", + "dev": true, "requires": { "makeerror": "1.0.x" } @@ -13975,19 +20140,11 @@ "loose-envify": "^1.0.0" } }, - "watch": { - "version": "0.18.0", - "resolved": "https://registry.npmjs.org/watch/-/watch-0.18.0.tgz", - "integrity": "sha1-KAlUdsbffJDJYxOJkMClQj60uYY=", - "requires": { - "exec-sh": "^0.2.0", - "minimist": "^1.2.0" - } - }, "watchpack": { "version": "1.6.0", "resolved": "https://registry.npmjs.org/watchpack/-/watchpack-1.6.0.tgz", "integrity": "sha512-i6dHe3EyLjMmDlU1/bGQpEw25XSjkJULPuAVKCbNRefQVq48yXKUpwg538F7AZTf9kyr57zj++pQFltUa5H7yA==", + "dev": true, "requires": { "chokidar": "^2.0.2", "graceful-fs": "^4.1.2", @@ -13998,294 +20155,176 @@ "version": "2.0.0", "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-2.0.0.tgz", "integrity": "sha512-5teOsQWABXHHBFP9y3skS5P3d/WfWXpv3FUpy+LorMrNYaT9pI4oLMQX7jzQ2KklNpGpWHzdCXTDT2Y3XGlZBw==", + "dev": true, "requires": { "micromatch": "^3.1.4", "normalize-path": "^2.1.1" } }, "chokidar": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-2.0.4.tgz", - "integrity": "sha512-z9n7yt9rOvIJrMhvDtDictKrkFHeihkNl6uWMmZlmL6tJtX9Cs+87oK+teBx+JIgzvbX3yZHT3eF8vpbDxHJXQ==", + "version": "2.1.8", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-2.1.8.tgz", + "integrity": "sha512-ZmZUazfOzf0Nve7duiCKD23PFSCs4JPoYyccjUFF3aQkQadqBhfzhjkwBH2mNOG9cTBwhamM37EIsIkZw3nRgg==", + "dev": true, "requires": { "anymatch": "^2.0.0", - "async-each": "^1.0.0", - "braces": "^2.3.0", - "fsevents": "^1.2.2", + "async-each": "^1.0.1", + "braces": "^2.3.2", + "fsevents": "^1.2.7", "glob-parent": "^3.1.0", - "inherits": "^2.0.1", + "inherits": "^2.0.3", "is-binary-path": "^1.0.0", "is-glob": "^4.0.0", - "lodash.debounce": "^4.0.8", - "normalize-path": "^2.1.1", + "normalize-path": "^3.0.0", "path-is-absolute": "^1.0.0", - "readdirp": "^2.0.0", - "upath": "^1.0.5" + "readdirp": "^2.2.1", + "upath": "^1.1.1" + }, + "dependencies": { + "normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": 
"sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "dev": true + }, + "upath": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/upath/-/upath-1.2.0.tgz", + "integrity": "sha512-aZwGpamFO61g3OlfT7OQCHqhGnW43ieH9WZeP7QxN/G/jS4jfqUkZxoryvJgVPEcrl5NL/ggHsSmLMHuH64Lhg==", + "dev": true + } } }, "glob-parent": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-3.1.0.tgz", "integrity": "sha1-nmr2KZ2NO9K9QEMIMr0RPfkGxa4=", - "requires": { - "is-glob": "^3.1.0", - "path-dirname": "^1.0.0" - }, - "dependencies": { - "is-glob": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-3.1.0.tgz", - "integrity": "sha1-e6WuJCF4BKxwcHuWkiVnSGzD6Eo=", - "requires": { - "is-extglob": "^2.1.0" - } - } - } - } - } - }, - "wbuf": { - "version": "1.7.3", - "resolved": "https://registry.npmjs.org/wbuf/-/wbuf-1.7.3.tgz", - "integrity": "sha512-O84QOnr0icsbFGLS0O3bI5FswxzRr8/gHwWkDlQFskhSPryQXvrTMxjxGP4+iWYoauLoBvfDpkrOauZ+0iZpDA==", - "requires": { - "minimalistic-assert": "^1.0.0" - } - }, - "webidl-conversions": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-4.0.2.tgz", - "integrity": "sha512-YQ+BmxuTgd6UXZW3+ICGfyqRyHXVlD5GtQr5+qjiNW7bF0cqrzX500HVXPBOvgXb5YnzDd+h0zqyv61KUD7+Sg==" - }, - "webpack": { - "version": "3.8.1", - "resolved": "https://registry.npmjs.org/webpack/-/webpack-3.8.1.tgz", - "integrity": "sha512-5ZXLWWsMqHKFr5y0N3Eo5IIisxeEeRAajNq4mELb/WELOR7srdbQk2N5XiyNy2A/AgvlR3AmeBCZJW8lHrolbw==", - "requires": { - "acorn": "^5.0.0", - "acorn-dynamic-import": "^2.0.0", - "ajv": "^5.1.5", - "ajv-keywords": "^2.0.0", - "async": "^2.1.2", - "enhanced-resolve": "^3.4.0", - "escope": "^3.6.0", - "interpret": "^1.0.0", - "json-loader": "^0.5.4", - "json5": "^0.5.1", - "loader-runner": "^2.3.0", - "loader-utils": "^1.1.0", - "memory-fs": "~0.4.1", - "mkdirp": "~0.5.0", - "node-libs-browser": "^2.0.0", - "source-map": "^0.5.3", - "supports-color": "^4.2.1", - "tapable": "^0.2.7", - "uglifyjs-webpack-plugin": "^0.4.6", - "watchpack": "^1.4.0", - "webpack-sources": "^1.0.1", - "yargs": "^8.0.2" - }, - "dependencies": { - "ajv-keywords": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-2.1.1.tgz", - "integrity": "sha1-YXmX/F9gV2iUxDX5QNgZ4TW4B2I=" - }, - "camelcase": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-1.2.1.tgz", - "integrity": "sha1-m7UwTS4LVmmLLHWLCKPqqdqlijk=" - }, - "cliui": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/cliui/-/cliui-2.1.0.tgz", - "integrity": "sha1-S0dXYP+AJkx2LDoXGQMukcf+oNE=", - "requires": { - "center-align": "^0.1.1", - "right-align": "^0.1.1", - "wordwrap": "0.0.2" - } - }, - "has-flag": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-2.0.0.tgz", - "integrity": "sha1-6CB68cx7MNRGzHC3NLXovhj4jVE=" - }, - "is-fullwidth-code-point": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz", - "integrity": "sha1-754xOG8DGn8NZDr4L95QxFfvAMs=", - "requires": { - "number-is-nan": "^1.0.0" - } - }, - "json5": { - "version": "0.5.1", - "resolved": "https://registry.npmjs.org/json5/-/json5-0.5.1.tgz", - "integrity": "sha1-Hq3nrMASA0rYTiOWdn6tn6VJWCE=" - }, - "load-json-file": { - "version": "2.0.0", - "resolved": "http://registry.npmjs.org/load-json-file/-/load-json-file-2.0.0.tgz", - 
"integrity": "sha1-eUfkIUmvgNaWy/eXvKq8/h/inKg=", - "requires": { - "graceful-fs": "^4.1.2", - "parse-json": "^2.2.0", - "pify": "^2.0.0", - "strip-bom": "^3.0.0" - } - }, - "path-type": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/path-type/-/path-type-2.0.0.tgz", - "integrity": "sha1-8BLMuEFbcJb8LaoQVMPXI4lZTHM=", - "requires": { - "pify": "^2.0.0" - } - }, - "read-pkg": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-2.0.0.tgz", - "integrity": "sha1-jvHAYjxqbbDcZxPEv6xGMysjaPg=", - "requires": { - "load-json-file": "^2.0.0", - "normalize-package-data": "^2.3.2", - "path-type": "^2.0.0" - } - }, - "read-pkg-up": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-2.0.0.tgz", - "integrity": "sha1-a3KoBImE4MQeeVEP1en6mbO1Sb4=", - "requires": { - "find-up": "^2.0.0", - "read-pkg": "^2.0.0" - } - }, - "strip-bom": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", - "integrity": "sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM=" - }, - "supports-color": { - "version": "4.5.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-4.5.0.tgz", - "integrity": "sha1-vnoN5ITexcXN34s9WRJQRJEvY1s=", - "requires": { - "has-flag": "^2.0.0" - } - }, - "uglify-js": { - "version": "2.8.29", - "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-2.8.29.tgz", - "integrity": "sha1-KcVzMUgFe7Th913zW3qcty5qWd0=", - "requires": { - "source-map": "~0.5.1", - "uglify-to-browserify": "~1.0.0", - "yargs": "~3.10.0" - }, - "dependencies": { - "yargs": { - "version": "3.10.0", - "resolved": "http://registry.npmjs.org/yargs/-/yargs-3.10.0.tgz", - "integrity": "sha1-9+572FfdfB0tOMDnTvvWgdFDH9E=", - "requires": { - "camelcase": "^1.0.2", - "cliui": "^2.1.0", - "decamelize": "^1.0.0", - "window-size": "0.1.0" - } - } - } - }, - "uglifyjs-webpack-plugin": { - "version": "0.4.6", - "resolved": "https://registry.npmjs.org/uglifyjs-webpack-plugin/-/uglifyjs-webpack-plugin-0.4.6.tgz", - "integrity": "sha1-uVH0q7a9YX5m9j64kUmOORdj4wk=", - "requires": { - "source-map": "^0.5.6", - "uglify-js": "^2.8.29", - "webpack-sources": "^1.0.1" - } - }, - "wordwrap": { - "version": "0.0.2", - "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.2.tgz", - "integrity": "sha1-t5Zpu0LstAn4PVg8rVLKF+qhZD8=" - }, - "yargs": { - "version": "8.0.2", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-8.0.2.tgz", - "integrity": "sha1-YpmpBVsc78lp/355wdkY3Osiw2A=", - "requires": { - "camelcase": "^4.1.0", - "cliui": "^3.2.0", - "decamelize": "^1.1.1", - "get-caller-file": "^1.0.1", - "os-locale": "^2.0.0", - "read-pkg-up": "^2.0.0", - "require-directory": "^2.1.1", - "require-main-filename": "^1.0.1", - "set-blocking": "^2.0.0", - "string-width": "^2.0.0", - "which-module": "^2.0.0", - "y18n": "^3.2.1", - "yargs-parser": "^7.0.0" + "dev": true, + "requires": { + "is-glob": "^3.1.0", + "path-dirname": "^1.0.0" }, "dependencies": { - "camelcase": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-4.1.0.tgz", - "integrity": "sha1-1UVjW+HjPFQmScaRc+Xeas+uNN0=" - }, - "cliui": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/cliui/-/cliui-3.2.0.tgz", - "integrity": "sha1-EgYBU3qRbSmUD5NNo7SNWFo5IT0=", + "is-glob": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-3.1.0.tgz", + "integrity": "sha1-e6WuJCF4BKxwcHuWkiVnSGzD6Eo=", + "dev": true, "requires": { - "string-width": "^1.0.1", - 
"strip-ansi": "^3.0.1", - "wrap-ansi": "^2.0.0" - }, - "dependencies": { - "string-width": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz", - "integrity": "sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M=", - "requires": { - "code-point-at": "^1.0.0", - "is-fullwidth-code-point": "^1.0.0", - "strip-ansi": "^3.0.0" - } - } + "is-extglob": "^2.1.0" } } } + } + } + }, + "wbuf": { + "version": "1.7.3", + "resolved": "https://registry.npmjs.org/wbuf/-/wbuf-1.7.3.tgz", + "integrity": "sha512-O84QOnr0icsbFGLS0O3bI5FswxzRr8/gHwWkDlQFskhSPryQXvrTMxjxGP4+iWYoauLoBvfDpkrOauZ+0iZpDA==", + "dev": true, + "requires": { + "minimalistic-assert": "^1.0.0" + } + }, + "webidl-conversions": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-4.0.2.tgz", + "integrity": "sha512-YQ+BmxuTgd6UXZW3+ICGfyqRyHXVlD5GtQr5+qjiNW7bF0cqrzX500HVXPBOvgXb5YnzDd+h0zqyv61KUD7+Sg==", + "dev": true + }, + "webpack": { + "version": "4.42.0", + "resolved": "https://registry.npmjs.org/webpack/-/webpack-4.42.0.tgz", + "integrity": "sha512-EzJRHvwQyBiYrYqhyjW9AqM90dE4+s1/XtCfn7uWg6cS72zH+2VPFAlsnW0+W0cDi0XRjNKUMoJtpSi50+Ph6w==", + "dev": true, + "requires": { + "@webassemblyjs/ast": "1.8.5", + "@webassemblyjs/helper-module-context": "1.8.5", + "@webassemblyjs/wasm-edit": "1.8.5", + "@webassemblyjs/wasm-parser": "1.8.5", + "acorn": "^6.2.1", + "ajv": "^6.10.2", + "ajv-keywords": "^3.4.1", + "chrome-trace-event": "^1.0.2", + "enhanced-resolve": "^4.1.0", + "eslint-scope": "^4.0.3", + "json-parse-better-errors": "^1.0.2", + "loader-runner": "^2.4.0", + "loader-utils": "^1.2.3", + "memory-fs": "^0.4.1", + "micromatch": "^3.1.10", + "mkdirp": "^0.5.1", + "neo-async": "^2.6.1", + "node-libs-browser": "^2.2.1", + "schema-utils": "^1.0.0", + "tapable": "^1.1.3", + "terser-webpack-plugin": "^1.4.3", + "watchpack": "^1.6.0", + "webpack-sources": "^1.4.1" + }, + "dependencies": { + "acorn": { + "version": "6.4.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-6.4.1.tgz", + "integrity": "sha512-ZVA9k326Nwrj3Cj9jlh3wGFutC2ZornPNARZwsNYqQYgN0EsV2d53w5RN/co65Ohn4sUAUtb1rSUAOD6XN9idA==", + "dev": true + }, + "ajv-keywords": { + "version": "3.4.1", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.4.1.tgz", + "integrity": "sha512-RO1ibKvd27e6FEShVFfPALuHI3WjSVNeK5FIsmme/LYRNxjKuNj+Dt7bucLa6NdSv3JcVTyMlm9kGR84z1XpaQ==", + "dev": true + }, + "eslint-scope": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-4.0.3.tgz", + "integrity": "sha512-p7VutNr1O/QrxysMo3E45FjYDTeXBy0iTltPFNSqKAIfjDSXC+4dj+qfyuD8bfAXrW/y6lW3O76VaYNPKfpKrg==", + "dev": true, + "requires": { + "esrecurse": "^4.1.0", + "estraverse": "^4.1.1" + } }, - "yargs-parser": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-7.0.0.tgz", - "integrity": "sha1-jQrELxbqVd69MyyvTEA4s+P139k=", + "neo-async": { + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/neo-async/-/neo-async-2.6.1.tgz", + "integrity": "sha512-iyam8fBuCUpWeKPGpaNMetEocMt364qkCsfL9JuhjXX6dRnguRVOfk2GZaDpPjcOKiiXCPINZC1GczQ7iTq3Zw==", + "dev": true + }, + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true + }, + "terser-webpack-plugin": { + "version": "1.4.3", + "resolved": 
"https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-1.4.3.tgz", + "integrity": "sha512-QMxecFz/gHQwteWwSo5nTc6UaICqN1bMedC5sMtUc7y3Ha3Q8y6ZO0iCR8pq4RJC8Hjf0FEPEHZqcMB/+DFCrA==", + "dev": true, "requires": { - "camelcase": "^4.1.0" - }, - "dependencies": { - "camelcase": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-4.1.0.tgz", - "integrity": "sha1-1UVjW+HjPFQmScaRc+Xeas+uNN0=" - } + "cacache": "^12.0.2", + "find-cache-dir": "^2.1.0", + "is-wsl": "^1.1.0", + "schema-utils": "^1.0.0", + "serialize-javascript": "^2.1.2", + "source-map": "^0.6.1", + "terser": "^4.1.2", + "webpack-sources": "^1.4.0", + "worker-farm": "^1.7.0" } } } }, "webpack-bundle-analyzer": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/webpack-bundle-analyzer/-/webpack-bundle-analyzer-3.0.3.tgz", - "integrity": "sha512-naLWiRfmtH4UJgtUktRTLw6FdoZJ2RvCR9ePbwM9aRMsS/KjFerkPZG9epEvXRAw5d5oPdrs9+3p+afNjxW8Xw==", + "version": "3.6.1", + "resolved": "https://registry.npmjs.org/webpack-bundle-analyzer/-/webpack-bundle-analyzer-3.6.1.tgz", + "integrity": "sha512-Nfd8HDwfSx1xBwC+P8QMGvHAOITxNBSvu/J/mCJvOwv+G4VWkU7zir9SSenTtyCi0LnVtmsc7G5SZo1uV+bxRw==", "dev": true, "requires": { - "acorn": "^5.7.3", + "acorn": "^7.1.1", + "acorn-walk": "^7.1.1", "bfj": "^6.1.1", "chalk": "^2.4.1", "commander": "^2.18.0", @@ -14293,16 +20332,28 @@ "express": "^4.16.3", "filesize": "^3.6.1", "gzip-size": "^5.0.0", - "lodash": "^4.17.10", + "lodash": "^4.17.15", "mkdirp": "^0.5.1", "opener": "^1.5.1", "ws": "^6.0.0" }, "dependencies": { + "acorn": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.1.1.tgz", + "integrity": "sha512-add7dgA5ppRPxCFJoAGfMDi7PIBXq1RtGo7BhbLaxwrXPOmw8gq48Y9ozT01hUKy9byMjlR20EJhu5zlkErEkg==", + "dev": true + }, + "acorn-walk": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-7.1.1.tgz", + "integrity": "sha512-wdlPY2tm/9XBr7QkKlq0WQVgiuGTX6YWPyRyBviSoScBuLfTVQhvwg6wJ369GJ/1nPfTLMfnrFIfjqVg6d+jQQ==", + "dev": true + }, "chalk": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.1.tgz", - "integrity": "sha512-ObN6h1v2fTJSmUXoS3nMQ92LbDK9be4TV+6G+omQlGJFdcUX5heKi1LZ1YnRMIgwTLEj3E24bT6tYni50rlCfQ==", + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", "dev": true, "requires": { "ansi-styles": "^3.2.1", @@ -14310,32 +20361,10 @@ "supports-color": "^5.3.0" } }, - "filesize": { - "version": "3.6.1", - "resolved": "https://registry.npmjs.org/filesize/-/filesize-3.6.1.tgz", - "integrity": "sha512-7KjR1vv6qnicaPMi1iiTcI85CyYwRO/PSFCu6SvqL8jN2Wjt/NIYQTFtFs7fSDCYOstUkEWIQGFUg5YZQfjlcg==", - "dev": true - }, - "gzip-size": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/gzip-size/-/gzip-size-5.0.0.tgz", - "integrity": "sha512-5iI7omclyqrnWw4XbXAmGhPsABkSIDQonv2K0h61lybgofWa6iZyvrI3r2zsJH4P8Nb64fFVzlvfhs0g7BBxAA==", - "dev": true, - "requires": { - "duplexer": "^0.1.1", - "pify": "^3.0.0" - } - }, - "pify": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", - "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=", - "dev": true - }, "ws": { - "version": "6.1.2", - "resolved": "https://registry.npmjs.org/ws/-/ws-6.1.2.tgz", - "integrity": "sha512-rfUqzvz0WxmSXtJpPMX2EeASXabOrSMk1ruMOV3JBTBjo4ac2lDjGGsbQSyxj8Odhw5fBib8ZKEjDNvgouNKYw==", + "version": "6.2.1", 
+ "resolved": "https://registry.npmjs.org/ws/-/ws-6.2.1.tgz", + "integrity": "sha512-GIyAXC2cB7LjvpgMt9EKS2ldqr0MTrORaleiOno6TweZ6r3TKtoFQWay/2PceJ3RuBasOHzXNn5Lrw1X0bEjqA==", "dev": true, "requires": { "async-limiter": "~1.0.0" @@ -14344,352 +20373,394 @@ } }, "webpack-dev-middleware": { - "version": "1.12.2", - "resolved": "https://registry.npmjs.org/webpack-dev-middleware/-/webpack-dev-middleware-1.12.2.tgz", - "integrity": "sha512-FCrqPy1yy/sN6U/SaEZcHKRXGlqU0DUaEBL45jkUYoB8foVb6wCnbIJ1HKIx+qUFTW+3JpVcCJCxZ8VATL4e+A==", + "version": "3.7.2", + "resolved": "https://registry.npmjs.org/webpack-dev-middleware/-/webpack-dev-middleware-3.7.2.tgz", + "integrity": "sha512-1xC42LxbYoqLNAhV6YzTYacicgMZQTqRd27Sim9wn5hJrX3I5nxYy1SxSd4+gjUFsz1dQFj+yEe6zEVmSkeJjw==", + "dev": true, "requires": { - "memory-fs": "~0.4.1", - "mime": "^1.5.0", - "path-is-absolute": "^1.0.0", - "range-parser": "^1.0.3", - "time-stamp": "^2.0.0" + "memory-fs": "^0.4.1", + "mime": "^2.4.4", + "mkdirp": "^0.5.1", + "range-parser": "^1.2.1", + "webpack-log": "^2.0.0" + }, + "dependencies": { + "mime": { + "version": "2.4.4", + "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.4.tgz", + "integrity": "sha512-LRxmNwziLPT828z+4YkNzloCFC2YM4wrB99k+AV5ZbEyfGNWfG8SO1FUXLmLDBSo89NrJZ4DIWeLjy1CHGhMGA==", + "dev": true + } } }, "webpack-dev-server": { - "version": "2.9.4", - "resolved": "https://registry.npmjs.org/webpack-dev-server/-/webpack-dev-server-2.9.4.tgz", - "integrity": "sha512-thrqC0EQEoSjXeYgP6pUXcUCZ+LNrKsDPn+mItLnn5VyyNZOJKd06hUP5vqkYwL8nWWXsii0loSF9NHNccT6ow==", + "version": "3.10.3", + "resolved": "https://registry.npmjs.org/webpack-dev-server/-/webpack-dev-server-3.10.3.tgz", + "integrity": "sha512-e4nWev8YzEVNdOMcNzNeCN947sWJNd43E5XvsJzbAL08kGc2frm1tQ32hTJslRS+H65LCb/AaUCYU7fjHCpDeQ==", + "dev": true, "requires": { "ansi-html": "0.0.7", - "array-includes": "^3.0.3", "bonjour": "^3.5.0", - "chokidar": "^1.6.0", - "compression": "^1.5.2", - "connect-history-api-fallback": "^1.3.0", - "debug": "^3.1.0", - "del": "^3.0.0", - "express": "^4.13.3", - "html-entities": "^1.2.0", - "http-proxy-middleware": "~0.17.4", - "import-local": "^0.1.1", - "internal-ip": "1.2.0", + "chokidar": "^2.1.8", + "compression": "^1.7.4", + "connect-history-api-fallback": "^1.6.0", + "debug": "^4.1.1", + "del": "^4.1.1", + "express": "^4.17.1", + "html-entities": "^1.2.1", + "http-proxy-middleware": "0.19.1", + "import-local": "^2.0.0", + "internal-ip": "^4.3.0", "ip": "^1.1.5", - "killable": "^1.0.0", - "loglevel": "^1.4.1", - "opn": "^5.1.0", - "portfinder": "^1.0.9", - "selfsigned": "^1.9.1", - "serve-index": "^1.7.2", - "sockjs": "0.3.18", - "sockjs-client": "1.1.4", - "spdy": "^3.4.1", + "is-absolute-url": "^3.0.3", + "killable": "^1.0.1", + "loglevel": "^1.6.6", + "opn": "^5.5.0", + "p-retry": "^3.0.1", + "portfinder": "^1.0.25", + "schema-utils": "^1.0.0", + "selfsigned": "^1.10.7", + "semver": "^6.3.0", + "serve-index": "^1.9.1", + "sockjs": "0.3.19", + "sockjs-client": "1.4.0", + "spdy": "^4.0.1", "strip-ansi": "^3.0.1", - "supports-color": "^4.2.1", - "webpack-dev-middleware": "^1.11.0", - "yargs": "^6.6.0" + "supports-color": "^6.1.0", + "url": "^0.11.0", + "webpack-dev-middleware": "^3.7.2", + "webpack-log": "^2.0.0", + "ws": "^6.2.1", + "yargs": "12.0.5" }, "dependencies": { - "arr-diff": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/arr-diff/-/arr-diff-2.0.0.tgz", - "integrity": "sha1-jzuCf5Vai9ZpaX5KQlasPOrjVs8=", - "requires": { - "arr-flatten": "^1.0.1" - } - }, - "array-unique": { - "version": 
"0.2.1", - "resolved": "https://registry.npmjs.org/array-unique/-/array-unique-0.2.1.tgz", - "integrity": "sha1-odl8yvy8JiXMcPrc6zalDFiwGlM=" + "ansi-regex": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", + "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=", + "dev": true }, - "braces": { - "version": "1.8.5", - "resolved": "https://registry.npmjs.org/braces/-/braces-1.8.5.tgz", - "integrity": "sha1-uneWLhLf+WnWt2cR6RS3N4V79qc=", + "chokidar": { + "version": "2.1.8", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-2.1.8.tgz", + "integrity": "sha512-ZmZUazfOzf0Nve7duiCKD23PFSCs4JPoYyccjUFF3aQkQadqBhfzhjkwBH2mNOG9cTBwhamM37EIsIkZw3nRgg==", + "dev": true, "requires": { - "expand-range": "^1.8.1", - "preserve": "^0.2.0", - "repeat-element": "^1.1.2" + "anymatch": "^2.0.0", + "async-each": "^1.0.1", + "braces": "^2.3.2", + "fsevents": "^1.2.7", + "glob-parent": "^3.1.0", + "inherits": "^2.0.3", + "is-binary-path": "^1.0.0", + "is-glob": "^4.0.0", + "normalize-path": "^3.0.0", + "path-is-absolute": "^1.0.0", + "readdirp": "^2.2.1", + "upath": "^1.1.1" } }, - "camelcase": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-3.0.0.tgz", - "integrity": "sha1-MvxLn82vhF/N9+c7uXysImHwqwo=" - }, "cliui": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/cliui/-/cliui-3.2.0.tgz", - "integrity": "sha1-EgYBU3qRbSmUD5NNo7SNWFo5IT0=", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-4.1.0.tgz", + "integrity": "sha512-4FG+RSG9DL7uEwRUZXZn3SS34DiDPfzP0VOiEwtUWlE+AR2EIg+hSyvrIgUUfhdgR/UkAeW2QHgeP+hWrXs7jQ==", + "dev": true, "requires": { - "string-width": "^1.0.1", - "strip-ansi": "^3.0.1", + "string-width": "^2.1.1", + "strip-ansi": "^4.0.0", "wrap-ansi": "^2.0.0" + }, + "dependencies": { + "ansi-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", + "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=", + "dev": true + }, + "strip-ansi": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", + "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", + "dev": true, + "requires": { + "ansi-regex": "^3.0.0" + } + } } }, - "del": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/del/-/del-3.0.0.tgz", - "integrity": "sha1-U+z2mf/LyzljdpGrE7rxYIGXZuU=", + "debug": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", + "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", + "dev": true, "requires": { - "globby": "^6.1.0", - "is-path-cwd": "^1.0.0", - "is-path-in-cwd": "^1.0.0", - "p-map": "^1.1.1", - "pify": "^3.0.0", - "rimraf": "^2.2.8" + "ms": "^2.1.1" } }, - "expand-brackets": { - "version": "0.1.5", - "resolved": "https://registry.npmjs.org/expand-brackets/-/expand-brackets-0.1.5.tgz", - "integrity": "sha1-3wcoTjQqgHzXM6xa9yQR5YHRF3s=", + "find-up": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "dev": true, "requires": { - "is-posix-bracket": "^0.1.0" + "locate-path": "^3.0.0" } }, - "extglob": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/extglob/-/extglob-0.3.2.tgz", - "integrity": "sha1-Lhj/PS9JqydlzskCPwEdqo2DSaE=", - "requires": { - "is-extglob": "^1.0.0" - }, - 
"dependencies": { - "is-extglob": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-1.0.0.tgz", - "integrity": "sha1-rEaBd8SUNAWgkvyPKXYMb/xiBsA=" - } - } + "get-caller-file": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-1.0.3.tgz", + "integrity": "sha512-3t6rVToeoZfYSGd8YoLFR2DJkiQrIiUrGcjvFX2mDw3bn6k2OtwHN0TNCLbBO+w8qTvimhDkv+LSscbJY1vE6w==", + "dev": true }, - "globby": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/globby/-/globby-6.1.0.tgz", - "integrity": "sha1-9abXDoOV4hyFj7BInWTfAkJNUGw=", + "glob-parent": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-3.1.0.tgz", + "integrity": "sha1-nmr2KZ2NO9K9QEMIMr0RPfkGxa4=", + "dev": true, "requires": { - "array-union": "^1.0.1", - "glob": "^7.0.3", - "object-assign": "^4.0.1", - "pify": "^2.0.0", - "pinkie-promise": "^2.0.0" + "is-glob": "^3.1.0", + "path-dirname": "^1.0.0" }, "dependencies": { - "pify": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", - "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw=" + "is-glob": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-3.1.0.tgz", + "integrity": "sha1-e6WuJCF4BKxwcHuWkiVnSGzD6Eo=", + "dev": true, + "requires": { + "is-extglob": "^2.1.0" + } } } }, - "has-flag": { + "is-absolute-url": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/is-absolute-url/-/is-absolute-url-3.0.3.tgz", + "integrity": "sha512-opmNIX7uFnS96NtPmhWQgQx6/NYFgsUXYMllcfzwWKUMwfo8kku1TvE6hkNcH+Q1ts5cMVrsY7j0bxXQDciu9Q==", + "dev": true + }, + "is-fullwidth-code-point": { "version": "2.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-2.0.0.tgz", - "integrity": "sha1-6CB68cx7MNRGzHC3NLXovhj4jVE=" + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", + "dev": true }, - "http-proxy-middleware": { - "version": "0.17.4", - "resolved": "https://registry.npmjs.org/http-proxy-middleware/-/http-proxy-middleware-0.17.4.tgz", - "integrity": "sha1-ZC6ISIUdZvCdTxJJEoRtuutBuDM=", + "locate-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "dev": true, "requires": { - "http-proxy": "^1.16.2", - "is-glob": "^3.1.0", - "lodash": "^4.17.2", - "micromatch": "^2.3.11" + "p-locate": "^3.0.0", + "path-exists": "^3.0.0" } }, - "import-local": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/import-local/-/import-local-0.1.1.tgz", - "integrity": "sha1-sReVcqrNwRxqkQCftDDbyrX2aKg=", - "requires": { - "pkg-dir": "^2.0.0", - "resolve-cwd": "^2.0.0" - } + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true }, - "is-fullwidth-code-point": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz", - "integrity": "sha1-754xOG8DGn8NZDr4L95QxFfvAMs=", - "requires": { - "number-is-nan": "^1.0.0" - } + "normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": 
"sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "dev": true }, - "is-glob": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-3.1.0.tgz", - "integrity": "sha1-e6WuJCF4BKxwcHuWkiVnSGzD6Eo=", + "p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, "requires": { - "is-extglob": "^2.1.0" + "p-try": "^2.0.0" } }, - "kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", + "p-locate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "dev": true, "requires": { - "is-buffer": "^1.1.5" + "p-limit": "^2.0.0" } }, - "micromatch": { - "version": "2.3.11", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-2.3.11.tgz", - "integrity": "sha1-hmd8l9FyCzY0MdBNDRUpO9OMFWU=", - "requires": { - "arr-diff": "^2.0.0", - "array-unique": "^0.2.1", - "braces": "^1.8.2", - "expand-brackets": "^0.1.4", - "extglob": "^0.3.1", - "filename-regex": "^2.0.0", - "is-extglob": "^1.0.0", - "is-glob": "^2.0.1", - "kind-of": "^3.0.2", - "normalize-path": "^2.0.1", - "object.omit": "^2.0.0", - "parse-glob": "^3.0.4", - "regex-cache": "^0.4.2" + "p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "dev": true + }, + "require-main-filename": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-1.0.1.tgz", + "integrity": "sha1-l/cXtp1IeE9fUmpsWqj/3aBVpNE=", + "dev": true + }, + "semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "dev": true + }, + "string-width": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", + "integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==", + "dev": true, + "requires": { + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^4.0.0" }, "dependencies": { - "is-extglob": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-1.0.0.tgz", - "integrity": "sha1-rEaBd8SUNAWgkvyPKXYMb/xiBsA=" + "ansi-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", + "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=", + "dev": true }, - "is-glob": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-2.0.1.tgz", - "integrity": "sha1-0Jb5JqPe1WAPP9/ZEZjLCIjC2GM=", + "strip-ansi": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", + "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", + "dev": true, "requires": { - "is-extglob": "^1.0.0" + "ansi-regex": "^3.0.0" } } } }, - "os-locale": { - "version": "1.4.0", - "resolved": "http://registry.npmjs.org/os-locale/-/os-locale-1.4.0.tgz", - "integrity": 
"sha1-IPnxeuKe00XoveWDsT0gCYA8FNk=", + "strip-ansi": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", + "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", + "dev": true, "requires": { - "lcid": "^1.0.0" + "ansi-regex": "^2.0.0" } }, - "pify": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", - "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=" + "supports-color": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-6.1.0.tgz", + "integrity": "sha512-qe1jfm1Mg7Nq/NSh6XE24gPXROEVsWHxC1LIx//XNlD9iw7YZQGjZNjYN7xGaEG6iKdA8EtNFW6R0gjnVXp+wQ==", + "dev": true, + "requires": { + "has-flag": "^3.0.0" + } }, - "sockjs-client": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/sockjs-client/-/sockjs-client-1.1.4.tgz", - "integrity": "sha1-W6vjhrd15M8U51IJEUUmVAFsixI=", + "wrap-ansi": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-2.1.0.tgz", + "integrity": "sha1-2Pw9KE3QV5T+hJc8rs3Rz4JP3YU=", + "dev": true, "requires": { - "debug": "^2.6.6", - "eventsource": "0.1.6", - "faye-websocket": "~0.11.0", - "inherits": "^2.0.1", - "json3": "^3.3.2", - "url-parse": "^1.1.8" + "string-width": "^1.0.1", + "strip-ansi": "^3.0.1" }, "dependencies": { - "debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "is-fullwidth-code-point": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz", + "integrity": "sha1-754xOG8DGn8NZDr4L95QxFfvAMs=", + "dev": true, + "requires": { + "number-is-nan": "^1.0.0" + } + }, + "string-width": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz", + "integrity": "sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M=", + "dev": true, "requires": { - "ms": "2.0.0" + "code-point-at": "^1.0.0", + "is-fullwidth-code-point": "^1.0.0", + "strip-ansi": "^3.0.0" } } } }, - "string-width": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz", - "integrity": "sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M=", - "requires": { - "code-point-at": "^1.0.0", - "is-fullwidth-code-point": "^1.0.0", - "strip-ansi": "^3.0.0" - } - }, - "supports-color": { - "version": "4.5.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-4.5.0.tgz", - "integrity": "sha1-vnoN5ITexcXN34s9WRJQRJEvY1s=", + "ws": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/ws/-/ws-6.2.1.tgz", + "integrity": "sha512-GIyAXC2cB7LjvpgMt9EKS2ldqr0MTrORaleiOno6TweZ6r3TKtoFQWay/2PceJ3RuBasOHzXNn5Lrw1X0bEjqA==", + "dev": true, "requires": { - "has-flag": "^2.0.0" + "async-limiter": "~1.0.0" } }, - "which-module": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/which-module/-/which-module-1.0.0.tgz", - "integrity": "sha1-u6Y8qGGUiZT/MHc2CJ47lgJsKk8=" - }, "yargs": { - "version": "6.6.0", - "resolved": "http://registry.npmjs.org/yargs/-/yargs-6.6.0.tgz", - "integrity": "sha1-eC7CHvQDNF+DCoCMo9UTr1YGUgg=", + "version": "12.0.5", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-12.0.5.tgz", + "integrity": "sha512-Lhz8TLaYnxq/2ObqHDql8dX8CJi97oHxrjUcYtzKbbykPtVW9WB+poxI+NM2UIzsMgNCZTIf0AQwsjK5yMAqZw==", + "dev": true, "requires": { - "camelcase": "^3.0.0", - "cliui": "^3.2.0", - "decamelize": 
"^1.1.1", + "cliui": "^4.0.0", + "decamelize": "^1.2.0", + "find-up": "^3.0.0", "get-caller-file": "^1.0.1", - "os-locale": "^1.4.0", - "read-pkg-up": "^1.0.1", + "os-locale": "^3.0.0", "require-directory": "^2.1.1", "require-main-filename": "^1.0.1", "set-blocking": "^2.0.0", - "string-width": "^1.0.2", - "which-module": "^1.0.0", - "y18n": "^3.2.1", - "yargs-parser": "^4.2.0" + "string-width": "^2.0.0", + "which-module": "^2.0.0", + "y18n": "^3.2.1 || ^4.0.0", + "yargs-parser": "^11.1.1" } }, "yargs-parser": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-4.2.1.tgz", - "integrity": "sha1-KczqwNxPA8bIe0qfIX3RjJ90hxw=", + "version": "11.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-11.1.1.tgz", + "integrity": "sha512-C6kB/WJDiaxONLJQnF8ccx9SEeoTTLek8RVbaOIsrAUS8VrBEXfmeSnCZxygc+XC2sNMBIwOOnfcxiynjHsVSQ==", + "dev": true, "requires": { - "camelcase": "^3.0.0" + "camelcase": "^5.0.0", + "decamelize": "^1.2.0" } } } }, + "webpack-log": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/webpack-log/-/webpack-log-2.0.0.tgz", + "integrity": "sha512-cX8G2vR/85UYG59FgkoMamwHUIkSSlV3bBMRsbxVXVUk2j6NleCKjQ/WE9eYg9WY4w25O9w8wKP4rzNZFmUcUg==", + "dev": true, + "requires": { + "ansi-colors": "^3.0.0", + "uuid": "^3.3.2" + } + }, "webpack-manifest-plugin": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/webpack-manifest-plugin/-/webpack-manifest-plugin-1.3.2.tgz", - "integrity": "sha512-MX60Bv2G83Zks9pi3oLOmRgnPAnwrlMn+lftMrWBm199VQjk46/xgzBi9lPfpZldw2+EI2S+OevuLIaDuxCWRw==", + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/webpack-manifest-plugin/-/webpack-manifest-plugin-2.2.0.tgz", + "integrity": "sha512-9S6YyKKKh/Oz/eryM1RyLVDVmy3NSPV0JXMRhZ18fJsq+AwGxUY34X54VNwkzYcEmEkDwNxuEOboCZEebJXBAQ==", + "dev": true, "requires": { - "fs-extra": "^0.30.0", - "lodash": ">=3.5 <5" + "fs-extra": "^7.0.0", + "lodash": ">=3.5 <5", + "object.entries": "^1.1.0", + "tapable": "^1.0.0" }, "dependencies": { "fs-extra": { - "version": "0.30.0", - "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-0.30.0.tgz", - "integrity": "sha1-8jP/zAjU2n1DLapEl3aYnbHfk/A=", + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-7.0.1.tgz", + "integrity": "sha512-YJDaCJZEnBmcbw13fvdAM9AwNOJwOzrE4pqMqBq5nFiEqXUqHwlK4B+3pUw6JNvfSPtX05xFHtYy/1ni01eGCw==", + "dev": true, "requires": { "graceful-fs": "^4.1.2", - "jsonfile": "^2.1.0", - "klaw": "^1.0.0", - "path-is-absolute": "^1.0.0", - "rimraf": "^2.2.8" - } - }, - "jsonfile": { - "version": "2.4.0", - "resolved": "http://registry.npmjs.org/jsonfile/-/jsonfile-2.4.0.tgz", - "integrity": "sha1-NzaitCi4e72gzIO1P6PWM6NcKug=", - "requires": { - "graceful-fs": "^4.1.6" + "jsonfile": "^4.0.0", + "universalify": "^0.1.0" } } } }, "webpack-sources": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/webpack-sources/-/webpack-sources-1.3.0.tgz", - "integrity": "sha512-OiVgSrbGu7NEnEvQJJgdSFPl2qWKkWq5lHMhgiToIiN9w34EBnjYzSYs+VbL5KoYiLNtFFa7BZIKxRED3I32pA==", + "version": "1.4.3", + "resolved": "https://registry.npmjs.org/webpack-sources/-/webpack-sources-1.4.3.tgz", + "integrity": "sha512-lgTS3Xhv1lCOKo7SA5TjKXMjpSM4sBjNV5+q2bqesbSPs5FjGmU6jjtBSkX9b4qW87vDIsCIlUPOEhbZrMdjeQ==", + "dev": true, "requires": { "source-list-map": "^2.0.0", "source-map": "~0.6.1" @@ -14698,28 +20769,33 @@ "source-map": { "version": "0.6.1", "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": 
"sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==" + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true } } }, "websocket-driver": { - "version": "0.7.0", - "resolved": "https://registry.npmjs.org/websocket-driver/-/websocket-driver-0.7.0.tgz", - "integrity": "sha1-DK+dLXVdk67gSdS90NP+LMoqJOs=", + "version": "0.7.3", + "resolved": "https://registry.npmjs.org/websocket-driver/-/websocket-driver-0.7.3.tgz", + "integrity": "sha512-bpxWlvbbB459Mlipc5GBzzZwhoZgGEZLuqPaR0INBGnPAY1vdBX6hPnoFXiw+3yWxDuHyQjO2oXTMyS8A5haFg==", + "dev": true, "requires": { - "http-parser-js": ">=0.4.0", + "http-parser-js": ">=0.4.0 <0.4.11", + "safe-buffer": ">=5.1.0", "websocket-extensions": ">=0.1.1" } }, "websocket-extensions": { "version": "0.1.3", "resolved": "https://registry.npmjs.org/websocket-extensions/-/websocket-extensions-0.1.3.tgz", - "integrity": "sha512-nqHUnMXmBzT0w570r2JpJxfiSD1IzoI+HGVdd3aZ0yNi3ngvQ4jv1dtHt5VGxfI2yj5yqImPhOK4vmIh2xMbGg==" + "integrity": "sha512-nqHUnMXmBzT0w570r2JpJxfiSD1IzoI+HGVdd3aZ0yNi3ngvQ4jv1dtHt5VGxfI2yj5yqImPhOK4vmIh2xMbGg==", + "dev": true }, "whatwg-encoding": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/whatwg-encoding/-/whatwg-encoding-1.0.5.tgz", "integrity": "sha512-b5lim54JOPN9HtzvK9HFXvBma/rnfFeqsic0hSpjtDbVxR3dJKLc+KB4V6GgiGOvl7CY/KNh8rxSo9DKQrnUEw==", + "dev": true, "requires": { "iconv-lite": "0.4.24" } @@ -14732,27 +20808,25 @@ "whatwg-mimetype": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/whatwg-mimetype/-/whatwg-mimetype-2.3.0.tgz", - "integrity": "sha512-M4yMwr6mAnQz76TbJm914+gPpB/nCwvZbJU28cUD6dR004SAxDLOOSUaB1JDRqLtaOV/vi0IC5lEAGFgrjGv/g==" + "integrity": "sha512-M4yMwr6mAnQz76TbJm914+gPpB/nCwvZbJU28cUD6dR004SAxDLOOSUaB1JDRqLtaOV/vi0IC5lEAGFgrjGv/g==", + "dev": true }, "whatwg-url": { "version": "6.5.0", "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-6.5.0.tgz", "integrity": "sha512-rhRZRqx/TLJQWUpQ6bmrt2UV4f0HCQ463yQuONJqC6fO2VoEb1pTYddbe59SkYq87aoM5A3bdhMZiUiVws+fzQ==", + "dev": true, "requires": { "lodash.sortby": "^4.7.0", "tr46": "^1.0.1", "webidl-conversions": "^4.0.2" } }, - "whet.extend": { - "version": "0.9.9", - "resolved": "https://registry.npmjs.org/whet.extend/-/whet.extend-0.9.9.tgz", - "integrity": "sha1-+HfVv2SMl+WqVC+twW1qJZucEaE=" - }, "which": { "version": "1.3.1", "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", + "dev": true, "requires": { "isexe": "^2.0.0" } @@ -14760,59 +20834,264 @@ "which-module": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/which-module/-/which-module-2.0.0.tgz", - "integrity": "sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=" + "integrity": "sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=", + "dev": true }, - "widest-line": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/widest-line/-/widest-line-2.0.1.tgz", - "integrity": "sha512-Ba5m9/Fa4Xt9eb2ELXt77JxVDV8w7qQrH0zS/TWSJdLyAwQjWoOzpzj5lwVftDz6n/EOu3tNACS84v509qwnJA==", + "word-wrap": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", + "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==", + "dev": true + }, + "wordwrap": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-1.0.0.tgz", + "integrity": 
"sha1-J1hIEIkUVqQXHI0CJkQa3pDLyus=", + "dev": true + }, + "workbox-background-sync": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/workbox-background-sync/-/workbox-background-sync-4.3.1.tgz", + "integrity": "sha512-1uFkvU8JXi7L7fCHVBEEnc3asPpiAL33kO495UMcD5+arew9IbKW2rV5lpzhoWcm/qhGB89YfO4PmB/0hQwPRg==", + "dev": true, "requires": { - "string-width": "^2.1.1" + "workbox-core": "^4.3.1" } }, - "window-size": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/window-size/-/window-size-0.1.0.tgz", - "integrity": "sha1-VDjNLqk7IC76Ohn+iIeu58lPnJ0=" + "workbox-broadcast-update": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/workbox-broadcast-update/-/workbox-broadcast-update-4.3.1.tgz", + "integrity": "sha512-MTSfgzIljpKLTBPROo4IpKjESD86pPFlZwlvVG32Kb70hW+aob4Jxpblud8EhNb1/L5m43DUM4q7C+W6eQMMbA==", + "dev": true, + "requires": { + "workbox-core": "^4.3.1" + } }, - "wordwrap": { - "version": "0.0.3", - "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.3.tgz", - "integrity": "sha1-o9XabNXAvAAI03I0u68b7WMFkQc=" + "workbox-build": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/workbox-build/-/workbox-build-4.3.1.tgz", + "integrity": "sha512-UHdwrN3FrDvicM3AqJS/J07X0KXj67R8Cg0waq1MKEOqzo89ap6zh6LmaLnRAjpB+bDIz+7OlPye9iii9KBnxw==", + "dev": true, + "requires": { + "@babel/runtime": "^7.3.4", + "@hapi/joi": "^15.0.0", + "common-tags": "^1.8.0", + "fs-extra": "^4.0.2", + "glob": "^7.1.3", + "lodash.template": "^4.4.0", + "pretty-bytes": "^5.1.0", + "stringify-object": "^3.3.0", + "strip-comments": "^1.0.2", + "workbox-background-sync": "^4.3.1", + "workbox-broadcast-update": "^4.3.1", + "workbox-cacheable-response": "^4.3.1", + "workbox-core": "^4.3.1", + "workbox-expiration": "^4.3.1", + "workbox-google-analytics": "^4.3.1", + "workbox-navigation-preload": "^4.3.1", + "workbox-precaching": "^4.3.1", + "workbox-range-requests": "^4.3.1", + "workbox-routing": "^4.3.1", + "workbox-strategies": "^4.3.1", + "workbox-streams": "^4.3.1", + "workbox-sw": "^4.3.1", + "workbox-window": "^4.3.1" + }, + "dependencies": { + "@babel/runtime": { + "version": "7.9.2", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.9.2.tgz", + "integrity": "sha512-NE2DtOdufG7R5vnfQUTehdTfNycfUANEtCa9PssN9O/xmTzP4E08UI797ixaei6hBEVL9BI/PsdJS5x7mWoB9Q==", + "dev": true, + "requires": { + "regenerator-runtime": "^0.13.4" + } + }, + "fs-extra": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-4.0.3.tgz", + "integrity": "sha512-q6rbdDd1o2mAnQreO7YADIxf/Whx4AHBiRf6d+/cVT8h44ss+lHgxf1FemcqDnQt9X3ct4McHr+JMGlYSsK7Cg==", + "dev": true, + "requires": { + "graceful-fs": "^4.1.2", + "jsonfile": "^4.0.0", + "universalify": "^0.1.0" + } + }, + "regenerator-runtime": { + "version": "0.13.5", + "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.5.tgz", + "integrity": "sha512-ZS5w8CpKFinUzOwW3c83oPeVXoNsrLsaCoLtJvAClH135j/R77RuymhiSErhm2lKcwSCIpmvIWSbDkIfAqKQlA==", + "dev": true + } + } + }, + "workbox-cacheable-response": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/workbox-cacheable-response/-/workbox-cacheable-response-4.3.1.tgz", + "integrity": "sha512-Rp5qlzm6z8IOvnQNkCdO9qrDgDpoPNguovs0H8C+wswLuPgSzSp9p2afb5maUt9R1uTIwOXrVQMmPfPypv+npw==", + "dev": true, + "requires": { + "workbox-core": "^4.3.1" + } + }, + "workbox-core": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/workbox-core/-/workbox-core-4.3.1.tgz", + "integrity": 
"sha512-I3C9jlLmMKPxAC1t0ExCq+QoAMd0vAAHULEgRZ7kieCdUd919n53WC0AfvokHNwqRhGn+tIIj7vcb5duCjs2Kg==", + "dev": true + }, + "workbox-expiration": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/workbox-expiration/-/workbox-expiration-4.3.1.tgz", + "integrity": "sha512-vsJLhgQsQouv9m0rpbXubT5jw0jMQdjpkum0uT+d9tTwhXcEZks7qLfQ9dGSaufTD2eimxbUOJfWLbNQpIDMPw==", + "dev": true, + "requires": { + "workbox-core": "^4.3.1" + } + }, + "workbox-google-analytics": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/workbox-google-analytics/-/workbox-google-analytics-4.3.1.tgz", + "integrity": "sha512-xzCjAoKuOb55CBSwQrbyWBKqp35yg1vw9ohIlU2wTy06ZrYfJ8rKochb1MSGlnoBfXGWss3UPzxR5QL5guIFdg==", + "dev": true, + "requires": { + "workbox-background-sync": "^4.3.1", + "workbox-core": "^4.3.1", + "workbox-routing": "^4.3.1", + "workbox-strategies": "^4.3.1" + } + }, + "workbox-navigation-preload": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/workbox-navigation-preload/-/workbox-navigation-preload-4.3.1.tgz", + "integrity": "sha512-K076n3oFHYp16/C+F8CwrRqD25GitA6Rkd6+qAmLmMv1QHPI2jfDwYqrytOfKfYq42bYtW8Pr21ejZX7GvALOw==", + "dev": true, + "requires": { + "workbox-core": "^4.3.1" + } + }, + "workbox-precaching": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/workbox-precaching/-/workbox-precaching-4.3.1.tgz", + "integrity": "sha512-piSg/2csPoIi/vPpp48t1q5JLYjMkmg5gsXBQkh/QYapCdVwwmKlU9mHdmy52KsDGIjVaqEUMFvEzn2LRaigqQ==", + "dev": true, + "requires": { + "workbox-core": "^4.3.1" + } + }, + "workbox-range-requests": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/workbox-range-requests/-/workbox-range-requests-4.3.1.tgz", + "integrity": "sha512-S+HhL9+iTFypJZ/yQSl/x2Bf5pWnbXdd3j57xnb0V60FW1LVn9LRZkPtneODklzYuFZv7qK6riZ5BNyc0R0jZA==", + "dev": true, + "requires": { + "workbox-core": "^4.3.1" + } + }, + "workbox-routing": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/workbox-routing/-/workbox-routing-4.3.1.tgz", + "integrity": "sha512-FkbtrODA4Imsi0p7TW9u9MXuQ5P4pVs1sWHK4dJMMChVROsbEltuE79fBoIk/BCztvOJ7yUpErMKa4z3uQLX+g==", + "dev": true, + "requires": { + "workbox-core": "^4.3.1" + } + }, + "workbox-strategies": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/workbox-strategies/-/workbox-strategies-4.3.1.tgz", + "integrity": "sha512-F/+E57BmVG8dX6dCCopBlkDvvhg/zj6VDs0PigYwSN23L8hseSRwljrceU2WzTvk/+BSYICsWmRq5qHS2UYzhw==", + "dev": true, + "requires": { + "workbox-core": "^4.3.1" + } + }, + "workbox-streams": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/workbox-streams/-/workbox-streams-4.3.1.tgz", + "integrity": "sha512-4Kisis1f/y0ihf4l3u/+ndMkJkIT4/6UOacU3A4BwZSAC9pQ9vSvJpIi/WFGQRH/uPXvuVjF5c2RfIPQFSS2uA==", + "dev": true, + "requires": { + "workbox-core": "^4.3.1" + } + }, + "workbox-sw": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/workbox-sw/-/workbox-sw-4.3.1.tgz", + "integrity": "sha512-0jXdusCL2uC5gM3yYFT6QMBzKfBr2XTk0g5TPAV4y8IZDyVNDyj1a8uSXy3/XrvkVTmQvLN4O5k3JawGReXr9w==", + "dev": true + }, + "workbox-webpack-plugin": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/workbox-webpack-plugin/-/workbox-webpack-plugin-4.3.1.tgz", + "integrity": "sha512-gJ9jd8Mb8wHLbRz9ZvGN57IAmknOipD3W4XNE/Lk/4lqs5Htw4WOQgakQy/o/4CoXQlMCYldaqUg+EJ35l9MEQ==", + "dev": true, + "requires": { + "@babel/runtime": "^7.0.0", + "json-stable-stringify": "^1.0.1", + "workbox-build": "^4.3.1" + } + }, + "workbox-window": { + "version": "4.3.1", + "resolved": 
"https://registry.npmjs.org/workbox-window/-/workbox-window-4.3.1.tgz", + "integrity": "sha512-C5gWKh6I58w3GeSc0wp2Ne+rqVw8qwcmZnQGpjiek8A2wpbxSJb1FdCoQVO+jDJs35bFgo/WETgl1fqgsxN0Hg==", + "dev": true, + "requires": { + "workbox-core": "^4.3.1" + } }, "worker-farm": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/worker-farm/-/worker-farm-1.6.0.tgz", - "integrity": "sha512-6w+3tHbM87WnSWnENBUvA2pxJPLhQUg5LKwUQHq3r+XPhIM+Gh2R5ycbwPCyuGbNg+lPgdcnQUhuC02kJCvffQ==", + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/worker-farm/-/worker-farm-1.7.0.tgz", + "integrity": "sha512-rvw3QTZc8lAxyVrqcSGVm5yP/IJ2UcB3U0graE3LCFoZ0Yn2x4EoVSqJKdB/T5M+FLcRPjz4TDacRf3OCfNUzw==", + "dev": true, "requires": { "errno": "~0.1.7" } }, + "worker-rpc": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/worker-rpc/-/worker-rpc-0.1.1.tgz", + "integrity": "sha512-P1WjMrUB3qgJNI9jfmpZ/htmBEjFh//6l/5y8SD9hg1Ef5zTTVVoRjTrTEzPrNBQvmhMxkoTsjOXN10GWU7aCg==", + "dev": true, + "requires": { + "microevent.ts": "~0.1.1" + } + }, "wrap-ansi": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-2.1.0.tgz", - "integrity": "sha1-2Pw9KE3QV5T+hJc8rs3Rz4JP3YU=", + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-5.1.0.tgz", + "integrity": "sha512-QC1/iN/2/RPVJ5jYK8BGttj5z83LmSKmvbvrXPNCLZSEb32KKVDJDl/MOt2N01qU2H/FkzEa9PKto1BqDjtd7Q==", + "dev": true, "requires": { - "string-width": "^1.0.1", - "strip-ansi": "^3.0.1" + "ansi-styles": "^3.2.0", + "string-width": "^3.0.0", + "strip-ansi": "^5.0.0" }, "dependencies": { + "emoji-regex": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", + "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==", + "dev": true + }, "is-fullwidth-code-point": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz", - "integrity": "sha1-754xOG8DGn8NZDr4L95QxFfvAMs=", - "requires": { - "number-is-nan": "^1.0.0" - } + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", + "dev": true }, "string-width": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz", - "integrity": "sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M=", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", + "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", + "dev": true, "requires": { - "code-point-at": "^1.0.0", - "is-fullwidth-code-point": "^1.0.0", - "strip-ansi": "^3.0.0" + "emoji-regex": "^7.0.1", + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^5.1.0" } } } @@ -14820,12 +21099,23 @@ "wrappy": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=" + "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", + "dev": true + }, + "write": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/write/-/write-1.0.3.tgz", + "integrity": "sha512-/lg70HAjtkUgWPVZhZcm+T4hkL8Zbtp1nFNOn3lRrxnlv50SRBv7cR7RqR+GMsd3hUXy9hWBo4CHTbFTcOYwig==", + "dev": true, + "requires": { + "mkdirp": "^0.5.1" + } }, "write-file-atomic": { - "version": "2.3.0", - "resolved": 
"https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-2.3.0.tgz", - "integrity": "sha512-xuPeK4OdjWqtfi59ylvVL0Yn35SF3zgcAcv7rBPFHVaEapaDr4GdGgm3j7ckTwH9wHL7fGmgfAnb0+THrHb8tA==", + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-2.4.1.tgz", + "integrity": "sha512-TGHFeZEZMnv+gBFRfjAcxL5bPHrsGKtnb4qsFAws7/vlh+QfwAaySIw4AXP9ZskTTh5GWu3FLuJhsWVdiJPGvg==", + "dev": true, "requires": { "graceful-fs": "^4.1.11", "imurmurhash": "^0.1.4", @@ -14836,103 +21126,170 @@ "version": "5.2.2", "resolved": "https://registry.npmjs.org/ws/-/ws-5.2.2.tgz", "integrity": "sha512-jaHFD6PFv6UgoIVda6qZllptQsMlDEJkTQcybzzXDYM1XO9Y8em691FGMPmM46WGyLU4z9KMgQN+qrux/nhlHA==", - "requires": { - "async-limiter": "~1.0.0" - } - }, - "xdg-basedir": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/xdg-basedir/-/xdg-basedir-3.0.0.tgz", - "integrity": "sha1-SWsswQnsqNus/i3HK2A8F8WHCtQ=" - }, - "xhr": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/xhr/-/xhr-2.5.0.tgz", - "integrity": "sha512-4nlO/14t3BNUZRXIXfXe+3N6w3s1KoxcJUUURctd64BLRe67E4gRwp4PjywtDY72fXpZ1y6Ch0VZQRY/gMPzzQ==", "dev": true, "requires": { - "global": "~4.3.0", - "is-function": "^1.0.1", - "parse-headers": "^2.0.0", - "xtend": "^4.0.0" + "async-limiter": "~1.0.0" } }, "xml-name-validator": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/xml-name-validator/-/xml-name-validator-3.0.0.tgz", - "integrity": "sha512-A5CUptxDsvxKJEU3yO6DuWBSJz/qizqzJKOMIfUJHETbBw/sFaDxgd6fxm1ewUaM0jZ444Fc5vC5ROYurg/4Pw==" + "integrity": "sha512-A5CUptxDsvxKJEU3yO6DuWBSJz/qizqzJKOMIfUJHETbBw/sFaDxgd6fxm1ewUaM0jZ444Fc5vC5ROYurg/4Pw==", + "dev": true }, - "xml-parse-from-string": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/xml-parse-from-string/-/xml-parse-from-string-1.0.1.tgz", - "integrity": "sha1-qQKekp09vN7RafPG4oI42VpdWig=", + "xmlchars": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/xmlchars/-/xmlchars-2.2.0.tgz", + "integrity": "sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw==", "dev": true }, - "xml2js": { - "version": "0.4.19", - "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.19.tgz", - "integrity": "sha512-esZnJZJOiJR9wWKMyuvSE1y6Dq5LCuJanqhxslH2bxM6duahNZ+HMpCLhBQGZkbX6xRf8x1Y2eJlgt2q3qo49Q==", + "xregexp": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/xregexp/-/xregexp-4.3.0.tgz", + "integrity": "sha512-7jXDIFXh5yJ/orPn4SXjuVrWWoi4Cr8jfV1eHv9CixKSbU+jY4mxfrBwAuDvupPNKpMUY+FeIqsVw/JLT9+B8g==", "dev": true, "requires": { - "sax": ">=0.6.0", - "xmlbuilder": "~9.0.1" + "@babel/runtime-corejs3": "^7.8.3" } }, - "xmlbuilder": { - "version": "9.0.7", - "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-9.0.7.tgz", - "integrity": "sha1-Ey7mPS7FVlxVfiD0wi35rKaGsQ0=", - "dev": true - }, "xtend": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.1.tgz", - "integrity": "sha1-pcbVMr5lbiPbgg77lDofBJmNY68=" + "integrity": "sha1-pcbVMr5lbiPbgg77lDofBJmNY68=", + "dev": true }, "y18n": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/y18n/-/y18n-3.2.1.tgz", - "integrity": "sha1-bRX7qITAhnnA136I53WegR4H+kE=" + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.0.tgz", + "integrity": "sha512-r9S/ZyXu/Xu9q1tYlpsLIsa3EeLXXk0VwlxqTcFRfg9EhMW+17kbt9G0NrgCmhGb5vT2hyhJZLfDGx+7+5Uj/w==", + "dev": true }, "yallist": { - "version": "2.1.2", - "resolved": 
"https://registry.npmjs.org/yallist/-/yallist-2.1.2.tgz", - "integrity": "sha1-HBH5IY8HYImkfdUS+TxmmaaoHVI=" + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", + "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==", + "dev": true + }, + "yaml": { + "version": "1.8.3", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.8.3.tgz", + "integrity": "sha512-X/v7VDnK+sxbQ2Imq4Jt2PRUsRsP7UcpSl3Llg6+NRRqWLIvxkMFYtH1FmvwNGYRKKPa+EPA4qDBlI9WVG1UKw==", + "dev": true, + "requires": { + "@babel/runtime": "^7.8.7" + }, + "dependencies": { + "@babel/runtime": { + "version": "7.9.2", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.9.2.tgz", + "integrity": "sha512-NE2DtOdufG7R5vnfQUTehdTfNycfUANEtCa9PssN9O/xmTzP4E08UI797ixaei6hBEVL9BI/PsdJS5x7mWoB9Q==", + "dev": true, + "requires": { + "regenerator-runtime": "^0.13.4" + } + }, + "regenerator-runtime": { + "version": "0.13.5", + "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.5.tgz", + "integrity": "sha512-ZS5w8CpKFinUzOwW3c83oPeVXoNsrLsaCoLtJvAClH135j/R77RuymhiSErhm2lKcwSCIpmvIWSbDkIfAqKQlA==", + "dev": true + } + } }, "yargs": { - "version": "10.1.2", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-10.1.2.tgz", - "integrity": "sha512-ivSoxqBGYOqQVruxD35+EyCFDYNEFL/Uo6FcOnz+9xZdZzK0Zzw4r4KhbrME1Oo2gOggwJod2MnsdamSG7H9ig==", - "requires": { - "cliui": "^4.0.0", - "decamelize": "^1.1.1", - "find-up": "^2.1.0", - "get-caller-file": "^1.0.1", - "os-locale": "^2.0.0", + "version": "13.3.2", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-13.3.2.tgz", + "integrity": "sha512-AX3Zw5iPruN5ie6xGRIDgqkT+ZhnRlZMLMHAs8tg7nRruy2Nb+i5o9bwghAogtM08q1dpr2LVoS8KSTMYpWXUw==", + "dev": true, + "requires": { + "cliui": "^5.0.0", + "find-up": "^3.0.0", + "get-caller-file": "^2.0.1", "require-directory": "^2.1.1", - "require-main-filename": "^1.0.1", + "require-main-filename": "^2.0.0", "set-blocking": "^2.0.0", - "string-width": "^2.0.0", + "string-width": "^3.0.0", "which-module": "^2.0.0", - "y18n": "^3.2.1", - "yargs-parser": "^8.1.0" + "y18n": "^4.0.0", + "yargs-parser": "^13.1.2" + }, + "dependencies": { + "emoji-regex": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", + "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==", + "dev": true + }, + "find-up": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "dev": true, + "requires": { + "locate-path": "^3.0.0" + } + }, + "is-fullwidth-code-point": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", + "dev": true + }, + "locate-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "dev": true, + "requires": { + "p-locate": "^3.0.0", + "path-exists": "^3.0.0" + } + }, + "p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": 
"sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "requires": { + "p-try": "^2.0.0" + } + }, + "p-locate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "dev": true, + "requires": { + "p-limit": "^2.0.0" + } + }, + "p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "dev": true + }, + "string-width": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", + "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", + "dev": true, + "requires": { + "emoji-regex": "^7.0.1", + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^5.1.0" + } + } } }, "yargs-parser": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-8.1.0.tgz", - "integrity": "sha512-yP+6QqN8BmrgW2ggLtTbdrOyBNSI7zBa4IykmiV5R1wl1JWNxQvWhMfMdmzIYtKU7oP3OOInY/tl2ov3BDjnJQ==", - "requires": { - "camelcase": "^4.1.0" - } - }, - "yauzl": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/yauzl/-/yauzl-2.4.1.tgz", - "integrity": "sha1-lSj0QtqxsihOWLQ3m7GU4i4MQAU=", + "version": "13.1.2", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-13.1.2.tgz", + "integrity": "sha512-3lbsNRf/j+A4QuSZfDRA7HRSfWrzO0YjqTJd5kjAq37Zep1CEgaYmrH9Q3GwPiB9cHyd1Y1UwggGhJGoxipbzg==", "dev": true, "requires": { - "fd-slicer": "~1.0.1" + "camelcase": "^5.0.0", + "decamelize": "^1.2.0" } }, "yn": { diff --git a/frontend/package.json b/frontend/package.json index 1756ea907a1..1fe182a67d1 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -3,102 +3,131 @@ "version": "0.1.0", "private": true, "dependencies": { - "@grpc/proto-loader": "^0.3.0", + "@kubeflow/frontend": "github:kubeflow/frontend#fffc59d", "@material-ui/core": "3.7.1", "@material-ui/icons": "^3.0.1", - "@types/js-yaml": "^3.11.2", "brace": "^0.11.1", "d3": "^5.7.0", "d3-dsv": "^1.0.10", "dagre": "^0.8.2", "google-protobuf": "^3.6.1", - "grpc": "^1.15.0", - "grpc-web": "^1.0.0", "http-proxy-middleware": "^0.19.0", "immer": "^1.7.4", - "js-yaml": "^3.12.0", - "lodash": ">=4.17.11", - "markdown-it": "^8.4.2", + "js-yaml": "^3.13.1", + "lodash": "^4.17.15", + "markdown-to-jsx": "^6.10.3", "portable-fetch": "^3.0.0", "re-resizable": "^4.9.0", - "react": "^16.7.0", + "react": "^16.12.0", "react-ace": "^7.0.2", - "react-dom": "^16.5.2", + "react-dom": "^16.12.0", "react-dropzone": "^5.1.0", "react-router-dom": "^4.3.1", - "react-scripts-ts": "2.17.0", "react-virtualized": "^9.20.1", "react-vis": "^1.11.2", - "ts-protoc-gen": "^0.10.0", - "typestyle": "^2.0.1" + "typestyle": "^2.0.4" }, "scripts": { "analyze-bundle": "node analyze_bundle.js", - "apis": "npm run apis:experiment && npm run apis:job && npm run apis:pipeline && npm run apis:run && npm run apis:filter && npm run apis:visualization", + "apis": "npm run apis:experiment && npm run apis:job && npm run apis:pipeline && npm run apis:run && npm run apis:filter && npm run apis:visualization && npm run apis:auth", "apis:experiment": "java -jar swagger-codegen-cli.jar generate -i ../backend/api/swagger/experiment.swagger.json -l typescript-fetch -o ./src/apis/experiment 
-c ./swagger-config.json", "apis:job": "java -jar swagger-codegen-cli.jar generate -i ../backend/api/swagger/job.swagger.json -l typescript-fetch -o ./src/apis/job -c ./swagger-config.json", "apis:pipeline": "java -jar swagger-codegen-cli.jar generate -i ../backend/api/swagger/pipeline.swagger.json -l typescript-fetch -o ./src/apis/pipeline -c ./swagger-config.json", "apis:run": "java -jar swagger-codegen-cli.jar generate -i ../backend/api/swagger/run.swagger.json -l typescript-fetch -o ./src/apis/run -c ./swagger-config.json", "apis:filter": "java -jar swagger-codegen-cli.jar generate -i ../backend/api/swagger/filter.swagger.json -l typescript-fetch -o ./src/apis/filter -c ./swagger-config.json", "apis:visualization": "java -jar swagger-codegen-cli.jar generate -i ../backend/api/swagger/visualization.swagger.json -l typescript-fetch -o ./src/apis/visualization -c ./swagger-config.json", - "build": "react-scripts-ts build", + "apis:auth": "java -jar swagger-codegen-cli.jar generate -i ../backend/api/swagger/auth.swagger.json -l typescript-fetch -o ./server/src/generated/apis/auth -c ./swagger-config.json", + "build": "npm run lint && EXTEND_ESLINT=true react-scripts build", "docker": "COMMIT_HASH=`git rev-parse HEAD`; docker build -q -t ml-pipelines-frontend:${COMMIT_HASH} --build-arg COMMIT_HASH=${COMMIT_HASH} --build-arg DATE=\"`date -u`\" -f Dockerfile ..", - "eject": "react-scripts-ts eject", + "eject": "react-scripts eject", + "format": "prettier --write './**/*.{ts,tsx}'", + "format:check": "prettier --check './**/*.{ts,tsx}' || node ./scripts/check-format-error-info.js", "java": "java -version", - "mock:api": "ts-node-dev -O '{\"module\": \"commonjs\"}' mock-backend/mock-api-server.ts 3001", + "lint": "eslint --ext js,ts,tsx src", + "mock:api": "ts-node-dev -O '{\"module\": \"commonjs\"}' --project mock-backend/tsconfig.json mock-backend/mock-api-server.ts 3001", "mock:server": "node server/dist/server.js build", - "postinstall": "cd ./server && npm i && cd ../mock-backend && npm i && cd ../src/generated/src/apis/metadata && npm i", - "start:proxies": "./start-proxies.sh", - "start": "react-scripts-ts start", - "test": "react-scripts-ts test --env=jsdom", - "test:coverage": "npm test -- --env=jsdom --coverage", - "test:coveralls": "npm run test:coverage && cat ./coverage/lcov.info | ./node_modules/coveralls/bin/coveralls.js", - "vr-approve": "backstop approve", - "vr-test": "ts-node -O '{\"module\": \"commonjs\"}' backstop.ts" + "mock:server:inspect": "node inspect server/dist/server.js build", + "postinstall": "cd ./server && npm ci && cd ../mock-backend && npm ci", + "start:proxy": "./scripts/start-proxy.sh", + "start:proxy-and-server": "./scripts/start-proxy-and-server.sh", + "start:proxy-and-server-inspect": "./scripts/start-proxy-and-server.sh --inspect", + "start": "EXTEND_ESLINT=true react-scripts start", + "sync-backend-sample-config": "node scripts/sync-backend-sample-config.js", + "test": "react-scripts test", + "test:server:coverage": "cd ./server && npm test -- --coverage && cd ..", + "test:coverage": "npm test -- --coverage && npm run test:server:coverage", + "test:ci": "export CI=true && npm run format:check && npm run lint && npm run test:coverage", + "test:ci:prow": "npm set unsafe-perm true && npm ci && npm run test:ci && ./scripts/report-coveralls.sh" }, "devDependencies": { + "@google-cloud/storage": "^4.1.3", + "@testing-library/react": "^9.4.0", "@types/d3": "^5.0.0", "@types/d3-dsv": "^1.0.33", "@types/dagre": "^0.7.40", - "@types/enzyme": "^3.1.15", - 
"@types/enzyme-adapter-react-16": "^1.0.3", + "@types/enzyme": "^3.10.3", + "@types/enzyme-adapter-react-16": "^1.0.5", "@types/express": "^4.16.0", + "@types/google-protobuf": "^3.7.2", "@types/http-proxy-middleware": "^0.17.5", "@types/jest": "^23.3.2", - "@types/js-yaml": "^3.11.2", + "@types/js-yaml": "^3.12.3", "@types/lodash": ">=4.14.117", - "@types/node": "^10.10.1", - "@types/react": "^16.7.18", - "@types/react-dom": "^16.0.7", + "@types/markdown-to-jsx": "^6.9.0", + "@types/node": "^10.17.11", + "@types/prettier": "^1.19.0", + "@types/react": "^16.9.22", + "@types/react-dom": "^16.9.5", "@types/react-router-dom": "^4.3.1", "@types/react-test-renderer": "^16.0.2", "@types/react-virtualized": "^9.18.7", - "backstopjs": "^3.5.16", + "browserslist": "4.8.7", "coveralls": "^3.0.2", - "enzyme": "^3.7.0", - "enzyme-adapter-react-16": "^1.5.0", + "enzyme": "^3.10.0", + "enzyme-adapter-react-16": "^1.15.1", "enzyme-to-json": "^3.3.4", + "prettier": "1.19.1", "react-router-test-context": "^0.1.0", + "react-scripts": "^3.4.1", "react-test-renderer": "^16.5.2", + "snapshot-diff": "^0.6.1", "swagger-ts-client": "^0.9.6", "ts-node": "^7.0.1", "ts-node-dev": "^1.0.0-pre.30", - "typescript": "^3.3.1", - "webpack-bundle-analyzer": "^3.0.2" + "tslint-config-prettier": "^1.18.0", + "typescript": "^3.7.5", + "webpack": "4.42.0", + "webpack-bundle-analyzer": "^3.6.1" }, "homepage": "./", "jest": { "collectCoverageFrom": [ "src/**/*.{ts,tsx}", + "!src/**/*.d.ts", "!src/apis/**/*.ts", "!src/icons/*.tsx", "!src/third_party/*", "!src/index.tsx", "!src/CSSReset.tsx" ], + "globalSetup": "./global-setup.js", "snapshotSerializers": [ + "./src/__serializers__/mock-function", + "snapshot-diff/serializer.js", "enzyme-to-json/serializer" ] }, - "proxy": "http://localhost:3001" + "proxy": "http://localhost:3001", + "browserslist": { + "production": [ + ">0.2%", + "not dead", + "not op_mini all" + ], + "development": [ + "last 1 chrome version", + "last 1 firefox version", + "last 1 safari version" + ] + } } diff --git a/frontend/public/index.html b/frontend/public/index.html index 5fbffbcb1fa..9b6b7c44a13 100644 --- a/frontend/public/index.html +++ b/frontend/public/index.html @@ -20,9 +20,13 @@ - - + Kubeflow Pipelines + +