diff --git a/Makefile b/Makefile
index f86c82cb9..58bd74e2c 100644
--- a/Makefile
+++ b/Makefile
@@ -24,5 +24,5 @@ clean:
 codestyle:
 	which flake8 || echo "Install flake8 with pip3 install --user flake8"
 	# ignores line length and reclass related errors
-	flake8 --ignore E501 . | grep -v "reclass"
+	flake8 --ignore E501 . --exclude=reclass
 	@echo
diff --git a/kapitan/cli.py b/kapitan/cli.py
index 9f04c5254..9343609cc 100644
--- a/kapitan/cli.py
+++ b/kapitan/cli.py
@@ -203,7 +203,10 @@ def main():
         if args.vars:
             ext_vars = dict(var.split('=') for var in args.vars)
         json_output = None
-        _search_imports = lambda cwd, imp: search_imports(cwd, imp, search_paths)
+
+        def _search_imports(cwd, imp):
+            return search_imports(cwd, imp, search_paths)
+
         json_output = jsonnet_file(file_path, import_callback=_search_imports,
                                    native_callbacks=resource_callbacks(search_paths),
                                    ext_vars=ext_vars)
diff --git a/kapitan/inputs/__init__.py b/kapitan/inputs/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/kapitan/inputs/base.py b/kapitan/inputs/base.py
new file mode 100644
index 000000000..5cf01f591
--- /dev/null
+++ b/kapitan/inputs/base.py
@@ -0,0 +1,144 @@
+#!/usr/bin/env python3
+#
+# Copyright 2018 The Kapitan Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import errno
+import logging
+import os
+import yaml
+import ujson as json
+
+from kapitan.errors import CompileError, KapitanError
+from kapitan.refs.base import Revealer
+from kapitan.utils import PrettyDumper
+
+logger = logging.getLogger(__name__)
+
+
+class InputType(object):
+    def __init__(self, type_name, compile_path, search_paths, ref_controller):
+        self.type_name = type_name
+        self.compile_path = compile_path
+        self.search_paths = search_paths
+        self.ref_controller = ref_controller
+
+    def compile_obj(self, comp_obj, ext_vars, **kwargs):
+        """
+        run compile_input_path() for each input_path in comp_obj
+        kwargs are passed into compile_input_path()
+        """
+        input_type = comp_obj["input_type"]
+        assert input_type == self.type_name
+        input_paths = comp_obj["input_paths"]
+
+        for input_path in input_paths:
+            self.compile_input_path(input_path, comp_obj, ext_vars, **kwargs)
+
+    def compile_input_path(self, input_path, comp_obj, ext_vars, **kwargs):
+        """
+        compile and validate input_path in comp_obj
+        kwargs are passed into compile_file()
+        """
+        target_name = ext_vars["target"]
+        output_path = comp_obj["output_path"]
+        output_type = comp_obj.get("output_type", self.default_output_type())
+        file_found = False
+
+        for path in self.search_paths:
+            compile_file_sp = os.path.join(path, input_path)
+            if os.path.exists(compile_file_sp):
+                file_found = True
+                logger.debug("Compiling %s", compile_file_sp)
+                try:
+                    _compile_path = os.path.join(self.compile_path, target_name, output_path)
+                    self.compile_file(compile_file_sp, _compile_path, ext_vars, output=output_type,
+                                      target_name=target_name, **kwargs)
+                except KapitanError as e:
+                    raise CompileError("{}\nCompile error: failed to compile target: {}".format(e, target_name))
+
+        if not file_found:
+            raise CompileError("Compile error: {} for target: {} not found in "
+                               "search_paths: {}".format(input_path, target_name, self.search_paths))
+
+    def make_compile_dirs(self, target_name, output_path):
+        """make compile dirs, skips if dirs exist"""
+        _compile_path = os.path.join(self.compile_path, target_name, output_path)
+        # support writing to an already existent dir
+        os.makedirs(_compile_path, exist_ok=True)
+
+    def compile_file(self, file_path, compile_path, ext_vars, **kwargs):
+        """implements compilation for file_path to compile_path with ext_vars"""
+        raise NotImplementedError
+
+    def default_output_type(self):
+        "returns default output_type value"
+        raise NotImplementedError
+
+
+class CompilingFile(object):
+    def __init__(self, context, fp, ref_controller, **kwargs):
+        self.fp = fp
+        self.ref_controller = ref_controller
+        self.kwargs = kwargs
+        self.revealer = Revealer(ref_controller)
+
+    def write(self, data):
+        """write data into file"""
+        reveal = self.kwargs.get('reveal', False)
+        target_name = self.kwargs.get('target_name', None)
+        if reveal:
+            self.fp.write(self.revealer.reveal_raw(data))
+        else:
+            self.fp.write(self.revealer.compile_raw(data, target_name=target_name))
+
+    def write_yaml(self, obj):
+        """recursively compile or reveal refs and convert obj to yaml and write to file"""
+        indent = self.kwargs.get('indent', 2)
+        reveal = self.kwargs.get('reveal', False)
+        target_name = self.kwargs.get('target_name', None)
+        if reveal:
+            self.revealer.reveal_obj(obj)
+        else:
+            self.revealer.compile_obj(obj, target_name=target_name)
+        yaml.dump(obj, stream=self.fp, indent=indent, Dumper=PrettyDumper, default_flow_style=False)
+        logger.debug("Wrote %s", self.fp.name)
+
+    def write_json(self, obj):
+        """recursively hash or reveal refs and convert obj to json and write to file"""
+        indent = self.kwargs.get('indent', 2)
+        reveal = self.kwargs.get('reveal', False)
+        target_name = self.kwargs.get('target_name', None)
+        if reveal:
+            self.revealer.reveal_obj(obj)
+        else:
+            self.revealer.compile_obj(obj, target_name=target_name)
+        json.dump(obj, self.fp, indent=indent, escape_forward_slashes=False)
+        logger.debug("Wrote %s", self.fp.name)
+
+
+class CompiledFile(object):
+    def __init__(self, name, ref_controller, **kwargs):
+        self.name = name
+        self.fp = None
+        self.ref_controller = ref_controller
+        self.kwargs = kwargs
+
+    def __enter__(self):
+        mode = self.kwargs.get("mode", "r")
+        self.fp = open(self.name, mode)
+        return CompilingFile(self, self.fp, self.ref_controller, **self.kwargs)
+
+    def __exit__(self, *args):
+        self.fp.close()
diff --git a/kapitan/inputs/jinja2.py b/kapitan/inputs/jinja2.py
new file mode 100644
index 000000000..47b4b675d
--- /dev/null
+++ b/kapitan/inputs/jinja2.py
@@ -0,0 +1,61 @@
+#!/usr/bin/env python3
+#
+# Copyright 2018 The Kapitan Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import errno
+import logging
+import os
+
+from kapitan.inputs.base import InputType, CompiledFile
+from kapitan.resources import inventory
+from kapitan.utils import render_jinja2
+
+logger = logging.getLogger(__name__)
+
+
+class Jinja2(InputType):
+    def __init__(self, compile_path, search_paths, ref_controller):
+        super().__init__("jinja2", compile_path, search_paths, ref_controller)
+
+    def compile_file(self, file_path, compile_path, ext_vars, **kwargs):
+        """
+        Write items in path as jinja2 rendered files to compile_path.
+        path can be either a file or directory.
+        kwargs:
+            reveal: default False, set to reveal refs on compile
+            target_name: default None, set to current target being compiled
+        """
+        reveal = kwargs.get('reveal', False)
+        target_name = kwargs.get('target_name', None)
+
+        # set ext_vars and inventory for jinja2 context
+        context = ext_vars.copy()
+        context["inventory"] = inventory(self.search_paths, target_name)
+        context["inventory_global"] = inventory(self.search_paths, None)
+
+        for item_key, item_value in render_jinja2(file_path, context).items():
+            full_item_path = os.path.join(compile_path, item_key)
+            os.makedirs(os.path.dirname(full_item_path), exist_ok=True)
+
+            with CompiledFile(full_item_path, self.ref_controller, mode="w", reveal=reveal,
+                              target_name=target_name) as fp:
+                fp.write(item_value["content"])
+                mode = item_value["mode"]
+                os.chmod(full_item_path, mode)
+                logger.debug("Wrote %s with mode %.4o", full_item_path, mode)
+
+    def default_output_type(self):
+        # no output_type options for jinja2
+        return None
diff --git a/kapitan/inputs/jsonnet.py b/kapitan/inputs/jsonnet.py
new file mode 100644
index 000000000..ac8e6c3fb
--- /dev/null
+++ b/kapitan/inputs/jsonnet.py
@@ -0,0 +1,77 @@
+#!/usr/bin/env python3
+#
+# Copyright 2018 The Kapitan Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import os
+import ujson as json
+
+from kapitan.inputs.base import InputType, CompiledFile
+from kapitan.resources import search_imports, resource_callbacks
+from kapitan.utils import jsonnet_file, prune_empty
+
+logger = logging.getLogger(__name__)
+
+
+class Jsonnet(InputType):
+    def __init__(self, compile_path, search_paths, ref_controller):
+        super().__init__("jsonnet", compile_path, search_paths, ref_controller)
+
+    def compile_file(self, file_path, compile_path, ext_vars, **kwargs):
+        """
+        Write file_path (jsonnet evaluated) items as files to compile_path.
+        ext_vars will be passed as parameters to jsonnet_file()
+        kwargs:
+            output: default 'yaml', accepts 'json'
+            prune: default False, accepts True
+            reveal: default False, set to reveal refs on compile
+            target_name: default None, set to current target being compiled
+            indent: default 2
+        """
+        def _search_imports(cwd, imp):
+            return search_imports(cwd, imp, self.search_paths)
+
+        json_output = jsonnet_file(file_path, import_callback=_search_imports,
+                                   native_callbacks=resource_callbacks(self.search_paths),
+                                   ext_vars=ext_vars)
+        json_output = json.loads(json_output)
+
+        output = kwargs.get('output', 'yaml')
+        prune = kwargs.get('prune', False)
+        reveal = kwargs.get('reveal', False)
+        target_name = kwargs.get('target_name', None)
+        indent = kwargs.get('indent', 2)
+
+        if prune:
+            json_output = prune_empty(json_output)
+            logger.debug("Pruned output for: %s", file_path)
+
+        for item_key, item_value in json_output.items():
+            # write each item to disk
+            if output == 'json':
+                file_path = os.path.join(compile_path, '%s.%s' % (item_key, output))
+                with CompiledFile(file_path, self.ref_controller, mode="w", reveal=reveal, target_name=target_name,
+                                  indent=indent) as fp:
+                    fp.write_json(item_value)
+            elif output == 'yaml':
+                file_path = os.path.join(compile_path, '%s.%s' % (item_key, "yml"))
+                with CompiledFile(file_path, self.ref_controller, mode="w", reveal=reveal, target_name=target_name,
+                                  indent=indent) as fp:
+                    fp.write_yaml(item_value)
+            else:
+                raise ValueError('output is neither "json" nor "yaml"')
+
+    def default_output_type(self):
+        return "yaml"
diff --git a/kapitan/refs/secrets/awskms.py b/kapitan/refs/secrets/awskms.py
index 259c2bcaf..d64e67b65 100644
--- a/kapitan/refs/secrets/awskms.py
+++ b/kapitan/refs/secrets/awskms.py
@@ -114,10 +114,7 @@ def _encrypt(self, data, key, encode_base64):
         if isinstance(data, str):
             _data = data.encode()
         try:
-            response = awskms_obj().encrypt(
-                KeyId=key,
-                Plaintext=_data
-            )
+            response = awskms_obj().encrypt(KeyId=key, Plaintext=_data)
             ciphertext = base64.b64encode(response['CiphertextBlob'])
             self.data = ciphertext
             self.key = key
diff --git a/kapitan/targets.py b/kapitan/targets.py
index de906d765..dcbbab351 100644
--- a/kapitan/targets.py
+++ b/kapitan/targets.py
@@ -18,8 +18,6 @@
 
 import logging
 import os
-import errno
-import ujson as json
 import shutil
 import sys
 from functools import partial
@@ -29,11 +27,12 @@
 import yaml
 import time
 
-from kapitan.resources import search_imports, resource_callbacks, inventory, inventory_reclass
-from kapitan.utils import jsonnet_file, prune_empty, render_jinja2, PrettyDumper, hashable_lru_cache
+from kapitan.resources import inventory_reclass
+from kapitan.utils import hashable_lru_cache
 from kapitan.utils import directory_hash, dictionary_hash
 from kapitan.errors import KapitanError, CompileError
-from kapitan.refs.base import Revealer
+from kapitan.inputs.jinja2 import Jinja2
+from kapitan.inputs.jsonnet import Jsonnet
 from kapitan import cached
 
 logger = logging.getLogger(__name__)
@@ -41,7 +40,7 @@
 
 def compile_targets(inventory_path, search_paths, output_path, parallel, targets, ref_controller, **kwargs):
     """
-    Searches and loads target files, and runs compile_target_file() on a
+    Searches and loads target files, and runs compile_target() on a
     multiprocessing pool with parallel number of processes.
     kwargs are passed to compile_target()
     """
@@ -288,145 +287,27 @@ def compile_target(target_obj, search_paths, compile_path, ref_controller, **kwa
     """Compiles target_obj and writes to compile_path"""
     start = time.time()
 
+    compile_objs = target_obj["compile"]
     ext_vars = target_obj["vars"]
     target_name = ext_vars["target"]
-    compile_obj = target_obj["compile"]
-
-    for obj in compile_obj:
-        input_type = obj["input_type"]
-        input_paths = obj["input_paths"]
-        output_path = obj["output_path"]
-
-        if input_type == "jsonnet":
-            _compile_path = os.path.join(compile_path, target_name, output_path)
-            # support writing to an already existent dir
-            try:
-                os.makedirs(_compile_path)
-            except OSError as ex:
-                # If directory exists, pass
-                if ex.errno == errno.EEXIST:
-                    pass
-
-            output_type = obj["output_type"]  # output_type is mandatory in jsonnet
-            for input_path in input_paths:
-                jsonnet_file_found = False
-                for path in search_paths:
-                    compile_file_sp = os.path.join(path, input_path)
-                    if os.path.exists(compile_file_sp):
-                        jsonnet_file_found = True
-                        logger.debug("Compiling %s", compile_file_sp)
-                        try:
-                            compile_jsonnet(compile_file_sp, _compile_path, search_paths, ext_vars, ref_controller,
-                                            output=output_type, target_name=target_name, **kwargs)
-                        except KapitanError as e:
-                            raise CompileError("{}\nCompile error: failed to compile target: {}".format(e, target_name))
-
-                if not jsonnet_file_found:
-                    raise CompileError("Compile error: {} for target: {} not found in "
-                                       "search_paths: {}".format(input_path, target_name, search_paths))
+    jinja2_compiler = Jinja2(compile_path, search_paths, ref_controller)
+    jsonnet_compiler = Jsonnet(compile_path, search_paths, ref_controller)
+
+    for comp_obj in compile_objs:
+        input_type = comp_obj["input_type"]
+        output_path = comp_obj["output_path"]
 
         if input_type == "jinja2":
-            _compile_path = os.path.join(compile_path, target_name, output_path)
-            # support writing to an already existent dir
-            try:
-                os.makedirs(_compile_path)
-            except OSError as ex:
-                # If directory exists, pass
-                if ex.errno == errno.EEXIST:
-                    pass
-            for input_path in input_paths:
-                jinja2_file_found = False
-                for path in search_paths:
-                    compile_path_sp = os.path.join(path, input_path)
-                    if os.path.exists(compile_path_sp):
-                        jinja2_file_found = True
-                        # copy ext_vars to dedicated jinja2 context so we can update it
-                        ctx = ext_vars.copy()
-                        ctx["inventory"] = inventory(search_paths, target_name)
-                        ctx["inventory_global"] = inventory(search_paths, None)
-                        try:
-                            compile_jinja2(compile_path_sp, ctx, _compile_path, ref_controller,
-                                           target_name=target_name, **kwargs)
-                        except KapitanError as e:
-                            raise CompileError("{}\nCompile error: failed to compile target: {}".format(e, target_name))
-
-                if not jinja2_file_found:
-                    raise CompileError("Compile error: {} for target: {} not found in "
-                                       "search_paths: {}".format(input_path, target_name, search_paths))
-
-    logger.info("Compiled %s (%.2fs)", target_name, time.time() - start)
-
-
-def compile_jinja2(path, context, compile_path, ref_controller, **kwargs):
-    """
-    Write items in path as jinja2 rendered files to compile_path.
-    path can be either a file or directory.
-    kwargs:
-        reveal: default False, set to reveal refs on compile
-        target_name: default None, set to current target being compiled
-    """
-    reveal = kwargs.get('reveal', False)
-    target_name = kwargs.get('target_name', None)
-
-    for item_key, item_value in render_jinja2(path, context).items():
-        full_item_path = os.path.join(compile_path, item_key)
-        try:
-            os.makedirs(os.path.dirname(full_item_path))
-        except OSError as ex:
-            # If directory exists, pass
-            if ex.errno == errno.EEXIST:
-                pass
-        with CompiledFile(full_item_path, ref_controller, mode="w", reveal=reveal, target_name=target_name) as fp:
-            fp.write(item_value["content"])
-            mode = item_value["mode"]
-            os.chmod(full_item_path, mode)
-            logger.debug("Wrote %s with mode %.4o", full_item_path, mode)
+            input_compiler = jinja2_compiler
+        elif input_type == "jsonnet":
+            input_compiler = jsonnet_compiler
+        else:
+            raise CompileError("Invalid input_type: \"{}\". Supported input_types: jsonnet, jinja2".format(input_type))
+
+        input_compiler.make_compile_dirs(target_name, output_path)
+        input_compiler.compile_obj(comp_obj, ext_vars, **kwargs)
-
-
-def compile_jsonnet(file_path, compile_path, search_paths, ext_vars, ref_controller, **kwargs):
-    """
-    Write file_path (jsonnet evaluated) items as files to compile_path.
-    Set output to write as json or yaml
-    search_paths and ext_vars will be passed as parameters to jsonnet_file()
-    kwargs:
-        output: default 'yaml', accepts 'json'
-        prune: default False, accepts True
-        reveal: default False, set to reveal refs on compile
-        target_name: default None, set to current target being compiled
-        indent: default 2
-    """
-    _search_imports = lambda cwd, imp: search_imports(cwd, imp, search_paths)
-    json_output = jsonnet_file(file_path, import_callback=_search_imports,
-                               native_callbacks=resource_callbacks(search_paths),
-                               ext_vars=ext_vars)
-    json_output = json.loads(json_output)
-
-    output = kwargs.get('output', 'yaml')
-    prune = kwargs.get('prune', False)
-    reveal = kwargs.get('reveal', False)
-    target_name = kwargs.get('target_name', None)
-    indent = kwargs.get('indent', 2)
-
-    if prune:
-        json_output = prune_empty(json_output)
-        logger.debug("Pruned output for: %s", file_path)
-
-    for item_key, item_value in json_output.items():
-        # write each item to disk
-        if output == 'json':
-            file_path = os.path.join(compile_path, '%s.%s' % (item_key, output))
-            with CompiledFile(file_path, ref_controller, mode="w", reveal=reveal, target_name=target_name,
-                              indent=indent) as fp:
-                fp.write_json(item_value)
-            logger.debug("Wrote %s", file_path)
-        elif output == 'yaml':
-            file_path = os.path.join(compile_path, '%s.%s' % (item_key, "yml"))
-            with CompiledFile(file_path, ref_controller, mode="w", reveal=reveal, target_name=target_name,
-                              indent=indent) as fp:
-                fp.write_yaml(item_value)
-            logger.debug("Wrote %s", file_path)
-        else:
-            raise ValueError('output is neither "json" or "yaml"')
+
+    logger.info("Compiled %s (%.2fs)", target_name, time.time() - start)
 
 
 @hashable_lru_cache
@@ -461,58 +342,3 @@ def valid_target_obj(target_obj):
     }
 
     return jsonschema.validate(target_obj, schema)
-
-
-class CompilingFile(object):
-    def __init__(self, context, fp, ref_controller, **kwargs):
-        self.fp = fp
-        self.ref_controller = ref_controller
-        self.kwargs = kwargs
-        self.revealer = Revealer(ref_controller)
-
-    def write(self, data):
-        """write data into file"""
-        reveal = self.kwargs.get('reveal', False)
-        target_name = self.kwargs.get('target_name', None)
-        if reveal:
-            self.fp.write(self.revealer.reveal_raw(data))
-        else:
-            self.fp.write(self.revealer.compile_raw(data, target_name=target_name))
-
-    def write_yaml(self, obj):
-        """recursively compile or reveal refs and convert obj to yaml and write to file"""
-        indent = self.kwargs.get('indent', 2)
-        reveal = self.kwargs.get('reveal', False)
-        target_name = self.kwargs.get('target_name', None)
-        if reveal:
-            self.revealer.reveal_obj(obj)
-        else:
-            self.revealer.compile_obj(obj, target_name=target_name)
-        yaml.dump(obj, stream=self.fp, indent=indent, Dumper=PrettyDumper, default_flow_style=False)
-
-    def write_json(self, obj):
-        """recursively hash or reveal refs and convert obj to json and write to file"""
-        indent = self.kwargs.get('indent', 2)
-        reveal = self.kwargs.get('reveal', False)
-        target_name = self.kwargs.get('target_name', None)
-        if reveal:
-            self.revealer.reveal_obj(obj)
-        else:
-            self.revealer.compile_obj(obj, target_name=target_name)
-        json.dump(obj, self.fp, indent=indent, escape_forward_slashes=False)
-
-
-class CompiledFile(object):
-    def __init__(self, name, ref_controller, **kwargs):
-        self.name = name
-        self.fp = None
-        self.ref_controller = ref_controller
-        self.kwargs = kwargs
-
-    def __enter__(self):
-        mode = self.kwargs.get("mode", "r")
-        self.fp = open(self.name, mode)
-        return CompilingFile(self, self.fp, self.ref_controller, **self.kwargs)
-
-    def __exit__(self, *args):
-        self.fp.close()
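
Usage sketch, for review context only (not part of the patch): the new kapitan/inputs classes are driven as below, mirroring the dispatch in the refactored compile_target(). The compile_one() wrapper name is hypothetical; target_obj, search_paths, compile_path and ref_controller come from the inventory and refs machinery, which this patch does not change.

    # Sketch only; the real entry point remains compile_target() in kapitan/targets.py.
    from kapitan.inputs.jinja2 import Jinja2
    from kapitan.inputs.jsonnet import Jsonnet

    def compile_one(target_obj, search_paths, compile_path, ref_controller, **kwargs):
        ext_vars = target_obj["vars"]          # must contain "target"
        target_name = ext_vars["target"]

        # one compiler instance per supported input_type, as in compile_target()
        compilers = {
            "jinja2": Jinja2(compile_path, search_paths, ref_controller),
            "jsonnet": Jsonnet(compile_path, search_paths, ref_controller),
        }

        for comp_obj in target_obj["compile"]:
            # comp_obj carries input_type, input_paths and output_path
            input_compiler = compilers[comp_obj["input_type"]]
            input_compiler.make_compile_dirs(target_name, comp_obj["output_path"])
            # kwargs may carry reveal/prune/indent; each InputType handles its own
            input_compiler.compile_obj(comp_obj, ext_vars, **kwargs)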