diff --git a/src/inmanta/const.py b/src/inmanta/const.py index ab2a9d36a7..1437b668ff 100644 --- a/src/inmanta/const.py +++ b/src/inmanta/const.py @@ -112,3 +112,12 @@ class LogLevel(Enum): INMANTA_URN = "urn:inmanta:" + + +class Compilestate(Enum): + success = 1 + failed = 2 + + +EXPORT_META_DATA = "export_metadata" +META_DATA_COMPILE_STATE = "inmanta:compile:state" diff --git a/src/inmanta/export.py b/src/inmanta/export.py index cfbf8cdc86..b81bba50be 100644 --- a/src/inmanta/export.py +++ b/src/inmanta/export.py @@ -266,6 +266,9 @@ def run(self, types, scopes, metadata={}, no_commit=False, include_status=False, # call dependency managers self._call_dep_manager(types) + metadata[const.META_DATA_COMPILE_STATE] = const.Compilestate.success.name + else: + metadata[const.META_DATA_COMPILE_STATE] = const.Compilestate.failed.name # validate the dependency graph self._validate_graph() @@ -440,7 +443,7 @@ def call(): LOGGER.debug("Uploaded file with hash %s" % hash_id) # Collecting version information - version_info = {"export_metadata": metadata, + version_info = {const.EXPORT_META_DATA: metadata, "model": model} # TODO: start transaction diff --git a/src/inmanta/server/server.py b/src/inmanta/server/server.py index a4c39eb97d..93522f6957 100644 --- a/src/inmanta/server/server.py +++ b/src/inmanta/server/server.py @@ -809,36 +809,47 @@ def put_version(self, env, version, resources, resource_state, unknowns, version res_obj = rv_dict[t.resource_str()] res_obj.provides.append(f.resource_version_id) - # search for deleted resources - resources_to_purge = yield data.Resource.get_deleted_resources(env.id, version, set(rv_dict.keys())) - previous_requires = {} - for res in resources_to_purge: - LOGGER.warning("Purging %s, purged resource based on %s" % (res.resource_id, res.resource_version_id)) - - attributes = res.attributes.copy() - attributes["purged"] = "true" - attributes["requires"] = [] - - res_obj = data.Resource.new(env.id, resource_version_id="%s,v=%s" % (res.resource_id, version), - attributes=attributes) - resource_objects.append(res_obj) - - previous_requires[res_obj.resource_id] = res.attributes["requires"] - resource_version_ids.append(res_obj.resource_version_id) - agents.add(res_obj.agent) - rv_dict[res_obj.resource_id] = res_obj - - # invert dependencies on purges - for res_id, requires in previous_requires.items(): - res_obj = rv_dict[res_id] - for require in requires: - req_id = Id.parse_id(require) - - if req_id.resource_str() in rv_dict: - req_res = rv_dict[req_id.resource_str()] - - req_res.attributes["requires"].append(res_obj.resource_version_id) - res_obj.provides.append(req_res.resource_version_id) + # detect failed compiles + def safe_get(input, key, default): + if not isinstance(input, dict): + return default + if key not in input: + return default + return input[key] + metadata = safe_get(version_info, const.EXPORT_META_DATA, {}) + compile_state = safe_get(metadata, const.META_DATA_COMPILE_STATE, "") + failed = compile_state == const.Compilestate.failed.name + + if not failed: + # search for deleted resources + resources_to_purge = yield data.Resource.get_deleted_resources(env.id, version, set(rv_dict.keys())) + previous_requires = {} + for res in resources_to_purge: + LOGGER.warning("Purging %s, purged resource based on %s" % (res.resource_id, res.resource_version_id)) + + attributes = res.attributes.copy() + attributes["purged"] = "true" + attributes["requires"] = [] + res_obj = data.Resource.new(env.id, resource_version_id="%s,v=%s" % (res.resource_id, version), + 
attributes=attributes) + resource_objects.append(res_obj) + + previous_requires[res_obj.resource_id] = res.attributes["requires"] + resource_version_ids.append(res_obj.resource_version_id) + agents.add(res_obj.agent) + rv_dict[res_obj.resource_id] = res_obj + + # invert dependencies on purges + for res_id, requires in previous_requires.items(): + res_obj = rv_dict[res_id] + for require in requires: + req_id = Id.parse_id(require) + + if req_id.resource_str() in rv_dict: + req_res = rv_dict[req_id.resource_str()] + + req_res.attributes["requires"].append(res_obj.resource_version_id) + res_obj.provides.append(req_res.resource_version_id) undeployable = [res.resource_id for res in undeployable] # get skipped for undeployable @@ -1797,7 +1808,7 @@ def decomission_environment(self, env, metadata): "message": "Decommission of environment", "type": "api" } - result = yield self.put_version(env, version, [], {}, [], {"export_metadata": metadata}) + result = yield self.put_version(env, version, [], {}, [], {const.EXPORT_META_DATA: metadata}) return result, {"version": version} @protocol.handle(methods.Decommision.clear_environment, env="id") diff --git a/tests/conftest.py b/tests/conftest.py index 3568693fa8..f706cc22c2 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -41,6 +41,8 @@ import re from tornado.ioloop import IOLoop from inmanta.server.bootloader import InmantaBootloader +from inmanta.export import cfg_env, unknown_parameters +import traceback from tornado import process @@ -71,6 +73,7 @@ def reset_all(): # command.Commander.reset() handler.Commander.reset() Project._project = None + unknown_parameters.clear() @pytest.fixture(scope="function", autouse=True) @@ -288,6 +291,8 @@ def create_env(): result = io_loop.run_sync(create_env) env_id = result.result["environment"]["id"] + cfg_env.set(env_id) + yield env_id @@ -326,11 +331,15 @@ def tearDownClass(self): # reset cwd os.chdir(self.cwd) - def setup_for_snippet(self, snippet, autostd=True): + def setup_func(self): # init project self.project_dir = tempfile.mkdtemp() os.symlink(self.env, os.path.join(self.project_dir, ".env")) + def tear_down_func(self): + shutil.rmtree(self.project_dir) + + def setup_for_snippet(self, snippet, autostd=True): with open(os.path.join(self.project_dir, "project.yml"), "w") as cfg: cfg.write( """ @@ -349,23 +358,34 @@ def setup_for_snippet(self, snippet, autostd=True): Project.set(Project(self.project_dir, autostd=autostd)) - def do_export(self, deploy=False, include_status=False): + def do_export(self, deploy=False, include_status=False, do_raise=True): templfile = mktemp("json", "dump", self.project_dir) - from inmanta.export import Exporter - - (types, scopes) = compiler.do_compile() - class Options(object): pass + options = Options() options.json = templfile if not deploy else None options.depgraph = False options.deploy = deploy options.ssl = False - export = Exporter(options=options) - return export.run(types, scopes, include_status=include_status) + from inmanta.export import Exporter # noqa: H307 + + try: + (types, scopes) = compiler.do_compile() + except Exception: + types, scopes = (None, None) + if do_raise: + raise + else: + traceback.print_exc() + + # Even if the compile failed we might have collected additional data such as unknowns. 
So + # continue the export + + export = Exporter(options) + return export.run(types, scopes, model_export=False, include_status=include_status) def setup_for_error(self, snippet, shouldbe): self.setup_for_snippet(snippet) @@ -391,14 +411,20 @@ def setup_for_error_re(self, snippet, shouldbe): @pytest.fixture(scope="session") -def snippetcompiler(): +def snippetcompiler_global(): ast = SnippetCompilationTest() ast.setUpClass() yield ast - shutil.rmtree(ast.project_dir) ast.tearDownClass() +@pytest.fixture(scope="function") +def snippetcompiler(snippetcompiler_global): + snippetcompiler_global.setup_func() + yield snippetcompiler_global + snippetcompiler_global.tear_down_func() + + class CLI(object): def __init__(self, io_loop): diff --git a/tests/test_export.py b/tests/test_export.py index 386b666e4d..22c013f365 100644 --- a/tests/test_export.py +++ b/tests/test_export.py @@ -158,7 +158,6 @@ def test_empty_server_export(snippetcompiler, server, client): @pytest.mark.gen_test def test_server_export(snippetcompiler, server, client, environment): - config.Config.set("config", "environment", environment) snippetcompiler.setup_for_snippet(""" h = std::Host(name="test", os=std::linux) f = std::ConfigFile(host=h, path="/etc/motd", content="test") diff --git a/tests/test_resource.py b/tests/test_resource.py index 2fdb86043c..d66ec74ce5 100644 --- a/tests/test_resource.py +++ b/tests/test_resource.py @@ -19,8 +19,6 @@ from inmanta import resources import pytest from inmanta.resources import resource, ResourceException -from conftest import SnippetCompilationTest -import shutil class Base(resources.Resource): @@ -56,7 +54,7 @@ class Test(Base): fields = ("z",) -def test_resource_base(): +def test_resource_base(snippetcompiler): import inmanta.resources @@ -67,37 +65,31 @@ class MyResource(inmanta.resources.Resource): """ fields = ("key", "value", "agent") - snippetcompiler = SnippetCompilationTest() - snippetcompiler.setUpClass() - try: - snippetcompiler.setup_for_snippet(""" - entity XResource: - string key - string agent - string value - end + snippetcompiler.setup_for_snippet(""" + entity XResource: + string key + string agent + string value + end - implement XResource using none + implement XResource using none - implementation none for XResource: - end + implementation none for XResource: + end - XResource(key="key", agent="agent", value="value") - """, autostd=False) - _version, json_value = snippetcompiler.do_export() + XResource(key="key", agent="agent", value="value") + """, autostd=False) + _version, json_value = snippetcompiler.do_export() - assert len(json_value) == 1 - myresource = next(json_value.values().__iter__()) + assert len(json_value) == 1 + myresource = next(json_value.values().__iter__()) - assert myresource.key == "key" - assert myresource.agent == "agent" - assert myresource.value == "value" - finally: - shutil.rmtree(snippetcompiler.project_dir) - snippetcompiler.tearDownClass() + assert myresource.key == "key" + assert myresource.agent == "agent" + assert myresource.value == "value" -def test_resource_base_with_method_key(): +def test_resource_base_with_method_key(snippetcompiler): import inmanta.resources @@ -112,31 +104,25 @@ class MyResource(inmanta.resources.Resource): def get_serialize(_, resource): return resource.key - snippetcompiler = SnippetCompilationTest() - snippetcompiler.setUpClass() - try: - snippetcompiler.setup_for_snippet(""" - entity XResource: - string key - string agent - string value - end + snippetcompiler.setup_for_snippet(""" + entity XResource: + 
string key + string agent + string value + end - implement XResource using none + implement XResource using none - implementation none for XResource: - end + implementation none for XResource: + end - XResource(key="key", agent="agent", value="value") - """, autostd=False) - with pytest.raises(ResourceException): - snippetcompiler.do_export() - finally: - shutil.rmtree(snippetcompiler.project_dir) - snippetcompiler.tearDownClass() + XResource(key="key", agent="agent", value="value") + """, autostd=False) + with pytest.raises(ResourceException): + snippetcompiler.do_export() -def test_resource_with_keyword(): +def test_resource_with_keyword(snippetcompiler): import inmanta.resources @@ -151,32 +137,26 @@ class MyResource(inmanta.resources.Resource): def get_model(_, resource): return resource.key - snippetcompiler = SnippetCompilationTest() - snippetcompiler.setUpClass() - try: - snippetcompiler.setup_for_snippet(""" - entity YResource: - string key - string agent - string value - end + snippetcompiler.setup_for_snippet(""" + entity YResource: + string key + string agent + string value + end - implement YResource using none + implement YResource using none - implementation none for YResource: - end + implementation none for YResource: + end - YResource(key="key", agent="agent", value="value") - """, autostd=False) + YResource(key="key", agent="agent", value="value") + """, autostd=False) - with pytest.raises(ResourceException): - snippetcompiler.do_export() - finally: - shutil.rmtree(snippetcompiler.project_dir) - snippetcompiler.tearDownClass() + with pytest.raises(ResourceException): + snippetcompiler.do_export() -def test_resource_with_private_method(): +def test_resource_with_private_method(snippetcompiler): import inmanta.resources @@ -187,26 +167,20 @@ class MyResource(inmanta.resources.Resource): """ fields = ("__setattr__", "key", "value", "agent") - snippetcompiler = SnippetCompilationTest() - snippetcompiler.setUpClass() - try: - snippetcompiler.setup_for_snippet(""" - entity YResource: - string key - string agent - string value - end - - implement YResource using none - - implementation none for YResource: - end - - YResource(key="key", agent="agent", value="value") - """, autostd=False) - - with pytest.raises(ResourceException): - snippetcompiler.do_export() - finally: - shutil.rmtree(snippetcompiler.project_dir) - snippetcompiler.tearDownClass() + snippetcompiler.setup_for_snippet(""" + entity YResource: + string key + string agent + string value + end + + implement YResource using none + + implementation none for YResource: + end + + YResource(key="key", agent="agent", value="value") + """, autostd=False) + + with pytest.raises(ResourceException): + snippetcompiler.do_export() diff --git a/tests/test_server.py b/tests/test_server.py index 33a564f99a..4777cc7686 100644 --- a/tests/test_server.py +++ b/tests/test_server.py @@ -30,6 +30,8 @@ from uuid import UUID from inmanta.export import upload_code from inmanta.util import hash_file +from inmanta.export import unknown_parameters +from threading import Thread LOGGER = logging.getLogger(__name__) @@ -564,6 +566,130 @@ def test_purge_on_delete_requires(io_loop, client, server, environment): assert file1["id"] in file2["provides"] +@pytest.mark.gen_test +def test_purge_on_delete_compile_failed_with_compile(io_loop, client, server, environment, snippetcompiler): + # run in threads to allow run_sync to work + + def i1(): + snippetcompiler.setup_for_snippet(""" + h = std::Host(name="test", os=std::linux) + f = std::ConfigFile(host=h, 
path="/etc/motd", content="test", purge_on_delete=true) + """) + version, _ = snippetcompiler.do_export(deploy=True, do_raise=False) + result = yield client.get_version(environment, version) + assert result.code == 200 + assert result.result["model"]["total"] == 1 + + def i2(): + snippetcompiler.setup_for_snippet(""" + h = std::Host(name="test") + """) + + # force deploy by having unknown + unknown_parameters.append({}) + + version, _ = snippetcompiler.do_export(deploy=True, do_raise=False) + result = yield client.get_version(environment, version) + assert result.code == 200 + assert result.result["model"]["total"] == 0 + + t1 = Thread(target=i1) + t1.start() + t1.join() + + t1 = Thread(target=i2) + t1.start() + t1.join() + + +@pytest.mark.gen_test +def test_purge_on_delete_compile_failed(io_loop, client, server, environment): + """ + Test purge on delete of resources + """ + agent = Agent(io_loop, "localhost", {"blah": "localhost"}, environment=environment) + agent.start() + aclient = agent._client + + version = 1 + + resources = [{'group': 'root', + 'hash': '89bf880a0dc5ffc1156c8d958b4960971370ee6a', + 'id': 'std::File[vm1,path=/tmp/file1],v=%d' % version, + 'owner': 'root', + 'path': '/tmp/file1', + 'permissions': 644, + 'purged': False, + 'reload': False, + 'requires': [], + 'purge_on_delete': True, + 'version': version}, + {'group': 'root', + 'hash': 'b4350bef50c3ec3ee532d4a3f9d6daedec3d2aba', + 'id': 'std::File[vm1,path=/tmp/file2],v=%d' % version, + 'owner': 'root', + 'path': '/tmp/file2', + 'permissions': 644, + 'purged': False, + 'reload': False, + 'purge_on_delete': True, + 'requires': ['std::File[vm1,path=/tmp/file1],v=%d' % version], + 'version': version}, + {'group': 'root', + 'hash': '89bf880a0dc5ffc1156c8d958b4960971370ee6a', + 'id': 'std::File[vm1,path=/tmp/file3],v=%d' % version, + 'owner': 'root', + 'path': '/tmp/file3', + 'permissions': 644, + 'purged': False, + 'reload': False, + 'requires': [], + 'purge_on_delete': True, + 'version': version}] + + res = yield client.put_version(tid=environment, version=version, resources=resources, unknowns=[], version_info={}) + assert res.code == 200 + + # Release the model and set all resources as deployed + result = yield client.release_version(environment, version, push=False) + assert result.code == 200 + + now = datetime.now() + result = yield aclient.resource_action_update(environment, + ['std::File[vm1,path=/tmp/file1],v=%d' % version], + uuid.uuid4(), "deploy", now, now, "deployed", [], {}) + assert result.code == 200 + + result = yield aclient.resource_action_update(environment, + ['std::File[vm1,path=/tmp/file2],v=%d' % version], + uuid.uuid4(), "deploy", now, now, "deployed", [], {}) + assert result.code == 200 + + result = yield aclient.resource_action_update(environment, + ['std::File[vm1,path=/tmp/file3],v=%d' % version], + uuid.uuid4(), "deploy", now, now, "deployed", [], {}) + assert result.code == 200 + + result = yield client.get_version(environment, version) + assert result.code == 200 + assert result.result["model"]["version"] == version + assert result.result["model"]["total"] == len(resources) + assert result.result["model"]["done"] == len(resources) + assert result.result["model"]["released"] + assert result.result["model"]["result"] == const.VersionState.success.name + + # New version with only file3 + version = 2 + res = yield client.put_version(tid=environment, version=version, resources=[], unknowns=[], + version_info={const.EXPORT_META_DATA: + {const.META_DATA_COMPILE_STATE: const.Compilestate.failed}}) + 
assert res.code == 200
+
+    result = yield client.get_version(environment, version)
+    assert result.code == 200
+    assert result.result["model"]["total"] == 0
+
+
 @pytest.mark.gen_test
 def test_purge_on_delete(io_loop, client, server, environment):
     """
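
For context, a minimal self-contained sketch of the convention this diff introduces: the exporter tags `version_info["export_metadata"]["inmanta:compile:state"]`, and `put_version` only runs its purge-on-delete pass when that flag does not read `failed`. The constants and enum below are the ones added to `inmanta/const.py`; `should_purge_deleted_resources` is a hypothetical helper used here only to illustrate the server-side check, not the actual server API.

```python
from enum import Enum


class Compilestate(Enum):
    success = 1
    failed = 2


EXPORT_META_DATA = "export_metadata"
META_DATA_COMPILE_STATE = "inmanta:compile:state"


def should_purge_deleted_resources(version_info):
    """Illustrative mirror of the server-side check: only purge deleted
    resources when the export did not come from a failed compile.
    Missing or malformed metadata is treated as not-failed, so older
    clients that send no compile state keep the existing behaviour."""
    metadata = version_info.get(EXPORT_META_DATA, {}) if isinstance(version_info, dict) else {}
    if not isinstance(metadata, dict):
        metadata = {}
    return metadata.get(META_DATA_COMPILE_STATE, "") != Compilestate.failed.name


# A failed compile must not trigger purge-on-delete ...
assert not should_purge_deleted_resources(
    {EXPORT_META_DATA: {META_DATA_COMPILE_STATE: Compilestate.failed.name}})
# ... while a successful or unannotated export keeps the old behaviour.
assert should_purge_deleted_resources(
    {EXPORT_META_DATA: {META_DATA_COMPILE_STATE: Compilestate.success.name}})
assert should_purge_deleted_resources({})
```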