From e3864ead41f68d9900bd30293d09049c43c0832e Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 14 Oct 2022 15:21:25 +0200 Subject: [PATCH 01/68] OP-3426 - DL support for Maya automatic tests WIP --- tests/integration/hosts/maya/lib.py | 16 ++++- .../maya/test_deadline_publish_in_maya.py | 70 +++++++++++++++++++ .../hosts/maya/test_publish_in_maya.py | 4 +- tests/lib/testing_classes.py | 54 +++++++++++++- 4 files changed, 139 insertions(+), 5 deletions(-) create mode 100644 tests/integration/hosts/maya/test_deadline_publish_in_maya.py diff --git a/tests/integration/hosts/maya/lib.py b/tests/integration/hosts/maya/lib.py index f3a438c0650..6610fac118a 100644 --- a/tests/integration/hosts/maya/lib.py +++ b/tests/integration/hosts/maya/lib.py @@ -2,10 +2,14 @@ import pytest import shutil -from tests.lib.testing_classes import HostFixtures +from tests.lib.testing_classes import ( + HostFixtures, + PublishTest, + DeadlinePublishTest +) -class MayaTestClass(HostFixtures): +class MayaHostFixtures(HostFixtures): @pytest.fixture(scope="module") def last_workfile_path(self, download_test_data, output_folder_url): """Get last_workfile_path from source data. @@ -39,3 +43,11 @@ def startup_scripts(self, monkeypatch_session, download_test_data): "{}{}{}".format(startup_path, os.pathsep, original_pythonpath)) + + +class MayaLocalPublishTestClass(MayaHostFixtures, PublishTest): + """Testing class for local publishes.""" + + +class MayaDeadlinePublishTestClass(MayaHostFixtures, DeadlinePublishTest): + """Testing class for Deadline publishes.""" diff --git a/tests/integration/hosts/maya/test_deadline_publish_in_maya.py b/tests/integration/hosts/maya/test_deadline_publish_in_maya.py new file mode 100644 index 00000000000..f14310a16cd --- /dev/null +++ b/tests/integration/hosts/maya/test_deadline_publish_in_maya.py @@ -0,0 +1,70 @@ +from tests.integration.hosts.maya.lib import MayaDeadlinePublishTestClass + + +class TestDeadlinePublishInMaya(MayaDeadlinePublishTestClass): + """Basic test case for publishing in Maya + + Shouldnt be running standalone only via 'runtests' pype command! (??) + + Uses generic TestCase to prepare fixtures for test data, testing DBs, + env vars. + + Always pulls and uses test data from GDrive! + + Opens Maya, runs publish on prepared workile. + + Then checks content of DB (if subset, version, representations were + created. + Checks tmp folder if all expected files were published. 
+ + How to run: + (in cmd with activated {OPENPYPE_ROOT}/.venv) + {OPENPYPE_ROOT}/.venv/Scripts/python.exe {OPENPYPE_ROOT}/start.py runtests ../tests/integration/hosts/maya # noqa: E501 + + """ + PERSIST = True + + TEST_FILES = [ + ("1BTSIIULJTuDc8VvXseuiJV_fL6-Bu7FP", "test_maya_publish.zip", "") + ] + + APP = "maya" + # keep empty to locate latest installed variant or explicit + APP_VARIANT = "" + + TIMEOUT = 120 # publish timeout + + def test_db_asserts(self, dbcon, publish_finished): + """Host and input data dependent expected results in DB.""" + print("test_db_asserts") + assert 5 == dbcon.count_documents({"type": "version"}), \ + "Not expected no of versions" + + assert 0 == dbcon.count_documents({"type": "version", + "name": {"$ne": 1}}), \ + "Only versions with 1 expected" + + assert 1 == dbcon.count_documents({"type": "subset", + "name": "modelMain"}), \ + "modelMain subset must be present" + + assert 1 == dbcon.count_documents({"type": "subset", + "name": "workfileTest_task"}), \ + "workfileTest_task subset must be present" + + assert 11 == dbcon.count_documents({"type": "representation"}), \ + "Not expected no of representations" + + assert 2 == dbcon.count_documents({"type": "representation", + "context.subset": "modelMain", + "context.ext": "abc"}), \ + "Not expected no of representations with ext 'abc'" + + assert 2 == dbcon.count_documents({"type": "representation", + "context.subset": "modelMain", + "context.ext": "ma"}), \ + "Not expected no of representations with ext 'abc'" + + +if __name__ == "__main__": + test_case = TestDeadlinePublishInMaya() diff --git a/tests/integration/hosts/maya/test_publish_in_maya.py b/tests/integration/hosts/maya/test_publish_in_maya.py index 68b05644280..39b3f122633 100644 --- a/tests/integration/hosts/maya/test_publish_in_maya.py +++ b/tests/integration/hosts/maya/test_publish_in_maya.py @@ -1,7 +1,7 @@ -from tests.integration.hosts.maya.lib import MayaTestClass +from tests.integration.hosts.maya.lib import MayaLocalPublishTestClass -class TestPublishInMaya(MayaTestClass): +class TestPublishInMaya(MayaLocalPublishTestClass): """Basic test case for publishing in Maya Shouldnt be running standalone only via 'runtests' pype command! (??) 
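Aside on the assertion style above: later patches in this series replace the raw `dbcon.count_documents` asserts with `DBAssert.count_of_types` from `tests.lib.assert_classes`, which collects failure messages into a list instead of raising on the first mismatch. That helper is not shown in this diff, so the following is only a minimal sketch of the pattern, assuming it simply wraps `count_documents`:

```python
# Hypothetical sketch in the spirit of tests.lib.assert_classes.DBAssert;
# the real implementation in the repository may differ.
def count_of_types(dbcon, queried_type, expected, additional_args=None, **kwargs):
    """Return a failure message when the document count does not match.

    Keyword arguments become extra query filters, e.g. name={"$ne": 1}
    counts only documents whose name differs from 1.
    """
    query = {"type": queried_type}
    query.update(kwargs)
    if additional_args:
        query.update(additional_args)

    found = dbcon.count_documents(query)
    if found != expected:
        return "Expected {} '{}' documents for {}, found {}".format(
            expected, queried_type, query, found)
    return None  # falsy result means "no failure"
```

Used this way, a test appends each result to a `failures` list and finishes with `assert not any(failures)`, which is the pattern visible in the later patches of this series.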
diff --git a/tests/lib/testing_classes.py b/tests/lib/testing_classes.py index 78a9f810954..9d7dfa923ed 100644 --- a/tests/lib/testing_classes.py +++ b/tests/lib/testing_classes.py @@ -8,9 +8,11 @@ import shutil import glob import platform +import requests from tests.lib.db_handler import DBHandler from common.openpype_common.distribution.file_handler import RemoteFileHandler +from openpype.modules import ModulesManager class BaseTest: @@ -333,7 +335,57 @@ def test_folder_structure_same(self, dbcon, publish_finished, "\n".join(sorted(not_matched))) -class HostFixtures(PublishTest): +class DeadlinePublishTest(PublishTest): + @pytest.fixture(scope="module") + def publish_finished(self, dbcon, launched_app, download_test_data, + timeout): + """Dummy fixture waiting for publish to finish""" + import time + time_start = time.time() + timeout = timeout or self.TIMEOUT + timeout = float(timeout) + while launched_app.poll() is None: + time.sleep(0.5) + if time.time() - time_start > timeout: + launched_app.terminate() + raise ValueError("Timeout reached") + + deadline_job_id = os.environ.get("DEADLINE_PUBLISH_JOB_ID") + if not deadline_job_id: + raise ValueError("DEADLINE_PUBLISH_JOB_ID empty, cannot find job") + + modules_manager = ModulesManager() + deadline_module = modules_manager.modules_by_name("deadline") + deadline_url = deadline_module.deadline_urls["default"] + + if not deadline_url: + raise ValueError("Must have default deadline url.") + + url = "{}/api/jobs?JobId={}".format(deadline_url, deadline_job_id) + date_finished = None + + time_start = time.time() + while not date_finished: + time.sleep(0.5) + if time.time() - time_start > timeout: + raise ValueError("Timeout for DL finish reached") + + response = requests.get(url, timeout=10) + if not response.ok: + msg = "Couldn't connect to {}".format(deadline_url) + raise RuntimeError(msg) + + if not response.json(): + raise ValueError("Couldn't find {}".format(deadline_job_id)) + + date_finished = response.json()[0]["DateComp"] + + # some clean exit test possible? + print("Publish finished") + yield True + + +class HostFixtures(): """Host specific fixtures. Should be implemented once per host.""" @pytest.fixture(scope="module") def last_workfile_path(self, download_test_data, output_folder_url): From 5ee97fa8e2497058ef407cad82fdfa38faee3bec Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 14 Oct 2022 15:24:48 +0200 Subject: [PATCH 02/68] OP-3426 - DL support for AE automatic tests WIP --- tests/integration/hosts/aftereffects/lib.py | 16 +++- .../test_deadline_publish_in_aftereffects.py | 79 +++++++++++++++++++ .../test_publish_in_aftereffects.py | 4 +- 3 files changed, 95 insertions(+), 4 deletions(-) create mode 100644 tests/integration/hosts/aftereffects/test_deadline_publish_in_aftereffects.py diff --git a/tests/integration/hosts/aftereffects/lib.py b/tests/integration/hosts/aftereffects/lib.py index 9fffc6073da..ca637edba68 100644 --- a/tests/integration/hosts/aftereffects/lib.py +++ b/tests/integration/hosts/aftereffects/lib.py @@ -2,10 +2,14 @@ import pytest import shutil -from tests.lib.testing_classes import HostFixtures +from tests.lib.testing_classes import ( + HostFixtures, + PublishTest, + DeadlinePublishTest +) -class AfterEffectsTestClass(HostFixtures): +class AEHostFixtures(HostFixtures): @pytest.fixture(scope="module") def last_workfile_path(self, download_test_data, output_folder_url): """Get last_workfile_path from source data. 
@@ -32,3 +36,11 @@ def last_workfile_path(self, download_test_data, output_folder_url): def startup_scripts(self, monkeypatch_session, download_test_data): """Points Maya to userSetup file from input data""" pass + + +class AELocalPublishTestClass(AEHostFixtures, PublishTest): + """Testing class for local publishes.""" + + +class AEDeadlinePublishTestClass(AEHostFixtures, DeadlinePublishTest): + """Testing class for Deadline publishes.""" diff --git a/tests/integration/hosts/aftereffects/test_deadline_publish_in_aftereffects.py b/tests/integration/hosts/aftereffects/test_deadline_publish_in_aftereffects.py new file mode 100644 index 00000000000..5d2a7d7ebd9 --- /dev/null +++ b/tests/integration/hosts/aftereffects/test_deadline_publish_in_aftereffects.py @@ -0,0 +1,79 @@ +import logging + +from tests.lib.assert_classes import DBAssert +from tests.integration.hosts.aftereffects.lib import AEDeadlinePublishTestClass + +log = logging.getLogger("test_publish_in_aftereffects") + + +class TestDeadlinePublishInAfterEffects(AEDeadlinePublishTestClass): + """Basic test case for DL publishing in AfterEffects + + Uses generic TestCase to prepare fixtures for test data, testing DBs, + env vars. + + Opens AfterEffects, run DL publish on prepared workile. + + Test zip file sets 3 required env vars: + - HEADLESS_PUBLISH - this triggers publish immediately app is open + - IS_TEST - this differentiate between regular webpublish + - PYBLISH_TARGETS + + Waits for publish job on DL is finished. + + Then checks content of DB (if subset, version, representations were + created. + Checks tmp folder if all expected files were published. + + """ + PERSIST = False + + TEST_FILES = [ + ("1c8261CmHwyMgS-g7S4xL5epAp0jCBmhf", + "test_aftereffects_publish.zip", + "") + ] + + APP = "aftereffects" + APP_VARIANT = "" + + APP_NAME = "{}/{}".format(APP, APP_VARIANT) + + TIMEOUT = 120 # publish timeout + + def test_db_asserts(self, dbcon, publish_finished): + """Host and input data dependent expected results in DB.""" + print("test_db_asserts") + failures = [] + + failures.append(DBAssert.count_of_types(dbcon, "version", 2)) + + failures.append( + DBAssert.count_of_types(dbcon, "version", 0, name={"$ne": 1})) + + failures.append( + DBAssert.count_of_types(dbcon, "subset", 1, + name="imageMainBackgroundcopy")) + + failures.append( + DBAssert.count_of_types(dbcon, "subset", 1, + name="workfileTest_task")) + + failures.append( + DBAssert.count_of_types(dbcon, "subset", 1, + name="reviewTesttask")) + + failures.append( + DBAssert.count_of_types(dbcon, "representation", 4)) + + additional_args = {"context.subset": "renderTestTaskDefault", + "context.ext": "png"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + assert not any(failures) + + +if __name__ == "__main__": + test_case = TestDeadlinePublishInAfterEffects() diff --git a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects.py b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects.py index 4925cbd2d7f..191e86ebafb 100644 --- a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects.py +++ b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects.py @@ -1,12 +1,12 @@ import logging from tests.lib.assert_classes import DBAssert -from tests.integration.hosts.aftereffects.lib import AfterEffectsTestClass +from tests.integration.hosts.aftereffects.lib import AELocalPublishTestClass log = logging.getLogger("test_publish_in_aftereffects") -class 
TestPublishInAfterEffects(AfterEffectsTestClass): +class TestPublishInAfterEffects(AELocalPublishTestClass): """Basic test case for publishing in AfterEffects Uses generic TestCase to prepare fixtures for test data, testing DBs, From be51b3d977be2032a9386df32a41b26accb2faae Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 14 Oct 2022 17:13:49 +0200 Subject: [PATCH 03/68] OP-3426 - fix not dropping settings collection --- tests/lib/db_handler.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/tests/lib/db_handler.py b/tests/lib/db_handler.py index b1810550124..108ebf560d9 100644 --- a/tests/lib/db_handler.py +++ b/tests/lib/db_handler.py @@ -118,9 +118,8 @@ def setup_from_dump(self, db_name, dump_dir, overwrite=False, "Run with overwrite=True") else: if collection: - coll = self.client[db_name_out].get(collection) - if coll: - coll.drop() + if collection in self.client[db_name_out].list_collection_names(): + self.client[db_name_out][collection].drop() else: self.teardown(db_name_out) From d5e62039a760b6fa4bd200276f6adc0b3e4304b1 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 14 Oct 2022 19:26:21 +0200 Subject: [PATCH 04/68] OP-3426 - wait for DL publish job --- tests/lib/testing_classes.py | 22 ++++++++++++++++------ 1 file changed, 16 insertions(+), 6 deletions(-) diff --git a/tests/lib/testing_classes.py b/tests/lib/testing_classes.py index 9d7dfa923ed..e6f747c6a4b 100644 --- a/tests/lib/testing_classes.py +++ b/tests/lib/testing_classes.py @@ -350,12 +350,22 @@ def publish_finished(self, dbcon, launched_app, download_test_data, launched_app.terminate() raise ValueError("Timeout reached") - deadline_job_id = os.environ.get("DEADLINE_PUBLISH_JOB_ID") - if not deadline_job_id: - raise ValueError("DEADLINE_PUBLISH_JOB_ID empty, cannot find job") + metadata_json = glob.glob(os.path.join(download_test_data, + "**/*_metadata.json"), + recursive=True) + if not metadata_json: + raise RuntimeError("No metadata file found. No job id.") - modules_manager = ModulesManager() - deadline_module = modules_manager.modules_by_name("deadline") + if len(metadata_json) > 1: + raise RuntimeError("Too many metadata files found.") + + with open(metadata_json[0]) as fp: + job_info = json.load(fp) + + deadline_job_id = job_info["job"]["_id"] + + manager = ModulesManager() + deadline_module = manager.modules_by_name["deadline"] deadline_url = deadline_module.deadline_urls["default"] if not deadline_url: @@ -378,7 +388,7 @@ def publish_finished(self, dbcon, launched_app, download_test_data, if not response.json(): raise ValueError("Couldn't find {}".format(deadline_job_id)) - date_finished = response.json()[0]["DateComp"] + date_finished = response.json()[0]["MainEnd"] # some clean exit test possible? 
print("Publish finished") From abe5e24309e882eac5c51b080dc9c4069741de18 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 17 Oct 2022 13:49:20 +0200 Subject: [PATCH 05/68] OP-3426 - fix check if job is finished --- tests/lib/testing_classes.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/tests/lib/testing_classes.py b/tests/lib/testing_classes.py index e6f747c6a4b..8102ad732da 100644 --- a/tests/lib/testing_classes.py +++ b/tests/lib/testing_classes.py @@ -372,10 +372,10 @@ def publish_finished(self, dbcon, launched_app, download_test_data, raise ValueError("Must have default deadline url.") url = "{}/api/jobs?JobId={}".format(deadline_url, deadline_job_id) - date_finished = None + valid_date_finished = None time_start = time.time() - while not date_finished: + while not valid_date_finished: time.sleep(0.5) if time.time() - time_start > timeout: raise ValueError("Timeout for DL finish reached") @@ -388,7 +388,8 @@ def publish_finished(self, dbcon, launched_app, download_test_data, if not response.json(): raise ValueError("Couldn't find {}".format(deadline_job_id)) - date_finished = response.json()[0]["MainEnd"] + # '0001-...' returned until job is finished + valid_date_finished = response.json()[0]["DateComp"][:4] != "0001" # some clean exit test possible? print("Publish finished") From 646dcd92ae3ce91332321cf9706f1345496e5421 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 17 Oct 2022 14:02:39 +0200 Subject: [PATCH 06/68] OP-3426 - fix proper zip id for deadline test --- .../hosts/aftereffects/test_deadline_publish_in_aftereffects.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/integration/hosts/aftereffects/test_deadline_publish_in_aftereffects.py b/tests/integration/hosts/aftereffects/test_deadline_publish_in_aftereffects.py index 5d2a7d7ebd9..103ce6fe5b8 100644 --- a/tests/integration/hosts/aftereffects/test_deadline_publish_in_aftereffects.py +++ b/tests/integration/hosts/aftereffects/test_deadline_publish_in_aftereffects.py @@ -29,7 +29,7 @@ class TestDeadlinePublishInAfterEffects(AEDeadlinePublishTestClass): PERSIST = False TEST_FILES = [ - ("1c8261CmHwyMgS-g7S4xL5epAp0jCBmhf", + ("1xhd2ij2ixyjCyTjZgcJEPAIiBHLU1FEY", "test_aftereffects_publish.zip", "") ] From a45218d48a86220f190f8786f17e9264b80e8a95 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 17 Oct 2022 14:03:57 +0200 Subject: [PATCH 07/68] OP-3426 - fix proper id for publish job It should wait for publish job, not rendering jobs. --- .../deadline/plugins/publish/submit_publish_job.py | 12 +++++++++++- tests/lib/testing_classes.py | 2 +- 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index aba505b3c64..0e418ccf778 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -212,6 +212,8 @@ def _submit_deadline_post_job(self, instance, job, instances): more universal code. Muster post job is sent directly by Muster submitter, so this type of code isn't necessary for it. 
+ Returns: + (str): deadline_publish_job_id """ data = instance.data.copy() subset = data["subset"] @@ -331,6 +333,10 @@ def _submit_deadline_post_job(self, instance, job, instances): if not response.ok: raise Exception(response.text) + deadline_publish_job_id = response.json()["_id"] + + return deadline_publish_job_id + def _copy_extend_frames(self, instance, representation): """Copy existing frames from latest version. @@ -984,7 +990,8 @@ def process(self, instance): self.deadline_url = instance.data.get("deadlineUrl") assert self.deadline_url, "Requires Deadline Webservice URL" - self._submit_deadline_post_job(instance, render_job, instances) + deadline_publish_job_id = \ + self._submit_deadline_post_job(instance, render_job, instances) # publish job file publish_job = { @@ -1002,6 +1009,9 @@ def process(self, instance): "instances": instances } + if deadline_publish_job_id: + publish_job["deadline_publish_job_id"] = deadline_publish_job_id + # add audio to metadata file if available audio_file = context.data.get("audioFile") if audio_file and os.path.isfile(audio_file): diff --git a/tests/lib/testing_classes.py b/tests/lib/testing_classes.py index 8102ad732da..53eeae10cbd 100644 --- a/tests/lib/testing_classes.py +++ b/tests/lib/testing_classes.py @@ -362,7 +362,7 @@ def publish_finished(self, dbcon, launched_app, download_test_data, with open(metadata_json[0]) as fp: job_info = json.load(fp) - deadline_job_id = job_info["job"]["_id"] + deadline_job_id = job_info["deadline_publish_job_id"] manager = ModulesManager() deadline_module = manager.modules_by_name["deadline"] From 6f5c91c785d3678ca76b25b613a6bd9369315a45 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 17 Oct 2022 14:11:54 +0200 Subject: [PATCH 08/68] OP-3426 - refactor - Nuke to new testing classes --- tests/integration/hosts/nuke/lib.py | 18 +++++++++++++++--- .../hosts/nuke/test_publish_in_nuke.py | 9 +++++---- 2 files changed, 20 insertions(+), 7 deletions(-) diff --git a/tests/integration/hosts/nuke/lib.py b/tests/integration/hosts/nuke/lib.py index d3c3d7ba816..e3c610199b0 100644 --- a/tests/integration/hosts/nuke/lib.py +++ b/tests/integration/hosts/nuke/lib.py @@ -2,10 +2,14 @@ import pytest import shutil -from tests.lib.testing_classes import HostFixtures +from tests.lib.testing_classes import ( + HostFixtures, + PublishTest, + DeadlinePublishTest +) -class NukeTestClass(HostFixtures): +class NukeHostFixtures(HostFixtures): @pytest.fixture(scope="module") def last_workfile_path(self, download_test_data, output_folder_url): """Get last_workfile_path from source data. 
@@ -41,4 +45,12 @@ def startup_scripts(self, monkeypatch_session, download_test_data): monkeypatch_session.setenv("NUKE_PATH", "{}{}{}".format(startup_path, os.pathsep, - original_nuke_path)) \ No newline at end of file + original_nuke_path)) + + +class NukeLocalPublishTestClass(NukeHostFixtures, PublishTest): + """Testing class for local publishes.""" + + +class NukeDeadlinePublishTestClass(NukeHostFixtures, DeadlinePublishTest): + """Testing class for Deadline publishes.""" diff --git a/tests/integration/hosts/nuke/test_publish_in_nuke.py b/tests/integration/hosts/nuke/test_publish_in_nuke.py index 884160e0b59..88defdc37a7 100644 --- a/tests/integration/hosts/nuke/test_publish_in_nuke.py +++ b/tests/integration/hosts/nuke/test_publish_in_nuke.py @@ -1,12 +1,12 @@ import logging from tests.lib.assert_classes import DBAssert -from tests.integration.hosts.nuke.lib import NukeTestClass +from tests.integration.hosts.nuke.lib import NukeLocalPublishTestClass log = logging.getLogger("test_publish_in_nuke") -class TestPublishInNuke(NukeTestClass): +class TestPublishInNuke(NukeLocalPublishTestClass): """Basic test case for publishing in Nuke Uses generic TestCase to prepare fixtures for test data, testing DBs, @@ -20,7 +20,8 @@ class TestPublishInNuke(NukeTestClass): How to run: (in cmd with activated {OPENPYPE_ROOT}/.venv) - {OPENPYPE_ROOT}/.venv/Scripts/python.exe {OPENPYPE_ROOT}/start.py runtests ../tests/integration/hosts/nuke # noqa: E501 + {OPENPYPE_ROOT}/.venv/Scripts/python.exe {OPENPYPE_ROOT}/start.py + runtests ../tests/integration/hosts/nuke # noqa: E501 To check log/errors from launched app's publish process keep PERSIST to True and check `test_openpype.logs` collection. @@ -37,7 +38,7 @@ class TestPublishInNuke(NukeTestClass): # could be overwritten by command line arguments # keep empty to locate latest installed variant or explicit APP_VARIANT = "" - PERSIST = True # True - keep test_db, test_openpype, outputted test files + PERSIST = False # True - keep test_db, test_openpype, outputted test files TEST_DATA_FOLDER = None def test_db_asserts(self, dbcon, publish_finished): From fdaa7774a48746ff6ec2bc967fe512b9e3bb2994 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 17 Oct 2022 14:22:52 +0200 Subject: [PATCH 09/68] OP-3426 - added new Nuke deadline testing class --- .../nuke/test_deadine_publish_in_nuke.py | 76 +++++++++++++++++++ 1 file changed, 76 insertions(+) create mode 100644 tests/integration/hosts/nuke/test_deadine_publish_in_nuke.py diff --git a/tests/integration/hosts/nuke/test_deadine_publish_in_nuke.py b/tests/integration/hosts/nuke/test_deadine_publish_in_nuke.py new file mode 100644 index 00000000000..2ab886a2576 --- /dev/null +++ b/tests/integration/hosts/nuke/test_deadine_publish_in_nuke.py @@ -0,0 +1,76 @@ +import logging + +from tests.lib.assert_classes import DBAssert +from tests.integration.hosts.nuke.lib import NukeDeadlinePublishTestClass + +log = logging.getLogger("test_publish_in_nuke") + + +class TestDeadlinePublishInNuke(NukeDeadlinePublishTestClass): + """Basic test case for publishing in Nuke + + Uses generic TestCase to prepare fixtures for test data, testing DBs, + env vars. + + Opens Nuke, run publish on prepared workile. + + Then checks content of DB (if subset, version, representations were + created. + Checks tmp folder if all expected files were published. 
+ + How to run: + (in cmd with activated {OPENPYPE_ROOT}/.venv) + {OPENPYPE_ROOT}/.venv/Scripts/python.exe {OPENPYPE_ROOT}/start.py + runtests ../tests/integration/hosts/nuke # noqa: E501 + + To check log/errors from launched app's publish process keep PERSIST + to True and check `test_openpype.logs` collection. + """ + # https://drive.google.com/file/d/1SUurHj2aiQ21ZIMJfGVBI2KjR8kIjBGI/view?usp=sharing # noqa: E501 + TEST_FILES = [ + ("1SeWprClKhWMv2xVC9AcnekIJFExxnp_b", + "test_nuke_deadlinepublish.zip", "") + ] + + APP = "nuke" + + TIMEOUT = 120 # publish timeout + + # could be overwritten by command line arguments + # keep empty to locate latest installed variant or explicit + APP_VARIANT = "" + PERSIST = False # True - keep test_db, test_openpype, outputted test files + TEST_DATA_FOLDER = None + + def test_db_asserts(self, dbcon, publish_finished): + """Host and input data dependent expected results in DB.""" + print("test_db_asserts") + failures = [] + + failures.append(DBAssert.count_of_types(dbcon, "version", 2)) + + failures.append( + DBAssert.count_of_types(dbcon, "version", 0, name={"$ne": 1})) + + failures.append( + DBAssert.count_of_types(dbcon, "subset", 1, + name="renderCompositingInNukeMain")) + + failures.append( + DBAssert.count_of_types(dbcon, "subset", 1, + name="workfileTest_task")) + + failures.append( + DBAssert.count_of_types(dbcon, "representation", 4)) + + additional_args = {"context.subset": "renderCompositingInNukeMain", + "context.ext": "exr"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + assert not any(failures) + + +if __name__ == "__main__": + test_case = TestDeadlinePublishInNuke() From 9159b67051fb5d69d6b389a428d0883204a83418 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 17 Oct 2022 15:45:06 +0200 Subject: [PATCH 10/68] OP-3426 - fix source zip file for Nuke --- tests/integration/hosts/nuke/test_deadine_publish_in_nuke.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/integration/hosts/nuke/test_deadine_publish_in_nuke.py b/tests/integration/hosts/nuke/test_deadine_publish_in_nuke.py index 2ab886a2576..ddeae4bfa0d 100644 --- a/tests/integration/hosts/nuke/test_deadine_publish_in_nuke.py +++ b/tests/integration/hosts/nuke/test_deadine_publish_in_nuke.py @@ -29,7 +29,7 @@ class TestDeadlinePublishInNuke(NukeDeadlinePublishTestClass): # https://drive.google.com/file/d/1SUurHj2aiQ21ZIMJfGVBI2KjR8kIjBGI/view?usp=sharing # noqa: E501 TEST_FILES = [ ("1SeWprClKhWMv2xVC9AcnekIJFExxnp_b", - "test_nuke_deadlinepublish.zip", "") + "test_nuke_deadline_publish.zip", "") ] APP = "nuke" From 8b29406c062140df22995f66912dd0b7e62bf230 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 18 Oct 2022 12:02:33 +0200 Subject: [PATCH 11/68] OP-3426 - cleaned up test_projece DB dump Now contains only project and single asset document --- tests/resources/test_data.zip | Bin 7350 -> 4979 bytes 1 file changed, 0 insertions(+), 0 deletions(-) diff --git a/tests/resources/test_data.zip b/tests/resources/test_data.zip index 0faab86b37d5c7d1224e8a92cca766ed80536718..b37a30ed7a73b96d2c6398207657607b63fac0bc 100644 GIT binary patch delta 2196 zcmZXVc{JN;7sr2s)DonXSdv&G$1arCR!S{Fg|W1#u~iz>P8SVh8e2k9EkFA{C{v7T zm5Q~fJuix(sGU}&V=sz!hR%8adEa}UyPWfU&%O6~{`u5tZ_89e*+6Ykc+EQwHOOg8 zH(NVyAKrW>6ae@I0YC+WLq$OmK_r!QJ1}7Sno}T}fY5u+hhG*_MyBT2ic3ab@NM+Z zzUn`D-Y3W0A64rejiPyc7Mn5})9GjWk#0Dy;*J0gl_My{pb1z=mH^^}W3FPcGy80k z!2_QsH!n<;<0b9(JI{SoJ3h0y7YlSbtGkinP20q;6@fF(2VozI1D`EZEo7Ifg#kn9 
z*=2QhMntWaZW5xSHu!LWf%HhGRqJ!>myFgzqsHQI--3OdgYn;b zxI~qL-`+%J;tSXK?^;r*rQDi(03tE9B_y<(Q^4`B+$#0Zgtq&b{>P`V2OlXU+s%zT z+*|6RqXhQ!E}ISG7-dCs>o_xjREoS{gH`|DrM z&(|I@J8;a>IrH5t)44RZ?aLU)?YjD6zKR13XHq+nosGxgib+0|E9Oc*UYxMYuA8)RKpSD+x8YUt%r2O9J|;1Gg|3443XhoV}*=W|&y>@0aNzav5`- zkwZ7M;vt<`>lk@^&!BW+{Mejfd$&kgle(B5&LNOk>nMD4p`ZlsYp6W=SZNQBl@Pk< z>XrQH0TB|+G^EmP^kX12|I?IPPTi_16bmdiYhNmE@*BDE596VF)C<%pU8rQnIhv!? zBz=1>@vN}w)5|}*X;Y!MG^RsrSS{rA^Gz`-=wFAWQpf-U-}Z`O&dT;2Er(j{%d0IR z12=YQl8K4*yBF%?y8Ex!Y+}S_``(#Vwz`lz_k`m$spg%GhrL5ybhr60Pd{nOSmpWA zOiNen4{g1DR|b^pD_MQ`s?-h2T)`9_N2mb_;vN8#0>}Qqh>R?HN4YsMksn}eNzZw zsP7YZsayCtS!F`XRia}&wQz3OrL><`g_CkHRk#d~{t~B?{eUV#CW2#-|1DS1>&|K0JK2tSXOlQc`;Iy6-Wk2KAub7+=f!a_0N$-GroP zW%keJN6rMf2~1U@?08dSZ4(5hQ;ju{%{Qz?NZQfnblqHv2FmvBS^;a>p?J8`TZG@sP#0F zmW(xE)-LJ$YKfWrzPR2kjR=*lUP2VNGA_JqzKIkK$Jwj}LFfd~YX?@+>3V1-8>0 zkt;n>bMLRA^3o-1)!vOk>K{=%+!0nOP?yNNB>$bU7u2{Sr$7(e9&usXI(N=s!@g~f?vm@L zynk-+vgy(j(ud_89(>EyNi1&}vMh7*!p{pDncDSFXgiaJhVx+N)R)6D?kTv^oex$P zvPrGP#T28qiW`6HvqJjrXUayljY==7G1~=xG0%(?xj5ZS#vO$fJt|wLzqr0U`vr8- z-yLe7ro=6cv@kMW3q};j>TGa2k{jXd!)1TlLA3V7c3*n$8~6+z-N!~0s@IKVFw%BB zss#SjA?Hu?EO_#@QGL{`!jMLcs z;awAdg)(rVZWgtw-%y;FQ%N@uS(k3K1gBXO-fM|3>2xQvj^-zC#Rtp=xpv=-l^wWl zXj&85IS>E@-~vtnNE!hregy!a;Mlc}{f@`&JD@Q1%rOV?)EDTI(|vsJDQqL~$XO2m zwwKq>$21ZbhnI7*njjLGX7A_*p{7C}eyj?8?6#S;R#6CL$V+&#QGd|d2_C>(`xf%> zLP$@j6*do^Qm;ziYtAi=Jm*=d^*qv9rG#(_{V~gLOV#T2ujY4g#KrYJwk*O9=aMDFP~> z5R(6g_T!iRYm&!F?_c3D>A?gP{D=5|zu`p82Aqf;cz$#h5tKR}f`N;k-25>3aa7<1 zv1kIs{Uo8mXtV}I5e7D*4Ir*C@H^TVk_rR$kYb=aMh?;s0~0V}{BQscv~mDIKL1IR Fe*i~{;(Gu9 delta 4380 zcmZ{n1yB@Tx4@UB(`8vYrDJKNyPFjdK|*S21ZfahT0oXsKxp0S6 zkl?ynXTSyLFYI6VdqY2{ceEg~1bKrh9!wz(C^3rI@?<7Mf`!cQA-ADu$Ey%2WzCBI z;IotMmmcQ`KdTn2?ol)kRfMrv0%G)4^iu=o;v@=1e4tkX$g8EbU1_?4C+r92QB;;0 zwKD+*A`Rhw)tqo(&7eiEI8oRI0i>^L&&FC%wZ(T-YEL~OjyzY#Rt2w;NYiX*mHE~kfXq{-ljKJT8q44)s2?j~SAdXi;h&cAHAn@G4 zZ!3i5r+wY!0cE}BnKuKeKG+qbAtEoY$VlH9P$v8IvT`>M@DjX_QRb!Uj6q&ao_hag zsFl@2odbVuUG5=uUbts;R5$#UhcOp*vw)nh%KM&d`P z3DJA>_C%5QTr$JzWU~qUUlRD|UG9&X5h!&13}K1GGz{@=kl-;&bmD z%9NLhIYvshw$QGcaz?iVf#tT^(Li`NOtTtDd_5NuW}uJ~$iQ3^SC#Zgbk=`{GuU06 zu!mVCV3hR^vtr->TOyrrd%TJ<_`^os#F^x?an6n&m<~1$1ONmO0Bqv74XcA$87lz* z!1bRY{7=2Q!+mV*ZG3D5T>ca;JZX$9LI|Yx8@{e@OOysP-S0;#Sd;A92_Z{36iKzO z{mob2)OBs6yE1Vt6r3G3%BYh&j~^<@kup)xT+-XMH*`}-GUQTo;v_R^X1*4K)5;xc z;kp7D^M9ioawAMY%GRVX^JvkS@O|Kqy)KB(p85*;N%IKw9}!n+JV+vf_?n@YZ#E+`02J`|4Q}f>px9+ zED8V!{<#<}Dw&)T@wX2TpIEvvJf`fH=@4@#`rL@r)*(7Fh|dj@6Wzb8k@#ppQ5lD= zZwuy98jXQ>di(pYFJ$iCN%}Dqcc#dng)LIEGX(&c!l(za71qf?EXe>YaD{O=NU$tP z78d7y)Fmn-nzs^=Yf3YdUW8u$W7s|p-Chl2*C54{tEhq6k{kaPt zed^5}m_tuZQu41$H(UEJ4+ML9m9J?yR z^S9e|jEIHfE($h*OE=LI(xt3o^^OfxCJ=pyw0hvvqjGe__I7otdG|qCld}AvgN{{$ zN(Y@3V6s_-AOFBxwahpt%UHA9*_^*vZ%f^}c`Jea}}!Gedy`^6#_Z1?5eXKDELIq37qp{1;Mye4#69NDaEZl`j} zq1(YoHr}7?PVPKZ&{CG2v+o~qlv+aTfUtdov0N;zbmZqck@I52mAF0+>@PYMi=Pa5 z!&QSQETi{FbIEiC!B#(1kd?(dr>5^x6NoU? 
zZ874a0v-OEyFd$AyU>d(WtE(PaWW6QJP-erm%~H-*li)d&TRYLJqbv@=}V2)-DT>r zxo}9Ot({c$0xGe3gD`>s1n%o@avFgzlMRK;m`o1@{=0(JH%d!&vY{YKzY&ME>+6dO z*8=ZT=*AjwW))**gl(<>v`Ek=LX2f}VNzZ&bbJ6`M+rF94YE`_IRMZ?nvX5<;9i6A zzlN(o+k-SPzG9sig~d;3D(qI5^<;^|8f@4mEp&B(T~im6wU&@rUTkI5Fh#aO^78}u zg>$%vgyj<9yFxhiN|*@L7;L>+j0X)>c3^m9V$P2nT1}-gY`|sWq3lwRDBB(K^;^;* z;pPIr8rF|r)eETA)+WC-8ky*vp6WyD5impsWm8&b{IpxH7W8ak(rowb!-cG5IeuII zlE=urV`jY7=nyo}YBJNr@#q_}h66WI^Nz$jR;%|rk|tQt^E!_n@J$DD(3C-=-Rp0G zaRcOU_C+TEUt@KnJgsaI`m37kdT?u*GV7(2MpYb7R3(Wt?%;aV0^9qt9Bb0>X6ESU zH$iI*gI8xz-xdpcD ztkuXk*8ZMh&UsGYiEvK1#;SDcGq;g_R{Ppw!|Ozj$}Mpj*Ao(u-cM3mvrXTGZBkQ$ zdy5Y(ICJ78D(UG-`&L#HkeVoDG16BELZw1K%r$pk+lJB)5hHXP`B_(F{_ufCP-G#A z4pmRmXb4=!OQtW?Ie;;+tHhU-g3zjNkXvpoO4&OS0&Ym_ zdg5zL3R&&K-e|ja;xX%YzqL%xW$mx+P*#@!&(KI!^(Q~aCwp=vJZf$yqo?H=lcYoO zOcC?X*=*fM@zuH#E^+sFaw1b#(`GH6b@*BD>Mfa3ys#}ax8?#7?{c+uCq>PqCMEZa(vr4)4A8&npRkRIsT>IA4@1Uc={N#uA^0dIvD;6|7+OZx&oAaG#V-VWGoL2N>x=$o& z;g_sKUGgY~`k@~j`RoXFV8p4HAR(pCp0&Hv1LTQ*Jkq^&kPWw1E5pZw6q`FJ(cPThfZ`$&jbs_^WOysKNh zomj`^QcP~`9c@*OQhjisq61yyJPy=PqdKjWZ^p%p$Dqupgr`Z`TZ7*q{iBN>2X5<1 zcwn6pa7&6o4Oa&zw18ebFyXt-Ma&V3`Pceme~)dPS&MPSj8BZ}8QBv@YE+1{Lu~t- zd_}6G3jX`Vc8pt%G%NS@Gkw(UgZ=;0vZLOB};X~^qB9{E=A2($xh&YVl zrlvQJ3?|*V54*b?$IsF?i2Zav4Wri>;9VO6IO^rKEo3|eOyb!p#8Hld;Q zwJSF!Z{E_GJM;=?%f4Vd@m)Tzfr1wv1bHfC!6^RIlM?fmGjc}1M^e`@dy-^CoYs{c zr3){rn1z&>EI4EYZoICCvN?tgUXw-OgbpA2zElW{XN-=={_PJ-AwkqCg$a*i6~Dov zsXe0Vy`BqY1=t-a0g9y`R?NDXW@I>|W!kK*XFqQZkir(=^lL5MS>)@$<%Xqo5kOv) zm97db8tV0#;w!2~s|?!Xsp|gnzYpZt2H7OO1U>iDj17CR4tmJ<+A(eVg7Hx~_oj{T zy20~=>!5@6ALcfKf!PS^$DM7_-%y5+=U!7hp2N03f9iYHR^#WLy=laJ&`wvL*xqVCkp!Uak; z0aM$ckC_UBFqhSL>3e12%CxkG(uU}asHsxtwzb@yCsGoIi(hlN*vhx5NTi{}yE_p* zzvNjDAIi0O)!*sBK5WxS# zTz|OhuV4XK{^(w8RQNrblw(5hzk~k=g`u|Y@dDd%{^Nl11QMcTK?1-_+95d|8b^7X_A8o|9N8@a!Mcu{GUZ#U@HacE4do* y`0qrIQUr?w+aRa_uoCb9g6aUP0$C|hSQKhNvp?HVFp_}*V1N%U005=_o8n*3r4_3H From 7c904861fd36a623547b28d99ab15319476fd59b Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 18 Oct 2022 12:35:59 +0200 Subject: [PATCH 12/68] OP-3426 - test class for Maya and Deadline --- .../maya/test_deadline_publish_in_maya.py | 49 ++++++++++++++----- 1 file changed, 36 insertions(+), 13 deletions(-) diff --git a/tests/integration/hosts/maya/test_deadline_publish_in_maya.py b/tests/integration/hosts/maya/test_deadline_publish_in_maya.py index f14310a16cd..5621a038330 100644 --- a/tests/integration/hosts/maya/test_deadline_publish_in_maya.py +++ b/tests/integration/hosts/maya/test_deadline_publish_in_maya.py @@ -4,15 +4,13 @@ class TestDeadlinePublishInMaya(MayaDeadlinePublishTestClass): """Basic test case for publishing in Maya - Shouldnt be running standalone only via 'runtests' pype command! (??) - - Uses generic TestCase to prepare fixtures for test data, testing DBs, - env vars. Always pulls and uses test data from GDrive! Opens Maya, runs publish on prepared workile. + Sends file to be rendered on Deadline. + Then checks content of DB (if subset, version, representations were created. Checks tmp folder if all expected files were published. 
@@ -22,10 +20,11 @@ class TestDeadlinePublishInMaya(MayaDeadlinePublishTestClass): {OPENPYPE_ROOT}/.venv/Scripts/python.exe {OPENPYPE_ROOT}/start.py runtests ../tests/integration/hosts/maya # noqa: E501 """ - PERSIST = True + PERSIST = False TEST_FILES = [ - ("1BTSIIULJTuDc8VvXseuiJV_fL6-Bu7FP", "test_maya_publish.zip", "") + ("1dDY7CbdFXfRksGVoiuwjhnPoTRCCf5ea", + "test_maya_deadline_publish.zip", "") ] APP = "maya" @@ -37,7 +36,7 @@ class TestDeadlinePublishInMaya(MayaDeadlinePublishTestClass): def test_db_asserts(self, dbcon, publish_finished): """Host and input data dependent expected results in DB.""" print("test_db_asserts") - assert 5 == dbcon.count_documents({"type": "version"}), \ + assert 3 == dbcon.count_documents({"type": "version"}), \ "Not expected no of versions" assert 0 == dbcon.count_documents({"type": "version", @@ -48,22 +47,46 @@ def test_db_asserts(self, dbcon, publish_finished): "name": "modelMain"}), \ "modelMain subset must be present" + assert 1 == dbcon.count_documents({ + "type": "subset", "name": "renderTestTaskMain_beauty"}), \ + "renderTestTaskMain_beauty subset must be present" + assert 1 == dbcon.count_documents({"type": "subset", - "name": "workfileTest_task"}), \ - "workfileTest_task subset must be present" + "name": "workfileTesttask"}), \ + "workfileTesttask subset must be present" - assert 11 == dbcon.count_documents({"type": "representation"}), \ + assert 6 == dbcon.count_documents({"type": "representation"}), \ "Not expected no of representations" - assert 2 == dbcon.count_documents({"type": "representation", + assert 1 == dbcon.count_documents({"type": "representation", "context.subset": "modelMain", "context.ext": "abc"}), \ "Not expected no of representations with ext 'abc'" - assert 2 == dbcon.count_documents({"type": "representation", + assert 1 == dbcon.count_documents({"type": "representation", "context.subset": "modelMain", "context.ext": "ma"}), \ - "Not expected no of representations with ext 'abc'" + "Not expected no of representations with ext 'ma'" + + assert 1 == dbcon.count_documents({"type": "representation", + "context.subset": "workfileTesttask", # noqa + "context.ext": "mb"}), \ + "Not expected no of representations with ext 'mb'" + + assert 1 == dbcon.count_documents({"type": "representation", + "context.subset": "renderTestTaskMain_beauty", # noqa + "context.ext": "exr"}), \ + "Not expected no of representations with ext 'exr'" + + assert 1 == dbcon.count_documents({"type": "representation", + "context.subset": "renderTestTaskMain_beauty", # noqa + "context.ext": "jpg"}), \ + "Not expected no of representations with ext 'jpg'" + + assert 1 == dbcon.count_documents({"type": "representation", + "context.subset": "renderTestTaskMain_beauty", # noqa + "context.ext": "h264_exr"}), \ + "Not expected no of representations with ext 'h264_exr'" if __name__ == "__main__": From 2436fabd7f695dc55461f349258ea662eccf3bbd Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 18 Oct 2022 12:54:00 +0200 Subject: [PATCH 13/68] OP-3426 - update db asserts for simple Maya test --- tests/integration/hosts/maya/test_publish_in_maya.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/tests/integration/hosts/maya/test_publish_in_maya.py b/tests/integration/hosts/maya/test_publish_in_maya.py index 39b3f122633..a5a3e35b26a 100644 --- a/tests/integration/hosts/maya/test_publish_in_maya.py +++ b/tests/integration/hosts/maya/test_publish_in_maya.py @@ -37,7 +37,7 @@ class TestPublishInMaya(MayaLocalPublishTestClass): def 
test_db_asserts(self, dbcon, publish_finished): """Host and input data dependent expected results in DB.""" print("test_db_asserts") - assert 5 == dbcon.count_documents({"type": "version"}), \ + assert 2 == dbcon.count_documents({"type": "version"}), \ "Not expected no of versions" assert 0 == dbcon.count_documents({"type": "version", @@ -52,7 +52,7 @@ def test_db_asserts(self, dbcon, publish_finished): "name": "workfileTest_task"}), \ "workfileTest_task subset must be present" - assert 11 == dbcon.count_documents({"type": "representation"}), \ + assert 5 == dbcon.count_documents({"type": "representation"}), \ "Not expected no of representations" assert 2 == dbcon.count_documents({"type": "representation", @@ -63,7 +63,12 @@ def test_db_asserts(self, dbcon, publish_finished): assert 2 == dbcon.count_documents({"type": "representation", "context.subset": "modelMain", "context.ext": "ma"}), \ - "Not expected no of representations with ext 'abc'" + "Not expected no of representations with ext 'ma'" + + assert 1 == dbcon.count_documents({"type": "representation", + "context.subset": "workfileTest_task", # noqa + "context.ext": "mb"}), \ + "Not expected no of representations with ext 'mb'" if __name__ == "__main__": From eb772dd05af4bc29a947ec2e7249311aa871f285 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 18 Oct 2022 12:58:03 +0200 Subject: [PATCH 14/68] OP-3426 - update db asserts for PS Removed integrate of thumbnails by default, eg less representations. --- tests/integration/hosts/photoshop/test_publish_in_photoshop.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/integration/hosts/photoshop/test_publish_in_photoshop.py b/tests/integration/hosts/photoshop/test_publish_in_photoshop.py index 5387bbe51e1..d4ab3e77346 100644 --- a/tests/integration/hosts/photoshop/test_publish_in_photoshop.py +++ b/tests/integration/hosts/photoshop/test_publish_in_photoshop.py @@ -72,7 +72,7 @@ def test_db_asserts(self, dbcon, publish_finished): name="workfileTest_task")) failures.append( - DBAssert.count_of_types(dbcon, "representation", 8)) + DBAssert.count_of_types(dbcon, "representation", 6)) additional_args = {"context.subset": "imageMainForeground", "context.ext": "png"} From c27d8a26786fe393f6fe83f518497469c4e39ecf Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 18 Oct 2022 13:05:51 +0200 Subject: [PATCH 15/68] OP-3426 - refactor - changed style of db asserts for Maya --- .../maya/test_deadline_publish_in_maya.py | 107 +++++++++--------- .../hosts/maya/test_publish_in_maya.py | 66 +++++------ 2 files changed, 90 insertions(+), 83 deletions(-) diff --git a/tests/integration/hosts/maya/test_deadline_publish_in_maya.py b/tests/integration/hosts/maya/test_deadline_publish_in_maya.py index 5621a038330..73649455817 100644 --- a/tests/integration/hosts/maya/test_deadline_publish_in_maya.py +++ b/tests/integration/hosts/maya/test_deadline_publish_in_maya.py @@ -1,3 +1,4 @@ +from tests.lib.assert_classes import DBAssert from tests.integration.hosts.maya.lib import MayaDeadlinePublishTestClass @@ -36,57 +37,61 @@ class TestDeadlinePublishInMaya(MayaDeadlinePublishTestClass): def test_db_asserts(self, dbcon, publish_finished): """Host and input data dependent expected results in DB.""" print("test_db_asserts") - assert 3 == dbcon.count_documents({"type": "version"}), \ - "Not expected no of versions" - - assert 0 == dbcon.count_documents({"type": "version", - "name": {"$ne": 1}}), \ - "Only versions with 1 expected" - - assert 1 == dbcon.count_documents({"type": "subset", - 
"name": "modelMain"}), \ - "modelMain subset must be present" - - assert 1 == dbcon.count_documents({ - "type": "subset", "name": "renderTestTaskMain_beauty"}), \ - "renderTestTaskMain_beauty subset must be present" - - assert 1 == dbcon.count_documents({"type": "subset", - "name": "workfileTesttask"}), \ - "workfileTesttask subset must be present" - - assert 6 == dbcon.count_documents({"type": "representation"}), \ - "Not expected no of representations" - - assert 1 == dbcon.count_documents({"type": "representation", - "context.subset": "modelMain", - "context.ext": "abc"}), \ - "Not expected no of representations with ext 'abc'" - - assert 1 == dbcon.count_documents({"type": "representation", - "context.subset": "modelMain", - "context.ext": "ma"}), \ - "Not expected no of representations with ext 'ma'" - - assert 1 == dbcon.count_documents({"type": "representation", - "context.subset": "workfileTesttask", # noqa - "context.ext": "mb"}), \ - "Not expected no of representations with ext 'mb'" - - assert 1 == dbcon.count_documents({"type": "representation", - "context.subset": "renderTestTaskMain_beauty", # noqa - "context.ext": "exr"}), \ - "Not expected no of representations with ext 'exr'" - - assert 1 == dbcon.count_documents({"type": "representation", - "context.subset": "renderTestTaskMain_beauty", # noqa - "context.ext": "jpg"}), \ - "Not expected no of representations with ext 'jpg'" - - assert 1 == dbcon.count_documents({"type": "representation", - "context.subset": "renderTestTaskMain_beauty", # noqa - "context.ext": "h264_exr"}), \ - "Not expected no of representations with ext 'h264_exr'" + failures = [] + failures.append(DBAssert.count_of_types(dbcon, "version", 3)) + + failures.append( + DBAssert.count_of_types(dbcon, "version", 0, name={"$ne": 1})) + + failures.append( + DBAssert.count_of_types(dbcon, "subset", 1, + name="modelMain")) + + failures.append( + DBAssert.count_of_types(dbcon, "subset", 1, + name="renderTestTaskMain_beauty")) + + failures.append( + DBAssert.count_of_types(dbcon, "subset", 1, + name="workfileTesttask")) + + failures.append(DBAssert.count_of_types(dbcon, "representation", 6)) + + additional_args = {"context.subset": "modelMain", + "context.ext": "abc"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + additional_args = {"context.subset": "modelMain", + "context.ext": "ma"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + additional_args = {"context.subset": "modelMain", + "context.ext": "mb"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + additional_args = {"context.subset": "renderTestTaskMain_beauty", + "context.ext": "exr"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + additional_args = {"context.subset": "renderTestTaskMain_beauty", + "context.ext": "jpg"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + additional_args = {"context.subset": "renderTestTaskMain_beauty", + "context.ext": "h264_exr"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) if __name__ == "__main__": diff --git a/tests/integration/hosts/maya/test_publish_in_maya.py b/tests/integration/hosts/maya/test_publish_in_maya.py index a5a3e35b26a..bff7ccb2f7c 100644 --- 
a/tests/integration/hosts/maya/test_publish_in_maya.py +++ b/tests/integration/hosts/maya/test_publish_in_maya.py @@ -1,3 +1,4 @@ +from tests.lib.assert_classes import DBAssert from tests.integration.hosts.maya.lib import MayaLocalPublishTestClass @@ -37,38 +38,39 @@ class TestPublishInMaya(MayaLocalPublishTestClass): def test_db_asserts(self, dbcon, publish_finished): """Host and input data dependent expected results in DB.""" print("test_db_asserts") - assert 2 == dbcon.count_documents({"type": "version"}), \ - "Not expected no of versions" - - assert 0 == dbcon.count_documents({"type": "version", - "name": {"$ne": 1}}), \ - "Only versions with 1 expected" - - assert 1 == dbcon.count_documents({"type": "subset", - "name": "modelMain"}), \ - "modelMain subset must be present" - - assert 1 == dbcon.count_documents({"type": "subset", - "name": "workfileTest_task"}), \ - "workfileTest_task subset must be present" - - assert 5 == dbcon.count_documents({"type": "representation"}), \ - "Not expected no of representations" - - assert 2 == dbcon.count_documents({"type": "representation", - "context.subset": "modelMain", - "context.ext": "abc"}), \ - "Not expected no of representations with ext 'abc'" - - assert 2 == dbcon.count_documents({"type": "representation", - "context.subset": "modelMain", - "context.ext": "ma"}), \ - "Not expected no of representations with ext 'ma'" - - assert 1 == dbcon.count_documents({"type": "representation", - "context.subset": "workfileTest_task", # noqa - "context.ext": "mb"}), \ - "Not expected no of representations with ext 'mb'" + failures = [] + failures.append(DBAssert.count_of_types(dbcon, "version", 2)) + + failures.append( + DBAssert.count_of_types(dbcon, "version", 0, name={"$ne": 1})) + + failures.append( + DBAssert.count_of_types(dbcon, "subset", 1, + name="modelMain")) + + failures.append( + DBAssert.count_of_types(dbcon, "subset", 1, + name="workfileTest_task")) + + failures.append(DBAssert.count_of_types(dbcon, "representation", 5)) + + additional_args = {"context.subset": "modelMain", + "context.ext": "abc"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 2, + additional_args=additional_args)) + + additional_args = {"context.subset": "modelMain", + "context.ext": "ma"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 2, + additional_args=additional_args)) + + additional_args = {"context.subset": "workfileTest_task", + "context.ext": "mb"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) if __name__ == "__main__": From 0056be47b1c367a2fa9b99a94b2b1443a92e27a5 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 18 Oct 2022 13:23:22 +0200 Subject: [PATCH 16/68] OP-3426 - fix - wrong testing class used --- .../aftereffects/test_publish_in_aftereffects_multiframe.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py index c882e0f9b2c..dd61e72c6f5 100644 --- a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py +++ b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py @@ -1,12 +1,12 @@ import logging from tests.lib.assert_classes import DBAssert -from tests.integration.hosts.aftereffects.lib import AfterEffectsTestClass +from tests.integration.hosts.aftereffects.lib import AELocalPublishTestClass log = 
logging.getLogger("test_publish_in_aftereffects") -class TestPublishInAfterEffects(AfterEffectsTestClass): +class TestPublishInAfterEffects(AELocalPublishTestClass): """Basic test case for publishing in AfterEffects Should publish 5 frames From b98e03726df930ed2b4043a547cb49e222c16300 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 19 Oct 2022 12:27:24 +0200 Subject: [PATCH 17/68] OP-4180 - renamed DL testing class for Nuke Updated test scene name --- tests/integration/hosts/nuke/lib.py | 2 +- ...ublish_in_nuke.py => test_deadline_publish_in_nuke.py} | 8 ++++++++ tests/integration/hosts/nuke/test_publish_in_nuke.py | 8 ++++++++ 3 files changed, 17 insertions(+), 1 deletion(-) rename tests/integration/hosts/nuke/{test_deadine_publish_in_nuke.py => test_deadline_publish_in_nuke.py} (90%) diff --git a/tests/integration/hosts/nuke/lib.py b/tests/integration/hosts/nuke/lib.py index e3c610199b0..ed2af38d1c5 100644 --- a/tests/integration/hosts/nuke/lib.py +++ b/tests/integration/hosts/nuke/lib.py @@ -15,7 +15,7 @@ def last_workfile_path(self, download_test_data, output_folder_url): """Get last_workfile_path from source data. """ - source_file_name = "test_project_test_asset_CompositingInNuke_v001.nk" + source_file_name = "test_project_test_asset_TestTask_v001.nk" src_path = os.path.join(download_test_data, "input", "workfile", diff --git a/tests/integration/hosts/nuke/test_deadine_publish_in_nuke.py b/tests/integration/hosts/nuke/test_deadline_publish_in_nuke.py similarity index 90% rename from tests/integration/hosts/nuke/test_deadine_publish_in_nuke.py rename to tests/integration/hosts/nuke/test_deadline_publish_in_nuke.py index ddeae4bfa0d..98b463b9ae7 100644 --- a/tests/integration/hosts/nuke/test_deadine_publish_in_nuke.py +++ b/tests/integration/hosts/nuke/test_deadline_publish_in_nuke.py @@ -12,6 +12,14 @@ class TestDeadlinePublishInNuke(NukeDeadlinePublishTestClass): Uses generic TestCase to prepare fixtures for test data, testing DBs, env vars. + !!! + It expects modified path in WriteNode, + use '[python {nuke.script_directory()}]' instead of regular root + dir (eg. instead of `c:/projects`). + Access file path by selecting WriteNode group, CTRL+Enter, update file + input + !!! + Opens Nuke, run publish on prepared workile. Then checks content of DB (if subset, version, representations were diff --git a/tests/integration/hosts/nuke/test_publish_in_nuke.py b/tests/integration/hosts/nuke/test_publish_in_nuke.py index 88defdc37a7..a3947a30ae5 100644 --- a/tests/integration/hosts/nuke/test_publish_in_nuke.py +++ b/tests/integration/hosts/nuke/test_publish_in_nuke.py @@ -12,6 +12,14 @@ class TestPublishInNuke(NukeLocalPublishTestClass): Uses generic TestCase to prepare fixtures for test data, testing DBs, env vars. + !!! + It expects modified path in WriteNode, + use '[python {nuke.script_directory()}]' instead of regular root + dir (eg. instead of `c:/projects/test_project/test_asset/test_task`). + Access file path by selecting WriteNode group, CTRL+Enter, update file + input + !!! + Opens Nuke, run publish on prepared workile. 
Then checks content of DB (if subset, version, representations were From 29ca3f85566fb321a4bbc531d2a64fda4b2dc989 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 19 Oct 2022 12:29:20 +0200 Subject: [PATCH 18/68] OP-4180 - change output folder It should be in `output` subfolder, not in root --- tests/integration/hosts/aftereffects/lib.py | 1 + tests/integration/hosts/maya/lib.py | 1 + tests/lib/testing_classes.py | 3 ++- 3 files changed, 4 insertions(+), 1 deletion(-) diff --git a/tests/integration/hosts/aftereffects/lib.py b/tests/integration/hosts/aftereffects/lib.py index ca637edba68..0f7513c7d37 100644 --- a/tests/integration/hosts/aftereffects/lib.py +++ b/tests/integration/hosts/aftereffects/lib.py @@ -21,6 +21,7 @@ def last_workfile_path(self, download_test_data, output_folder_url): "workfile", "test_project_test_asset_TestTask_v001.aep") dest_folder = os.path.join(download_test_data, + "output", self.PROJECT, self.ASSET, "work", diff --git a/tests/integration/hosts/maya/lib.py b/tests/integration/hosts/maya/lib.py index 6610fac118a..a0c52141818 100644 --- a/tests/integration/hosts/maya/lib.py +++ b/tests/integration/hosts/maya/lib.py @@ -21,6 +21,7 @@ def last_workfile_path(self, download_test_data, output_folder_url): "workfile", "test_project_test_asset_TestTask_v001.mb") dest_folder = os.path.join(output_folder_url, + "output", self.PROJECT, self.ASSET, "work", diff --git a/tests/lib/testing_classes.py b/tests/lib/testing_classes.py index 53eeae10cbd..f0899e3e189 100644 --- a/tests/lib/testing_classes.py +++ b/tests/lib/testing_classes.py @@ -317,7 +317,7 @@ def test_folder_structure_same(self, dbcon, publish_finished, Compares only presence, not size nor content! """ - published_dir_base = output_folder_url + published_dir_base = os.path.join(output_folder_url, "output") expected_dir_base = os.path.join(download_test_data, "expected") @@ -351,6 +351,7 @@ def publish_finished(self, dbcon, launched_app, download_test_data, raise ValueError("Timeout reached") metadata_json = glob.glob(os.path.join(download_test_data, + "output", "**/*_metadata.json"), recursive=True) if not metadata_json: From 76de4e4bbba887b3f8fa96a950d51a26cadb4d45 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 19 Oct 2022 13:39:57 +0200 Subject: [PATCH 19/68] OP-3426 - added support for automatic tests into GlobalJobPreLoad Jobs sent to DL must propagate flag IS_TEST to note that job is an automatic tests, it should use different DBs from regular jobs. 
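As a rough illustration of the propagation described above (not part of the patch): each submitter keeps a whitelist of process environment variables that should travel with the Deadline job, and `IS_TEST` is simply added to that whitelist so render and publish jobs on the farm can switch to the test databases. A sketch of how such a whitelist typically ends up in the JobInfo payload, assuming Deadline's `EnvironmentKeyValue<N>` convention:

```python
# Illustrative sketch only: re-export whitelisted environment variables on
# the render node by writing them into the Deadline JobInfo payload.
import os

ENV_KEYS = [
    "AVALON_PROJECT",
    "AVALON_ASSET",
    "AVALON_TASK",
    "AVALON_APP_NAME",
    "OPENPYPE_VERSION",
    "IS_TEST",  # added by this patch series for automatic tests
]

def add_job_environment(job_info, keys=ENV_KEYS):
    """Fill EnvironmentKeyValue<N> entries from the current environment."""
    index = 0
    for key in keys:
        value = os.environ.get(key)
        if not value:
            continue
        job_info["EnvironmentKeyValue{}".format(index)] = "{}={}".format(key, value)
        index += 1
    return job_info
```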
--- igniter/tools.py | 3 ++- .../publish/submit_aftereffects_deadline.py | 3 ++- .../plugins/publish/submit_harmony_deadline.py | 3 ++- .../plugins/publish/submit_maya_deadline.py | 3 ++- .../plugins/publish/submit_publish_job.py | 5 ++++- .../custom/plugins/GlobalJobPreLoad.py | 3 +++ start.py | 16 ++++++++++++++-- .../hosts/maya/test_deadline_publish_in_maya.py | 2 +- 8 files changed, 30 insertions(+), 8 deletions(-) diff --git a/igniter/tools.py b/igniter/tools.py index a9d592acf03..bd812edb92f 100644 --- a/igniter/tools.py +++ b/igniter/tools.py @@ -153,7 +153,8 @@ def get_openpype_global_settings(url: str) -> dict: # Create mongo connection client = MongoClient(url, **kwargs) # Access settings collection - col = client["openpype"]["settings"] + openpype_db = os.environ.get("OPENPYPE_DATABASE_NAME") or "openpype" + col = client[openpype_db]["settings"] # Query global settings global_settings = col.find_one({"type": "global_settings"}) or {} # Close Mongo connection diff --git a/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py b/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py index 0c1ffa6bd74..bdf492bdcb6 100644 --- a/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py @@ -83,7 +83,8 @@ def get_job_info(self): "AVALON_APP_NAME", "OPENPYPE_DEV", "OPENPYPE_LOG_NO_COLORS", - "OPENPYPE_VERSION" + "OPENPYPE_VERSION", + "IS_TEST" ] # Add mongo url if it's enabled if self._instance.context.data.get("deadlinePassMongoUrl"): diff --git a/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py b/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py index 6327143623e..b90c41d67b8 100644 --- a/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py @@ -275,7 +275,8 @@ def get_job_info(self): "AVALON_APP_NAME", "OPENPYPE_DEV", "OPENPYPE_LOG_NO_COLORS", - "OPENPYPE_VERSION" + "OPENPYPE_VERSION", + "IS_TEST" ] # Add mongo url if it's enabled if self._instance.context.data.get("deadlinePassMongoUrl"): diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index c669f9a8140..315c9461c2f 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -158,7 +158,8 @@ def get_job_info(self): "AVALON_TASK", "AVALON_APP_NAME", "OPENPYPE_DEV", - "OPENPYPE_VERSION" + "OPENPYPE_VERSION", + "IS_TEST" ] # Add mongo url if it's enabled if self._instance.context.data.get("deadlinePassMongoUrl"): diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index 0e418ccf778..e67b1cd00ec 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -142,7 +142,9 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): "OPENPYPE_RENDER_JOB", "OPENPYPE_PUBLISH_JOB", "OPENPYPE_MONGO", - "OPENPYPE_VERSION" + "OPENPYPE_VERSION", + + "IS_TEST" ] # custom deadline attributes @@ -247,6 +249,7 @@ def _submit_deadline_post_job(self, instance, job, instances): environment["OPENPYPE_USERNAME"] = instance.context.data["user"] environment["OPENPYPE_PUBLISH_JOB"] = "1" environment["OPENPYPE_RENDER_JOB"] = "0" + 
environment["IS_TEST"] = os.environ.get("IS_TEST") # Add mongo url if it's enabled if instance.context.data.get("deadlinePassMongoUrl"): mongo_url = os.environ.get("OPENPYPE_MONGO") diff --git a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py index 61b95cf06da..745a91ba81b 100644 --- a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py +++ b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py @@ -176,6 +176,9 @@ def inject_openpype_environment(deadlinePlugin): add_args['app'] = job.GetJobEnvironmentKeyValue('AVALON_APP_NAME') add_args["envgroup"] = "farm" + if job.GetJobEnvironmentKeyValue('IS_TEST'): + add_args["automatic_tests"] = "true" + if all(add_args.values()): for key, value in add_args.items(): args.append("--{}".format(key)) diff --git a/start.py b/start.py index d1198a85e48..1d14a7def85 100644 --- a/start.py +++ b/start.py @@ -486,6 +486,7 @@ def _process_arguments() -> tuple: use_version = None use_staging = False commands = [] + automatic_tests = False # OpenPype version specification through arguments use_version_arg = "--use-version" @@ -570,7 +571,11 @@ def _process_arguments() -> tuple: sys.argv.pop(idx) sys.argv.insert(idx, "tray") - return use_version, use_staging, commands + if "--automatic_tests" in sys.argv: + sys.argv.remove("--automatic_tests") + automatic_tests = True + + return use_version, use_staging, commands, automatic_tests def _determine_mongodb() -> str: @@ -997,7 +1002,7 @@ def boot(): # Process arguments # ------------------------------------------------------------------------ - use_version, use_staging, commands = _process_arguments() + use_version, use_staging, commands, automatic_tests = _process_arguments() if os.getenv("OPENPYPE_VERSION"): if use_version: @@ -1024,6 +1029,13 @@ def boot(): os.environ["OPENPYPE_DATABASE_NAME"] = \ os.environ.get("OPENPYPE_DATABASE_NAME") or "openpype" + if automatic_tests: + # change source DBs to predefined ones set for automatic testing + os.environ["IS_TEST"] = "1" + os.environ["OPENPYPE_DATABASE_NAME"] += "_tests" + avalon_db = os.environ.get("AVALON_DB") or "avalon" + os.environ["AVALON_DB"] = avalon_db + "_tests" + global_settings = get_openpype_global_settings(openpype_mongo) _print(">>> run disk mapping command ...") diff --git a/tests/integration/hosts/maya/test_deadline_publish_in_maya.py b/tests/integration/hosts/maya/test_deadline_publish_in_maya.py index 73649455817..16ff48d7fdb 100644 --- a/tests/integration/hosts/maya/test_deadline_publish_in_maya.py +++ b/tests/integration/hosts/maya/test_deadline_publish_in_maya.py @@ -21,7 +21,7 @@ class TestDeadlinePublishInMaya(MayaDeadlinePublishTestClass): {OPENPYPE_ROOT}/.venv/Scripts/python.exe {OPENPYPE_ROOT}/start.py runtests ../tests/integration/hosts/maya # noqa: E501 """ - PERSIST = False + PERSIST = True TEST_FILES = [ ("1dDY7CbdFXfRksGVoiuwjhnPoTRCCf5ea", From 5a885ac2a7cb777e6e6f5836c31c1bdfaa04b3bb Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 19 Oct 2022 13:50:41 +0200 Subject: [PATCH 20/68] OP-3426 - changed source DBs It might be better to enhance testing DBs with _tests suffix and control it with single argument (or env var). 
--- tests/lib/testing_classes.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/lib/testing_classes.py b/tests/lib/testing_classes.py index f0899e3e189..946ad671976 100644 --- a/tests/lib/testing_classes.py +++ b/tests/lib/testing_classes.py @@ -38,9 +38,9 @@ class ModuleUnitTest(BaseTest): PERSIST = False # True to not purge temporary folder nor test DB TEST_OPENPYPE_MONGO = "mongodb://localhost:27017" - TEST_DB_NAME = "test_db" + TEST_DB_NAME = "avalon_tests" TEST_PROJECT_NAME = "test_project" - TEST_OPENPYPE_NAME = "test_openpype" + TEST_OPENPYPE_NAME = "openpype_tests" TEST_FILES = [] From fde0a02621fb98e89183ef6275f70dd3f57b1b96 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 19 Oct 2022 14:44:36 +0200 Subject: [PATCH 21/68] OP-3426 - add automatic_tests argument to cli for extractenvironments --- openpype/cli.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/openpype/cli.py b/openpype/cli.py index 398d1a94c09..5f0fd1de7bf 100644 --- a/openpype/cli.py +++ b/openpype/cli.py @@ -127,7 +127,11 @@ def webpublisherwebserver(executable, upload_dir, host=None, port=None): @click.option( "--envgroup", help="Environment group (e.g. \"farm\")", default=None ) -def extractenvironments(output_json_path, project, asset, task, app, envgroup): +@click.option( + "--automatic_tests", help="Is this automatic test", default=None +) +def extractenvironments(output_json_path, project, asset, task, app, envgroup, + automatic_tests): """Extract environment variables for entered context to a json file. Entered output filepath will be created if does not exists. From 8f3b6e5c0e98f822f7c7b53e3361ae245ff89f2b Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 19 Oct 2022 15:15:03 +0200 Subject: [PATCH 22/68] OP-3426 - don't remove from sys.args Cli should have automatic_tests argument even if it is not pushed through, just for better documentation of command with --help --- start.py | 1 - 1 file changed, 1 deletion(-) diff --git a/start.py b/start.py index 1d14a7def85..5242a50f147 100644 --- a/start.py +++ b/start.py @@ -572,7 +572,6 @@ def _process_arguments() -> tuple: sys.argv.insert(idx, "tray") if "--automatic_tests" in sys.argv: - sys.argv.remove("--automatic_tests") automatic_tests = True return use_version, use_staging, commands, automatic_tests From f501c63369dfa584dbd25aca09845bd71d312dc2 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 19 Oct 2022 15:42:34 +0200 Subject: [PATCH 23/68] OP-3426 - propagate automatic_tests for publish process on DL --- openpype/cli.py | 5 ++++- openpype/hosts/nuke/api/plugin.py | 2 ++ .../deadline/plugins/publish/submit_publish_job.py | 3 +++ tests/lib/db_handler.py | 8 ++++---- 4 files changed, 13 insertions(+), 5 deletions(-) diff --git a/openpype/cli.py b/openpype/cli.py index 5f0fd1de7bf..add9d9d36e3 100644 --- a/openpype/cli.py +++ b/openpype/cli.py @@ -152,7 +152,10 @@ def extractenvironments(output_json_path, project, asset, task, app, envgroup, multiple=True) @click.option("-g", "--gui", is_flag=True, help="Show Publish UI", default=False) -def publish(paths, targets, gui): +@click.option( + "--automatic_tests", help="Is this automatic test", default=None +) +def publish(paths, targets, gui, automatic_tests): """Start CLI publishing. Publish collects json from paths provided as an argument. 
diff --git a/openpype/hosts/nuke/api/plugin.py b/openpype/hosts/nuke/api/plugin.py index 91bb90ff996..a7b0efabeed 100644 --- a/openpype/hosts/nuke/api/plugin.py +++ b/openpype/hosts/nuke/api/plugin.py @@ -409,6 +409,8 @@ def save_file(self): # create nk path path = os.path.splitext(self.path)[0] + ".nk" # save file to the path + if not os.path.exists(os.path.dirname(path)): + os.makedirs(os.path.dirname(path)) shutil.copyfile(self.instance.context.data["currentFile"], path) self.log.info("Nodes exported...") diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index e67b1cd00ec..ab3cd091794 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -266,6 +266,9 @@ def _submit_deadline_post_job(self, instance, job, instances): "--targets", "farm" ] + if os.environ.get("IS_TEST"): + args["automatic_tests"] = "1" + # Generate the payload for Deadline submission payload = { "JobInfo": { diff --git a/tests/lib/db_handler.py b/tests/lib/db_handler.py index 108ebf560d9..ae460e310b7 100644 --- a/tests/lib/db_handler.py +++ b/tests/lib/db_handler.py @@ -228,12 +228,12 @@ def _import_query(self, uri, sql_url, return query # Examples -# handler = DBHandler(uri="mongodb://localhost:27017") +handler = DBHandler(uri="mongodb://localhost:27017") # # -# backup_dir = "c:\\projects\\test_nuke_publish\\input\\dumps" +backup_dir = "c:\\projects\\test_zips\\test_maya_publish\\input\\dumps" # # # -# handler.backup_to_dump("avalon", backup_dir, True, collection="test_project") -# handler.setup_from_dump("test_db", backup_dir, True, db_name_out="avalon", collection="test_project") +#handler.backup_to_dump("avalon", backup_dir, True, collection="test_project") +handler.setup_from_dump("test_db", backup_dir, True, db_name_out="avalon", collection="test_project") # handler.setup_from_sql_file("test_db", "c:\\projects\\sql\\item.sql", # collection="test_project", # drop=False, mode="upsert") From b996acaf9c82d23bfdc5d31fd499af7254a9485d Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 19 Oct 2022 15:44:50 +0200 Subject: [PATCH 24/68] OP-3426 - disable unwanted commands Enabling only for test preparation. 
Updated test_db to currently used avalon_tests --- tests/lib/db_handler.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/tests/lib/db_handler.py b/tests/lib/db_handler.py index ae460e310b7..b80a296b607 100644 --- a/tests/lib/db_handler.py +++ b/tests/lib/db_handler.py @@ -228,15 +228,15 @@ def _import_query(self, uri, sql_url, return query # Examples -handler = DBHandler(uri="mongodb://localhost:27017") +# handler = DBHandler(uri="mongodb://localhost:27017") # # -backup_dir = "c:\\projects\\test_zips\\test_maya_publish\\input\\dumps" +# backup_dir = "c:\\projects\\test_zips\\test_maya_publish\\input\\dumps" # # # -#handler.backup_to_dump("avalon", backup_dir, True, collection="test_project") -handler.setup_from_dump("test_db", backup_dir, True, db_name_out="avalon", collection="test_project") -# handler.setup_from_sql_file("test_db", "c:\\projects\\sql\\item.sql", +# handler.backup_to_dump("avalon", backup_dir, True, collection="test_project") +# handler.setup_from_dump("avalon_tests", backup_dir, True, db_name_out="avalon", collection="test_project") +# handler.setup_from_sql_file("avalon_tests", "c:\\projects\\sql\\item.sql", # collection="test_project", # drop=False, mode="upsert") -# handler.setup_from_sql("test_db", "c:\\projects\\sql", +# handler.setup_from_sql("avalon_tests", "c:\\projects\\sql", # collection="test_project", # drop=False, mode="upsert") From e9dfcf64bb8dc646fea107c18f5eb3bcef634a63 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 19 Oct 2022 16:07:57 +0200 Subject: [PATCH 25/68] OP-3426 - added date string to batch name It would be better to have separate batches for automatic tests than single batch with a lot of runs. --- .../plugins/publish/submit_aftereffects_deadline.py | 7 +++++-- .../plugins/publish/submit_harmony_deadline.py | 6 +++++- .../plugins/publish/submit_houdini_remote_publish.py | 3 +++ .../plugins/publish/submit_houdini_render_deadline.py | 4 ++++ .../deadline/plugins/publish/submit_maya_deadline.py | 3 +++ .../publish/submit_maya_remote_publish_deadline.py | 3 +++ .../deadline/plugins/publish/submit_nuke_deadline.py | 10 +++++++--- 7 files changed, 30 insertions(+), 6 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py b/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py index bdf492bdcb6..c6b94c2ce80 100644 --- a/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py @@ -2,6 +2,7 @@ import attr import getpass import pyblish.api +from datetime import datetime from openpype.lib import ( env_value_to_bool, @@ -48,9 +49,11 @@ def get_job_info(self): context = self._instance.context + batch_name = os.path.basename(self._instance.data["source"]) + if os.environ.get("IS_TEST"): + batch_name += datetime.now().strftime("%d%m%Y%H%M%S") dln_job_info.Name = self._instance.data["name"] - dln_job_info.BatchName = os.path.basename(self._instance. 
- data["source"]) + dln_job_info.BatchName = batch_name dln_job_info.Plugin = "AfterEffects" dln_job_info.UserName = context.data.get( "deadlineUser", getpass.getuser()) diff --git a/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py b/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py index b90c41d67b8..f98649cb1ae 100644 --- a/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py @@ -5,6 +5,7 @@ from collections import OrderedDict from zipfile import ZipFile, is_zipfile import re +from datetime import datetime import attr import pyblish.api @@ -261,7 +262,10 @@ def get_job_info(self): job_info.Pool = self._instance.data.get("primaryPool") job_info.SecondaryPool = self._instance.data.get("secondaryPool") job_info.ChunkSize = self.chunk_size - job_info.BatchName = os.path.basename(self._instance.data["source"]) + batch_name = os.path.basename(self._instance.data["source"]) + if os.environ.get("IS_TEST"): + batch_name += datetime.now().strftime("%d%m%Y%H%M%S") + job_info.BatchName = batch_name job_info.Department = self.department job_info.Group = self.group diff --git a/openpype/modules/deadline/plugins/publish/submit_houdini_remote_publish.py b/openpype/modules/deadline/plugins/publish/submit_houdini_remote_publish.py index 95856137e21..dbd8645817c 100644 --- a/openpype/modules/deadline/plugins/publish/submit_houdini_remote_publish.py +++ b/openpype/modules/deadline/plugins/publish/submit_houdini_remote_publish.py @@ -1,5 +1,6 @@ import os import json +from datetime import datetime import requests import hou @@ -60,6 +61,8 @@ def process(self, context): job_name = "{scene} [PUBLISH]".format(scene=scenename) batch_name = "{code} - {scene}".format(code=code, scene=scenename) + if os.environ.get("IS_TEST"): + batch_name += datetime.now().strftime("%d%m%Y%H%M%S") deadline_user = "roy" # todo: get deadline user dynamically # Get only major.minor version of Houdini, ignore patch version diff --git a/openpype/modules/deadline/plugins/publish/submit_houdini_render_deadline.py b/openpype/modules/deadline/plugins/publish/submit_houdini_render_deadline.py index beda7537235..59472fdd547 100644 --- a/openpype/modules/deadline/plugins/publish/submit_houdini_render_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_houdini_render_deadline.py @@ -1,6 +1,7 @@ import os import json import getpass +from datetime import datetime import requests import pyblish.api @@ -45,6 +46,9 @@ def process(self, instance): if code: batch_name = "{0} - {1}".format(code, batch_name) + if os.environ.get("IS_TEST"): + batch_name += datetime.now().strftime("%d%m%Y%H%M%S") + # Output driver to render driver = instance[0] diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 315c9461c2f..ca3c765c65c 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -118,6 +118,9 @@ def get_job_info(self): src_filepath = context.data["currentFile"] src_filename = os.path.basename(src_filepath) + if os.environ.get("IS_TEST"): + src_filename += datetime.now().strftime("%d%m%Y%H%M%S") + job_info.Name = "%s - %s" % (src_filename, instance.name) job_info.BatchName = src_filename job_info.Plugin = instance.data.get("mayaRenderPlugin", "MayaBatch") diff --git 
a/openpype/modules/deadline/plugins/publish/submit_maya_remote_publish_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_remote_publish_deadline.py index 38ae5d2f7f9..1023966af84 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_remote_publish_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_remote_publish_deadline.py @@ -1,5 +1,6 @@ import os import requests +from datetime import datetime from maya import cmds @@ -57,6 +58,8 @@ def process(self, instance): job_name = "{scene} [PUBLISH]".format(scene=scenename) batch_name = "{code} - {scene}".format(code=project_name, scene=scenename) + if os.environ.get("IS_TEST"): + batch_name += datetime.now().strftime("%d%m%Y%H%M%S") # Generate the payload for Deadline submission payload = { diff --git a/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py b/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py index b09d2935abf..44abf12ef49 100644 --- a/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py @@ -2,6 +2,7 @@ import re import json import getpass +from datetime import datetime import requests import pyblish.api @@ -141,8 +142,11 @@ def payload_submit( responce_data=None ): render_dir = os.path.normpath(os.path.dirname(render_path)) - script_name = os.path.basename(script_path) - jobname = "%s - %s" % (script_name, instance.name) + batch_name = os.path.basename(script_path) + jobname = "%s - %s" % (batch_name, instance.name) + if os.environ.get("IS_TEST"): + batch_name += datetime.now().strftime("%d%m%Y%H%M%S") + output_filename_0 = self.preview_fname(render_path) @@ -176,7 +180,7 @@ def payload_submit( payload = { "JobInfo": { # Top-level group name - "BatchName": script_name, + "BatchName": batch_name, # Asset dependency to wait for at least the scene file to sync. # "AssetDependency0": script_path, From d6bbc95786092a81ca438a3489cf758bc82d7107 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 19 Oct 2022 16:21:34 +0200 Subject: [PATCH 26/68] OP-3426 - fix - wrong format of args --- openpype/modules/deadline/plugins/publish/submit_publish_job.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index ab3cd091794..32ad22f6b01 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -267,7 +267,7 @@ def _submit_deadline_post_job(self, instance, job, instances): ] if os.environ.get("IS_TEST"): - args["automatic_tests"] = "1" + args.extend(["--automatic_tests", "1"]) # Generate the payload for Deadline submission payload = { From f47403403b8a3301a5a28974c5c8b20b7b2b6f97 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Wed, 19 Oct 2022 16:37:54 +0200 Subject: [PATCH 27/68] OP-3426 - update CleanUp to skip for automatic tests Tests work with temp folders, so CleanUp might delete some required files (as metadata.json). Let automatic process cleanup itself. 
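In plugin terms the rule described here is a single early return keyed on the same IS_TEST variable. A minimal standalone sketch follows, assuming pyblish.api is importable; the real hunk with the plugin's existing body is in the diff just below:

```python
import os

import pyblish.api


class CleanUpSketch(pyblish.api.InstancePlugin):
    """Minimal sketch of the early return added to the CleanUp plugin.

    The class name, order and label here are placeholders; only the guard
    itself mirrors the actual change.
    """

    order = pyblish.api.IntegratorOrder + 10
    label = "Clean Up (sketch)"

    def process(self, instance):
        if os.environ.get("IS_TEST"):
            # automatic tests compare and purge the temp folder themselves
            return
        # ... regular removal of staging directories would follow here ...
```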
--- openpype/plugins/publish/cleanup.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/plugins/publish/cleanup.py b/openpype/plugins/publish/cleanup.py index f29e6ccd4e8..34480dd1998 100644 --- a/openpype/plugins/publish/cleanup.py +++ b/openpype/plugins/publish/cleanup.py @@ -44,6 +44,9 @@ class CleanUp(pyblish.api.InstancePlugin): def process(self, instance): """Plugin entry point.""" + if os.environ.get("IS_TEST"): + # let automatic test process clean up temporary data + return # Get the errored instances failed = [] for result in instance.context.data["results"]: From 7e6c355fea5aa00b74dfc50b166504998c20f443 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 20 Oct 2022 18:10:48 +0200 Subject: [PATCH 28/68] OP-3426 - purge temp only if not error --- tests/lib/testing_classes.py | 17 ++++++++++++++--- 1 file changed, 14 insertions(+), 3 deletions(-) diff --git a/tests/lib/testing_classes.py b/tests/lib/testing_classes.py index 946ad671976..8b5050a6bbe 100644 --- a/tests/lib/testing_classes.py +++ b/tests/lib/testing_classes.py @@ -50,6 +50,8 @@ class ModuleUnitTest(BaseTest): TEST_DATA_FOLDER = None + failed = False + @pytest.fixture(scope='session') def monkeypatch_session(self): """Monkeypatch couldn't be used with module or session fixtures.""" @@ -80,7 +82,7 @@ def download_test_data(self, test_data_folder, persist=False): print("Temporary folder created:: {}".format(tmpdir)) yield tmpdir - persist = persist or self.PERSIST + persist = persist or self.PERSIST or self.failed if not persist: print("Removing {}".format(tmpdir)) shutil.rmtree(tmpdir) @@ -143,7 +145,8 @@ def db_setup(self, download_test_data, env_var, monkeypatch_session): yield db_handler - if not self.PERSIST: + persist = self.PERSIST or self.failed + if not persist: db_handler.teardown(self.TEST_DB_NAME) db_handler.teardown(self.TEST_OPENPYPE_NAME) @@ -304,6 +307,7 @@ def publish_finished(self, dbcon, launched_app, download_test_data, while launched_app.poll() is None: time.sleep(0.5) if time.time() - time_start > timeout: + self.failed = True launched_app.terminate() raise ValueError("Timeout reached") @@ -317,7 +321,7 @@ def test_folder_structure_same(self, dbcon, publish_finished, Compares only presence, not size nor content! """ - published_dir_base = os.path.join(output_folder_url, "output") + published_dir_base = output_folder_url expected_dir_base = os.path.join(download_test_data, "expected") @@ -347,6 +351,7 @@ def publish_finished(self, dbcon, launched_app, download_test_data, while launched_app.poll() is None: time.sleep(0.5) if time.time() - time_start > timeout: + self.failed = True launched_app.terminate() raise ValueError("Timeout reached") @@ -355,9 +360,11 @@ def publish_finished(self, dbcon, launched_app, download_test_data, "**/*_metadata.json"), recursive=True) if not metadata_json: + self.failed = True raise RuntimeError("No metadata file found. 
No job id.") if len(metadata_json) > 1: + self.failed = True raise RuntimeError("Too many metadata files found.") with open(metadata_json[0]) as fp: @@ -370,6 +377,7 @@ def publish_finished(self, dbcon, launched_app, download_test_data, deadline_url = deadline_module.deadline_urls["default"] if not deadline_url: + self.failed = True raise ValueError("Must have default deadline url.") url = "{}/api/jobs?JobId={}".format(deadline_url, deadline_job_id) @@ -379,14 +387,17 @@ def publish_finished(self, dbcon, launched_app, download_test_data, while not valid_date_finished: time.sleep(0.5) if time.time() - time_start > timeout: + self.failed = True raise ValueError("Timeout for DL finish reached") response = requests.get(url, timeout=10) if not response.ok: + self.failed = True msg = "Couldn't connect to {}".format(deadline_url) raise RuntimeError(msg) if not response.json(): + self.failed = True raise ValueError("Couldn't find {}".format(deadline_job_id)) # '0001-...' returned until job is finished From 5d9aaecea0e48938c3f3735d5b46c76ec932e8f3 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 21 Oct 2022 15:58:25 +0200 Subject: [PATCH 29/68] OP-3426 - fix output folder output_folder_url should be used, contains already 'output' subfolder --- tests/integration/hosts/aftereffects/lib.py | 3 +-- tests/integration/hosts/maya/lib.py | 1 - tests/integration/hosts/nuke/lib.py | 15 ++++++++++++--- 3 files changed, 13 insertions(+), 6 deletions(-) diff --git a/tests/integration/hosts/aftereffects/lib.py b/tests/integration/hosts/aftereffects/lib.py index 0f7513c7d37..541103cb2ec 100644 --- a/tests/integration/hosts/aftereffects/lib.py +++ b/tests/integration/hosts/aftereffects/lib.py @@ -20,8 +20,7 @@ def last_workfile_path(self, download_test_data, output_folder_url): "input", "workfile", "test_project_test_asset_TestTask_v001.aep") - dest_folder = os.path.join(download_test_data, - "output", + dest_folder = os.path.join(output_folder_url, self.PROJECT, self.ASSET, "work", diff --git a/tests/integration/hosts/maya/lib.py b/tests/integration/hosts/maya/lib.py index a0c52141818..6610fac118a 100644 --- a/tests/integration/hosts/maya/lib.py +++ b/tests/integration/hosts/maya/lib.py @@ -21,7 +21,6 @@ def last_workfile_path(self, download_test_data, output_folder_url): "workfile", "test_project_test_asset_TestTask_v001.mb") dest_folder = os.path.join(output_folder_url, - "output", self.PROJECT, self.ASSET, "work", diff --git a/tests/integration/hosts/nuke/lib.py b/tests/integration/hosts/nuke/lib.py index ed2af38d1c5..564a5d3b887 100644 --- a/tests/integration/hosts/nuke/lib.py +++ b/tests/integration/hosts/nuke/lib.py @@ -1,6 +1,6 @@ import os import pytest -import shutil +import re from tests.lib.testing_classes import ( HostFixtures, @@ -15,7 +15,7 @@ def last_workfile_path(self, download_test_data, output_folder_url): """Get last_workfile_path from source data. """ - source_file_name = "test_project_test_asset_TestTask_v001.nk" + source_file_name = "test_project_test_asset_test_task_v001.nk" src_path = os.path.join(download_test_data, "input", "workfile", @@ -31,7 +31,16 @@ def last_workfile_path(self, download_test_data, output_folder_url): dest_path = os.path.join(dest_folder, source_file_name) - shutil.copy(src_path, dest_path) + # rewrite old root with temporary file + # TODO - using only C:/projects seems wrong - but where to get root ? 
+ replace_pattern = re.compile(re.escape("C:/projects"), re.IGNORECASE) + with open(src_path, "r") as fp: + updated = fp.read() + updated = replace_pattern.sub(output_folder_url.replace("\\", '/'), + updated) + + with open(dest_path, "w") as fp: + fp.write(updated) yield dest_path From e38ae9d91911a86d8f2683d35cc92b6a02834631 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 21 Oct 2022 16:45:14 +0200 Subject: [PATCH 30/68] OP-3426 - refactor - rename workfile --- tests/integration/hosts/maya/lib.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/integration/hosts/maya/lib.py b/tests/integration/hosts/maya/lib.py index 6610fac118a..1411ccca5f4 100644 --- a/tests/integration/hosts/maya/lib.py +++ b/tests/integration/hosts/maya/lib.py @@ -19,7 +19,7 @@ def last_workfile_path(self, download_test_data, output_folder_url): src_path = os.path.join(download_test_data, "input", "workfile", - "test_project_test_asset_TestTask_v001.mb") + "test_project_test_asset_test_task_v001.mb") dest_folder = os.path.join(output_folder_url, self.PROJECT, self.ASSET, @@ -27,7 +27,7 @@ def last_workfile_path(self, download_test_data, output_folder_url): self.TASK) os.makedirs(dest_folder) dest_path = os.path.join(dest_folder, - "test_project_test_asset_TestTask_v001.mb") + "test_project_test_asset_test_task_v001.mb") shutil.copy(src_path, dest_path) yield dest_path From 127a0dcb1fc8816149f6b30e7e0e169cd9d1f92f Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 21 Oct 2022 16:46:11 +0200 Subject: [PATCH 31/68] OP-3426 - refactor - use class variable --- tests/lib/testing_classes.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/lib/testing_classes.py b/tests/lib/testing_classes.py index 8b5050a6bbe..e4f816f4d0a 100644 --- a/tests/lib/testing_classes.py +++ b/tests/lib/testing_classes.py @@ -139,7 +139,7 @@ def db_setup(self, download_test_data, env_var, monkeypatch_session): overwrite=True, db_name_out=self.TEST_DB_NAME) - db_handler.setup_from_dump("openpype", backup_dir, + db_handler.setup_from_dump(self.TEST_OPENPYPE_NAME, backup_dir, overwrite=True, db_name_out=self.TEST_OPENPYPE_NAME) From f703e53651bd0ceb845563a01601e48bf1d14f6d Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 24 Oct 2022 10:48:14 +0200 Subject: [PATCH 32/68] OP-3426 - fix - wrong flag used in pype_commands --- openpype/pype_commands.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/pype_commands.py b/openpype/pype_commands.py index d08a812c61f..932fdc9be41 100644 --- a/openpype/pype_commands.py +++ b/openpype/pype_commands.py @@ -299,7 +299,7 @@ def run_tests(self, folder, mark, pyargs, if pyargs: args.extend(["--pyargs", pyargs]) - if persist: + if test_data_folder: args.extend(["--test_data_folder", test_data_folder]) if persist: From d35ea96bd08e099adae7a52da5f0585e3d8a0760 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 24 Oct 2022 11:52:08 +0200 Subject: [PATCH 33/68] OP-3426 - added filtering of published folders from comparing Some files or folders are dynamically created and cannot be part of comparing of published and expected folder structure. 
(Example is Logs in AE from DL) --- tests/integration/hosts/aftereffects/lib.py | 5 +++++ tests/integration/hosts/maya/lib.py | 4 ++++ tests/integration/hosts/nuke/lib.py | 3 +++ tests/integration/hosts/photoshop/lib.py | 4 ++++ tests/lib/testing_classes.py | 23 +++++++++++++++++---- 5 files changed, 35 insertions(+), 4 deletions(-) diff --git a/tests/integration/hosts/aftereffects/lib.py b/tests/integration/hosts/aftereffects/lib.py index 541103cb2ec..3fb1fa18cea 100644 --- a/tests/integration/hosts/aftereffects/lib.py +++ b/tests/integration/hosts/aftereffects/lib.py @@ -37,6 +37,11 @@ def startup_scripts(self, monkeypatch_session, download_test_data): """Points Maya to userSetup file from input data""" pass + @pytest.fixture(scope="module") + def skip_compare_folders(self): + # skip folder that contain "Logs", these come only from Deadline + return ["Logs"] + class AELocalPublishTestClass(AEHostFixtures, PublishTest): """Testing class for local publishes.""" diff --git a/tests/integration/hosts/maya/lib.py b/tests/integration/hosts/maya/lib.py index 1411ccca5f4..e7480e25fa2 100644 --- a/tests/integration/hosts/maya/lib.py +++ b/tests/integration/hosts/maya/lib.py @@ -44,6 +44,10 @@ def startup_scripts(self, monkeypatch_session, download_test_data): os.pathsep, original_pythonpath)) + @pytest.fixture(scope="module") + def skip_compare_folders(self): + yield [] + class MayaLocalPublishTestClass(MayaHostFixtures, PublishTest): """Testing class for local publishes.""" diff --git a/tests/integration/hosts/nuke/lib.py b/tests/integration/hosts/nuke/lib.py index 564a5d3b887..8e97cd6fe41 100644 --- a/tests/integration/hosts/nuke/lib.py +++ b/tests/integration/hosts/nuke/lib.py @@ -56,6 +56,9 @@ def startup_scripts(self, monkeypatch_session, download_test_data): os.pathsep, original_nuke_path)) + @pytest.fixture(scope="module") + def skip_compare_folders(self): + yield [] class NukeLocalPublishTestClass(NukeHostFixtures, PublishTest): """Testing class for local publishes.""" diff --git a/tests/integration/hosts/photoshop/lib.py b/tests/integration/hosts/photoshop/lib.py index 16ef2d3ae68..23ae79434e9 100644 --- a/tests/integration/hosts/photoshop/lib.py +++ b/tests/integration/hosts/photoshop/lib.py @@ -32,3 +32,7 @@ def last_workfile_path(self, download_test_data, output_folder_url): def startup_scripts(self, monkeypatch_session, download_test_data): """Points Maya to userSetup file from input data""" pass + + @pytest.fixture(scope="module") + def skip_compare_folders(self): + yield [] \ No newline at end of file diff --git a/tests/lib/testing_classes.py b/tests/lib/testing_classes.py index e4f816f4d0a..8aaeb4304bf 100644 --- a/tests/lib/testing_classes.py +++ b/tests/lib/testing_classes.py @@ -9,6 +9,7 @@ import glob import platform import requests +import re from tests.lib.db_handler import DBHandler from common.openpype_common.distribution.file_handler import RemoteFileHandler @@ -316,7 +317,8 @@ def publish_finished(self, dbcon, launched_app, download_test_data, yield True def test_folder_structure_same(self, dbcon, publish_finished, - download_test_data, output_folder_url): + download_test_data, output_folder_url, + skip_compare_folders): """Check if expected and published subfolders contain same files. Compares only presence, not size nor content! 
@@ -334,9 +336,17 @@ def test_folder_structure_same(self, dbcon, publish_finished, glob.glob(expected_dir_base + "\\**", recursive=True) if f != expected_dir_base and os.path.exists(f)) - not_matched = expected.symmetric_difference(published) - assert not not_matched, "Missing {} files".format( - "\n".join(sorted(not_matched))) + filtered_published = set() + for pub_path in published: + for val in skip_compare_folders: + if not re.search(val, pub_path): + filtered_published.add(pub_path) + + not_matched = expected.symmetric_difference(filtered_published) + if not_matched: + self.failed = True + raise AssertionError("Missing {} files".format( + "\n".join(sorted(not_matched)))) class DeadlinePublishTest(PublishTest): @@ -419,3 +429,8 @@ def last_workfile_path(self, download_test_data, output_folder_url): def startup_scripts(self, monkeypatch_session, download_test_data): """"Adds init scripts (like userSetup) to expected location""" raise NotImplementedError + + @pytest.fixture(scope="module") + def skip_compare_folders(self): + """Use list of regexs to filter out published folders from comparing""" + raise NotImplementedError From e94c524eceb8fb0975645ba6cccc2d2ef2049b44 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 24 Oct 2022 15:10:26 +0200 Subject: [PATCH 34/68] OP-3426 - fix - counts of expected items in DB Format of subset names was changed, default settings contain also hero version etc. --- .../maya/test_deadline_publish_in_maya.py | 20 ++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/tests/integration/hosts/maya/test_deadline_publish_in_maya.py b/tests/integration/hosts/maya/test_deadline_publish_in_maya.py index 16ff48d7fdb..1c23129b562 100644 --- a/tests/integration/hosts/maya/test_deadline_publish_in_maya.py +++ b/tests/integration/hosts/maya/test_deadline_publish_in_maya.py @@ -49,45 +49,47 @@ def test_db_asserts(self, dbcon, publish_finished): failures.append( DBAssert.count_of_types(dbcon, "subset", 1, - name="renderTestTaskMain_beauty")) + name="renderTest_taskMain_beauty")) failures.append( DBAssert.count_of_types(dbcon, "subset", 1, - name="workfileTesttask")) + name="workfileTest_task")) - failures.append(DBAssert.count_of_types(dbcon, "representation", 6)) + failures.append(DBAssert.count_of_types(dbcon, "representation", 8)) + # hero included additional_args = {"context.subset": "modelMain", "context.ext": "abc"} failures.append( - DBAssert.count_of_types(dbcon, "representation", 1, + DBAssert.count_of_types(dbcon, "representation", 2, additional_args=additional_args)) + # hero included additional_args = {"context.subset": "modelMain", "context.ext": "ma"} failures.append( - DBAssert.count_of_types(dbcon, "representation", 1, + DBAssert.count_of_types(dbcon, "representation", 2, additional_args=additional_args)) additional_args = {"context.subset": "modelMain", "context.ext": "mb"} failures.append( - DBAssert.count_of_types(dbcon, "representation", 1, + DBAssert.count_of_types(dbcon, "representation", 0, additional_args=additional_args)) - additional_args = {"context.subset": "renderTestTaskMain_beauty", + additional_args = {"context.subset": "renderTest_taskMain_beauty", "context.ext": "exr"} failures.append( DBAssert.count_of_types(dbcon, "representation", 1, additional_args=additional_args)) - additional_args = {"context.subset": "renderTestTaskMain_beauty", + additional_args = {"context.subset": "renderTest_taskMain_beauty", "context.ext": "jpg"} failures.append( DBAssert.count_of_types(dbcon, "representation", 1, 
additional_args=additional_args)) - additional_args = {"context.subset": "renderTestTaskMain_beauty", + additional_args = {"context.subset": "renderTest_taskMain_beauty", "context.ext": "h264_exr"} failures.append( DBAssert.count_of_types(dbcon, "representation", 1, From 1f18639c4e866e7032caa23e096fbaa63ee265cb Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 24 Oct 2022 16:43:40 +0200 Subject: [PATCH 35/68] OP-3426 - fix - counts of expected items in DB Format of subset names was changed, default settings contain also hero version etc. --- tests/integration/hosts/nuke/test_publish_in_nuke.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/integration/hosts/nuke/test_publish_in_nuke.py b/tests/integration/hosts/nuke/test_publish_in_nuke.py index a3947a30ae5..8fff54a4bef 100644 --- a/tests/integration/hosts/nuke/test_publish_in_nuke.py +++ b/tests/integration/hosts/nuke/test_publish_in_nuke.py @@ -61,7 +61,7 @@ def test_db_asserts(self, dbcon, publish_finished): failures.append( DBAssert.count_of_types(dbcon, "subset", 1, - name="renderCompositingInNukeMain")) + name="renderTest_taskMain")) failures.append( DBAssert.count_of_types(dbcon, "subset", 1, @@ -70,7 +70,7 @@ def test_db_asserts(self, dbcon, publish_finished): failures.append( DBAssert.count_of_types(dbcon, "representation", 4)) - additional_args = {"context.subset": "renderCompositingInNukeMain", + additional_args = {"context.subset": "renderTest_taskMain", "context.ext": "exr"} failures.append( DBAssert.count_of_types(dbcon, "representation", 1, From 22a86f89f73b351599b87cc6e310c8f4799a894d Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 24 Oct 2022 18:22:34 +0200 Subject: [PATCH 36/68] OP-3426 - updated PS testing class --- tests/integration/hosts/photoshop/lib.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/tests/integration/hosts/photoshop/lib.py b/tests/integration/hosts/photoshop/lib.py index 23ae79434e9..9d51a11c061 100644 --- a/tests/integration/hosts/photoshop/lib.py +++ b/tests/integration/hosts/photoshop/lib.py @@ -2,10 +2,13 @@ import pytest import shutil -from tests.lib.testing_classes import HostFixtures +from tests.lib.testing_classes import ( + HostFixtures, + PublishTest +) -class PhotoshopTestClass(HostFixtures): +class PhotoshopTestClass(HostFixtures, PublishTest): @pytest.fixture(scope="module") def last_workfile_path(self, download_test_data, output_folder_url): """Get last_workfile_path from source data. 
@@ -35,4 +38,4 @@ def startup_scripts(self, monkeypatch_session, download_test_data): @pytest.fixture(scope="module") def skip_compare_folders(self): - yield [] \ No newline at end of file + yield [] From f73330565e9773032473d378e54425eed76ca09c Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 25 Oct 2022 11:28:14 +0200 Subject: [PATCH 37/68] OP-3426 - filter out Auto-Save folder --- tests/integration/hosts/aftereffects/lib.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/integration/hosts/aftereffects/lib.py b/tests/integration/hosts/aftereffects/lib.py index 3fb1fa18cea..c47121a035d 100644 --- a/tests/integration/hosts/aftereffects/lib.py +++ b/tests/integration/hosts/aftereffects/lib.py @@ -40,7 +40,7 @@ def startup_scripts(self, monkeypatch_session, download_test_data): @pytest.fixture(scope="module") def skip_compare_folders(self): # skip folder that contain "Logs", these come only from Deadline - return ["Logs"] + return ["Logs", "Auto-Save"] class AELocalPublishTestClass(AEHostFixtures, PublishTest): From dab37abea925054efea5c90322db2868796eea70 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 25 Oct 2022 11:30:59 +0200 Subject: [PATCH 38/68] OP-3426 - updated filtering logic --- tests/lib/db_handler.py | 10 ++++++---- tests/lib/testing_classes.py | 7 +++++-- 2 files changed, 11 insertions(+), 6 deletions(-) diff --git a/tests/lib/db_handler.py b/tests/lib/db_handler.py index b80a296b607..16c4650ef6a 100644 --- a/tests/lib/db_handler.py +++ b/tests/lib/db_handler.py @@ -228,12 +228,14 @@ def _import_query(self, uri, sql_url, return query # Examples -# handler = DBHandler(uri="mongodb://localhost:27017") +handler = DBHandler(uri="mongodb://localhost:27017") # # -# backup_dir = "c:\\projects\\test_zips\\test_maya_publish\\input\\dumps" +backup_dir = "c:\\projects\\test_zips\\test_nuke_deadline_publish\\input\\dumps" # # # -# handler.backup_to_dump("avalon", backup_dir, True, collection="test_project") -# handler.setup_from_dump("avalon_tests", backup_dir, True, db_name_out="avalon", collection="test_project") +handler.backup_to_dump("avalon_tests", backup_dir, True, collection="test_project") +#handler.backup_to_dump("openpype_tests", backup_dir, True, collection="settings") + +# handler.setup_from_dump("avalon_tests", backup_dir, True, db_name_out="avalon_tests", collection="test_project") # handler.setup_from_sql_file("avalon_tests", "c:\\projects\\sql\\item.sql", # collection="test_project", # drop=False, mode="upsert") diff --git a/tests/lib/testing_classes.py b/tests/lib/testing_classes.py index 8aaeb4304bf..ca503765846 100644 --- a/tests/lib/testing_classes.py +++ b/tests/lib/testing_classes.py @@ -338,9 +338,12 @@ def test_folder_structure_same(self, dbcon, publish_finished, filtered_published = set() for pub_path in published: - for val in skip_compare_folders: - if not re.search(val, pub_path): + if skip_compare_folders: + if not any([re.search(val, pub_path) + for val in skip_compare_folders]): filtered_published.add(pub_path) + else: + filtered_published.add(pub_path) not_matched = expected.symmetric_difference(filtered_published) if not_matched: From 2ed190cdfd324166f21487bab1147488091a03a9 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 25 Oct 2022 18:51:09 +0200 Subject: [PATCH 39/68] OP-3426 - remove unwanted uncommenting --- tests/lib/db_handler.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/lib/db_handler.py b/tests/lib/db_handler.py index 16c4650ef6a..b5085aab3d0 100644 --- 
a/tests/lib/db_handler.py +++ b/tests/lib/db_handler.py @@ -228,11 +228,11 @@ def _import_query(self, uri, sql_url, return query # Examples -handler = DBHandler(uri="mongodb://localhost:27017") +# handler = DBHandler(uri="mongodb://localhost:27017") # # -backup_dir = "c:\\projects\\test_zips\\test_nuke_deadline_publish\\input\\dumps" +# backup_dir = "c:\\projects\\test_zips\\test_nuke_deadline_publish\\input\\dumps" # # # -handler.backup_to_dump("avalon_tests", backup_dir, True, collection="test_project") +# handler.backup_to_dump("avalon_tests", backup_dir, True, collection="test_project") #handler.backup_to_dump("openpype_tests", backup_dir, True, collection="settings") # handler.setup_from_dump("avalon_tests", backup_dir, True, db_name_out="avalon_tests", collection="test_project") From c68f5f45d83b7ac1dc384ac9d1f7398145103257 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 25 Oct 2022 18:52:52 +0200 Subject: [PATCH 40/68] OP-3426 - Hound --- tests/lib/db_handler.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/lib/db_handler.py b/tests/lib/db_handler.py index b5085aab3d0..ef3c2b52f60 100644 --- a/tests/lib/db_handler.py +++ b/tests/lib/db_handler.py @@ -230,12 +230,12 @@ def _import_query(self, uri, sql_url, # Examples # handler = DBHandler(uri="mongodb://localhost:27017") # # -# backup_dir = "c:\\projects\\test_zips\\test_nuke_deadline_publish\\input\\dumps" +# backup_dir = "c:\\projects\\test_zips\\test_nuke_deadline_publish\\input\\dumps" # noqa # # # -# handler.backup_to_dump("avalon_tests", backup_dir, True, collection="test_project") -#handler.backup_to_dump("openpype_tests", backup_dir, True, collection="settings") +# handler.backup_to_dump("avalon_tests", backup_dir, True, collection="test_project") # noqa +#handler.backup_to_dump("openpype_tests", backup_dir, True, collection="settings") # noqa -# handler.setup_from_dump("avalon_tests", backup_dir, True, db_name_out="avalon_tests", collection="test_project") +# handler.setup_from_dump("avalon_tests", backup_dir, True, db_name_out="avalon_tests", collection="test_project") # noqa # handler.setup_from_sql_file("avalon_tests", "c:\\projects\\sql\\item.sql", # collection="test_project", # drop=False, mode="upsert") From 9192005a8417f4eeedd8df1af54c2814c9df9c20 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 27 Oct 2022 15:40:05 +0200 Subject: [PATCH 41/68] OP-3426 - increase timeout for Nuke It seems that it takes longer on testing PC, this should be safe. 
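The class-level TIMEOUT raised below feeds the polling loop in the publish_finished fixtures shown earlier in this series. Stripped of the fixture plumbing, the wait is roughly the sketch below; wait_for_publish is an illustrative name, not an existing helper:

```python
import time


def wait_for_publish(launched_app, timeout):
    """Poll a launched DCC process until it exits or `timeout` seconds pass.

    Illustrative reduction of the publish_finished fixture loop;
    `launched_app` is a subprocess.Popen-like object.
    """
    time_start = time.time()
    while launched_app.poll() is None:
        time.sleep(0.5)
        if time.time() - time_start > timeout:
            launched_app.terminate()
            raise ValueError("Timeout reached")
```

Raising TIMEOUT only widens this window; runs that finish sooner exit the loop immediately, so 180 seconds is simply a safer upper bound for slower test machines.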
--- tests/integration/hosts/nuke/test_deadline_publish_in_nuke.py | 2 +- tests/integration/hosts/nuke/test_publish_in_nuke.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/integration/hosts/nuke/test_deadline_publish_in_nuke.py b/tests/integration/hosts/nuke/test_deadline_publish_in_nuke.py index 98b463b9ae7..27c76a2274a 100644 --- a/tests/integration/hosts/nuke/test_deadline_publish_in_nuke.py +++ b/tests/integration/hosts/nuke/test_deadline_publish_in_nuke.py @@ -42,7 +42,7 @@ class TestDeadlinePublishInNuke(NukeDeadlinePublishTestClass): APP = "nuke" - TIMEOUT = 120 # publish timeout + TIMEOUT = 180 # publish timeout # could be overwritten by command line arguments # keep empty to locate latest installed variant or explicit diff --git a/tests/integration/hosts/nuke/test_publish_in_nuke.py b/tests/integration/hosts/nuke/test_publish_in_nuke.py index 8fff54a4bef..f6e5ad6921e 100644 --- a/tests/integration/hosts/nuke/test_publish_in_nuke.py +++ b/tests/integration/hosts/nuke/test_publish_in_nuke.py @@ -41,7 +41,7 @@ class TestPublishInNuke(NukeLocalPublishTestClass): APP = "nuke" - TIMEOUT = 120 # publish timeout + TIMEOUT = 180 # publish timeout # could be overwritten by command line arguments # keep empty to locate latest installed variant or explicit From 381ea92477969b341647daf6a2ee86390177bf02 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 27 Oct 2022 15:57:41 +0200 Subject: [PATCH 42/68] OP-3426 - fix subset name in Nuke --- tests/integration/hosts/nuke/test_deadline_publish_in_nuke.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/integration/hosts/nuke/test_deadline_publish_in_nuke.py b/tests/integration/hosts/nuke/test_deadline_publish_in_nuke.py index 27c76a2274a..bac3b6898e3 100644 --- a/tests/integration/hosts/nuke/test_deadline_publish_in_nuke.py +++ b/tests/integration/hosts/nuke/test_deadline_publish_in_nuke.py @@ -62,7 +62,7 @@ def test_db_asserts(self, dbcon, publish_finished): failures.append( DBAssert.count_of_types(dbcon, "subset", 1, - name="renderCompositingInNukeMain")) + name="renderTest_taskMain")) failures.append( DBAssert.count_of_types(dbcon, "subset", 1, @@ -71,7 +71,7 @@ def test_db_asserts(self, dbcon, publish_finished): failures.append( DBAssert.count_of_types(dbcon, "representation", 4)) - additional_args = {"context.subset": "renderCompositingInNukeMain", + additional_args = {"context.subset": "renderTest_taskMain", "context.ext": "exr"} failures.append( DBAssert.count_of_types(dbcon, "representation", 1, From b0f0644fccb820a60b4cf0df74e138bcdd3b8867 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Thu, 3 Nov 2022 16:53:59 +0100 Subject: [PATCH 43/68] OP-3426 - updated skeleton source zip file --- tests/resources/test_data.zip | Bin 4979 -> 5098 bytes 1 file changed, 0 insertions(+), 0 deletions(-) diff --git a/tests/resources/test_data.zip b/tests/resources/test_data.zip index b37a30ed7a73b96d2c6398207657607b63fac0bc..e22b9acdbdbcd7312776e33918ac2a6a9211dab3 100644 GIT binary patch delta 3502 zcmZuz2|SeD7akNE>zJvm*~Zdf6hgL?Em^XS)PG70k#(}mGRZP2#xfWxh3ren(jdy7 zWM8wU7%IkE+5KnI^7+2*`+I+L?)!V5bIv{Yp1IGm=x4ELtCAk(HX3HSeGmXG005{k zw?M?$!S?xvp%5|g$3WCy%IDuh7!P9S61VcQa&mRC^ssgJaF;gH+a)vpAm8a;dTYV} z0NjND0CH3?W)GN3u|&JMI@qE-;MVT0E~6H0QM@__G0L(Hg8ov1ASg{~7@gy5qFqz_ zx&{vAchF914Q1=<1ZkB!F?vEaelEu$H+MibLi-KNDjhqLIMuKpen(H8ccO`g{cs2* zh+SbD5baZ#TlL&;q@0(|qydA>{9JtjF`pOsb$x502cUBXRIjizf|(~+4zD%#f5&{i z*4l2$dY(=_w{KYGS!b-%n+fpQ;*r53f#;}eTrZ*~&C@^72W5ee_v&}((dO%fpDjFV 
z5$x|TT;lRf{v6BdcoNeW;YfxWIBM08XKG|N!FFhUEu=;9hSU0#X-5`j{1WV%N`<|N zciP6*$1=~h$a69haU&DfD#hr^U`MtbZ%rq$g?7*_90GP+kif72dR}RpcHps$&yi-> zl#JHS!#0D@j}*s(z0xPoMp)VLT_}M_RRqhI`7O{Ec(wb{>zK;h)ut%S2o}Dq!@pBw zTMs4!eCd3OCj%s>+khlY_(75;x&TPhUA1o6gy0_fGN>F&U;*Q33tvs^(EtZDZHuFY zW*(DkZqF7Rl!wP2AE-_7%Z!>e&9jqLMYtVNyk}VWFxfB;aV@R8Zc-Lq3h_Cl<787o zdIGxH70sO7IbW-n(b;?>PPgqM$HQ6Ha&-S8Q+d*vTDD_`&UQ+e%@AaO<)^I}b?}aE z9s=51&eZa?j$_G6fh3$Dl1PNUM`;4d2RfEa?i_$V4OL#({QIB}m8!G+C8C)^rmHY#NN@pfq>R3z0?U^U?B z7som116C2RZX|};xvkmXODi+!1eyRj9O%!7l)obSH96v5!>Fsr#qYz1t6aZU+L4_z z50q?*^7AQg_q7dNNzN3D>MAgdlW*^=H$@L0Gh;wHg50^t%M}MS2Ggtk7K}F~P+gy^ z*v-q%sETteFpLz=83aprnw%QzgMDZ+bB1|u5SqDqXSgx=#Qmo5>cxTU_5G1^5(;&j zN7OhM#AVG5bepCd$r#ADJXdmGT^K>#qKpG7IQNM`e5*hyNASHtc+eG%TPGI8LSEuX z<8MMpi=H~`zQc{*ynUFIuE$1(%54jM8xa|9`hr^HlM*L-5I(u3I@&LK)pZV>PmTMi zVP~0}8Z?i&IXjd;0S*=LVAJ?=ON0w39Me%6{452MvD%Yo6ravJ2=_&B!D3um+Ve&6FeIdM({ptM_}l z$A~NoJx-oWwnSQhEaA(A_uN#uI^CdI8+9zB-S>InZ)G>LT!PLq-m=U%=&_&7@nuZL zV)CdTAtLQ$e7?R8{v-~RR&kjg$}K7Wm_P(wE_E+8D2}bkCri~Ml~!(BU2j=J64iB# zkC&=o{DUsuts*LG7M^mLldJE6clPUHF*4-h)c~#e;qz(U_gTPJqP5lynL>hLEnztK zY@|zP`xE9j7y1n9pgQ^&AD!}6@!mvn6D0OqUfS3xAs5YJm!U?68Zu97B{z$*LXI}h ze7Ztnm>2*DsBBfn?yuUpe5zur8Vgs$Vb9i(AFy%w(4toscmuT$RlqfM*SmIL7@FH+ ztbU_%n*wF_fyRu07gb(^1m9Ehts_r+m2Dm=1N}?FuAX>2T@8e3y}1&KS~CoFe7Ax` zgrCnXT}$fVWyBZT40PQc@MIgQx0^j;I8oAGtmT;PP0A+B<}jKCUjF7v`sjv@+dJ}tY!GTx*oImGW6-=b*pNrjCI;m=37Yi6wW^-TG6OJe%$2kYgw6pXm{T+fD> zMxj8;ZT^_@s#33K=6_z#_*kGQCRS@$UCZTt6BDNf&>vhHnwg=kL9CL4U#>}NXBf2h$Oo*)t5tQ5ZLJ;3ptnlQ#&O%n@zj^k9o@J1#EIeEAb7f$ z^LjSHggE9EOmt_W=V^{{S~`vl#?KVj~+9v;_S?A(6{3_8Fs@4Ese!>x47pTc-Rd6lUw`dt>@m6iunCjbeV zsM1G1yLG9^jZWdptHiLndxGV1N3Y6-ROx-}o|@c|*{`N}(=?>!;ENO{RzYd7Ga)*q zD^*72Qwg@K>LuKWhw)mfJU6HDJoCwUh)#WEoJL{3k*dMc$c-F&caw8Kmw3A1%hO3t zOOC{CHKB~jdf~J{nx83BbQ7flCKx3N z$mn91x?>P6Z|F9S?Niv{#LAPaAQNll25HT9>jRL3`r;rK2b#tCMo18l`Ku3kv8rQC zfv4c(q7Ac9|8Y*ZmO%AD!gC!X`|YLR?;hViC2E^&4%DEvW7f@rKymfKeyrp1S6`pb zoLONNoRq(|?a98(A9V-3Mz8wft{obM*&#CqazA|IlC_dPgUzx>9OUmShrzY^!9zUU z+*naWv|{sAM~|DVFO=o9GFDhDc1jQTM9FUZ4D5{N|K`*3E!`gUD+RmY!u#G00>6zV zl*86shCsD39hRiqZB8Liq{HuBN+T~xt*MblM}T>R`!f-iLk#K{_nk{p{QAU5js=OS z9z%|^!#-}4aLwzJJn6cCY-9~QA+Iu99`Ck&a`}E_J=?FX|5?_P%+f!UE#8S)X4g<; z62ODOV0<>KFJ2TZfVoK7fgc*x#>&G=+C=NLrhyjR*~UnZh86<&Batbn--CURB}v(- zKMt2RN&tz;XNoE6~uY+pPl=U7=O{>zu5v5tK$cI zZ{I6{jtEfC+n&FNpDY>xmE{hk#@x#yfW-cjsmM8lTyRnN9*|HxZB&3-}vX3Q(I*n~`kZA1tK@^TskzGjE zV=Y3~h)mg!gzBHExT^F2@AZAx`+VQ^Ecg4o_jNt@lGui50)`O)jwXv*k=4MG^qe%~ zGFXm-W&l4dA@Tr(i_8vzB4jO0=;*ltFu(+0Viqjb2sL-tsE*!X0Fd5yBFg$X`}@0k zx%kOo{k*-h6U~gDo`ju};%W=YU_ z`&arCedvumGx8}XG`DkDLg*ebE<5FVdkKxVbxyQ(o3`g*pnyWm0|Sh;fLiEa0_Qd) zSu>-#0=O>ym#A9`J|!`Rj`$*;?TH}P!N+(Q|3u_w7*qR4YygY(^~ROFGnC`Z!7R^x zGRhL=m5#fg^Hh)dq7EVZ99!Ll88;`yhVK3uV8eSyzV>dd#=>e{{$n%!>{#WR_c2>q z0~DdlXA4rJ+&9IE|43-!3)DxuSm0i+coqhS_prZJ4max6Xx+HJH@<;@{@kRXLhj$L zQJa)WeU<-5lRTaMF*uArM#hcWE$1F>XqX&##7v#3!vmP{?JY*H`Ve{g9l;_FV;%bf z*Wf@ChuW+?C(&lLS(U4c_ARVxGt;vQ>j6F>IGZm?jJVZrze7>>1$mvVjspkJT_y1c zvWR3T8jZ3k`IN?2D*T6p^mF5hm95CMXfE665qktmd-dw+Q*v1uPS0Q|}T!SA>k zQU1<;{!TdT4@59?vb1J)W6kNIatX=>0J8hO#C_kdJKopZ&H1ALS58cvwf7d#=hm!X zL(lWd@FZng3L-<*-0M6u&UlU~xn&xA!Yf_F;UwpGN5*xB)w_E>eXQ zyB?TL6au38BF+d4Pi}*dKE3b8*3`yJ&`9g;HpK-6>NAyfBA2MaaKD{LuzthjW2yI~ z!%py<0`GhCrebsV`2Zb}sd;6(nvhCWjdW6&ewT)eo0O!Dpvly=K1Hc`Z)EE$SpMl-(eO9@0 zP;B#cM0dg=;hhBn)^cstpLt#R@N<2-lxL?iNtZj}V@a~jpuwu%@u&h`id?Z}Nm7EX z;0(inXkb?|mLZ6TfO&FNX{K_wrxn#xG;O?^7>Z3)o6`5Df7xCUhVc zTdPw!Oc7Q&XhHq09wvcg@J*>=%Pc9>?#snV(@I>wPnrHh)YT|6*FvwZ``+sa1cFK9 z2FgaoVJ-Mb!yTi@0P)o6w$T2IsxjcU^c94J%_V#aA9{FNr=^|0xL)~)Cd$^^q|%P> 
z@@#G)+MTE)GnOFz0WB=Vd*ZxH;>|xyz&<@Xk*BpHz$8x&g0FqMyez>~SU7$AX!O`8 zT;5-L{nhXX@IxAm$W%p=-O;hgjp;Z!KKZ*w=bVlc8GVmU1e~Tc;!>3ABV+}BA3S;s z2h^aOAJ-UDGA=zYVKm*AP?+QKUfm)gUwEd*5rt5`@k2Y zV%%19qqE;7(rMU!_g3tPjTNTV1Q~~5zQxq@PR2M$)U1n%V1vyYfn8AE zi(9p|*lREpUq;&xrUnU&udMNM>u9+}pKRx=z{!ptJulQcl9V?+=vdTEDi@yD&;9eB z&?$-TsFq8}Qn+Sxg}VaDoEto%38_K9F(8V^=}p@3c4dW%x8Q@i(E^aj${%O?dGa|23~JX(r@Z~eqLJPK@skyEvm1)B-&nD z`u3Bq1cqb8ofbWGCFLK^&r4TgZUW4v9ZUkO3YPV>T%K` zW`j zvREhu$*t?Lib$dRsQ;AlF3C}hf`~YB?0LO&EEikT%=*VUvk^R3`s=i3^fF!@8aH|K zQExbpnp!9Xl1JADuYK5zCfrLUUUP(Oc7|k$j6Mu}-(ONRXCdFY+V@x3CUc1SEk?)C zD_GCXEU#M+C^+uF@8XL3GeddJ!u9 zHXC2JC@vWpyfNh1W9iT~DIEN1?fDk2dffF~t;^|Wn!H+&?A@JDS?X~mjl_PlVtW_# z441Bfb@%9T2WW3p+k6gF4-TL>g3x^Q%l&gkPYmzse%h_M3<8mfgW-qZdyD69l^PGu+1oNcu z1WI~zi@KB=U$dkmzpL>!S&aTDl`>3pbfVi`b$u9PMd($lnu0HB)fQxDmPHx|tccW^ zktZmluFG+cA|ve$TN`c<2=*HGF>O5^F5Wr6sikgj!>V$fLhg$9^vLRox$p zhrW+R@5+e@&K=>)==wR0jz?gY2X83+-z zu-2_@jtE^uHX_{d^(Vydjune^%oopc&c3BA3BkdZULF@Ngvh*~jgaeTW3J->X@5Rn z-Yd*RvH&T)fT`4~k!r)yx+-m>Q+=H_zc3ZOoKcW9gr1H@E%SF>r z2igPx#A%W?8LB{!q#4~%sS5ic5P1t4y03d1+2J6&uQ&6K9r8^9?@@n6?OzsI4h9kW zRSgMI+w*O~s9W84U#K*-FbK>I{2zV19|It%*7kmo+RtkF*9OY>{s0eJ^tf=F7xPZa+i z@C~3X3$4;YfFg(3xBg~x!NHQOzV1xc;YNf T7^rlim4E?F^Z<|prG5PurvhMj From 5c34fcb223c1be4d58b0d445bde31297320a1a3f Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 4 Nov 2022 15:25:16 +0100 Subject: [PATCH 44/68] OP-3426 - added requirements for mongodump in readme --- tests/README.md | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/tests/README.md b/tests/README.md index 69828cdbc29..d36b6534f8b 100644 --- a/tests/README.md +++ b/tests/README.md @@ -1,5 +1,15 @@ Automatic tests for OpenPype ============================ + +Requirements: +============ +Tests are recreating fresh DB for each run, so `mongorestore`, `mongodump` and `mongoimport` command line tools must be installed and on Path. + +You can find intallers here: https://www.mongodb.com/docs/database-tools/installation/installation/ + +You can test that `mongorestore` is available by running this in console, or cmd: +```mongorestore --version``` + Structure: - integration - end to end tests, slow (see README.md in the integration folder for more info) - openpype/modules/MODULE_NAME - structure follow directory structure in code base From 8aa05190c6fdd7f5f2123a1e3ca1c72baa4cecb4 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 4 Nov 2022 15:28:58 +0100 Subject: [PATCH 45/68] OP-3426 - fix wrong command in web docs --- website/docs/dev_testing.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/website/docs/dev_testing.md b/website/docs/dev_testing.md index cab298ae37c..5e23afbf721 100644 --- a/website/docs/dev_testing.md +++ b/website/docs/dev_testing.md @@ -23,7 +23,7 @@ If you would like just to experiment with provided integration tests, and have p ``` - From build: ``` -- ${OPENPYPE_BUILD}/openpype_console run {ABSOLUTE_PATH_OPENPYPE_ROOT}/tests/integration/hosts/nuke` +- ${OPENPYPE_BUILD}/openpype_console runtests {ABSOLUTE_PATH_OPENPYPE_ROOT}/tests/integration/hosts/nuke` ``` Modify tests path argument to limit which tests should be run (`../tests/integration` will run all implemented integration tests). 
From fc99a9a35eef578ee16e17bcf00d9e29987dad3f Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 4 Nov 2022 15:32:45 +0100 Subject: [PATCH 46/68] OP-3426 - raise better exception if mongorestore not found --- tests/lib/db_handler.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/tests/lib/db_handler.py b/tests/lib/db_handler.py index ef3c2b52f60..82e741cc3b3 100644 --- a/tests/lib/db_handler.py +++ b/tests/lib/db_handler.py @@ -118,7 +118,7 @@ def setup_from_dump(self, db_name, dump_dir, overwrite=False, "Run with overwrite=True") else: if collection: - if collection in self.client[db_name_out].list_collection_names(): + if collection in self.client[db_name_out].list_collection_names(): # noqa self.client[db_name_out][collection].drop() else: self.teardown(db_name_out) @@ -132,7 +132,11 @@ def setup_from_dump(self, db_name, dump_dir, overwrite=False, db_name=db_name, db_name_out=db_name_out, collection=collection) print("mongorestore query:: {}".format(query)) - subprocess.run(query) + try: + subprocess.run(query) + except FileNotFoundError: + raise RuntimeError("'mongorestore' utility must be on path." + "Please install it.") def teardown(self, db_name): """Drops 'db_name' if exists.""" From 4a4726fe79690f18c07b87fa62e26eb0bbd18b6e Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 4 Nov 2022 15:39:22 +0100 Subject: [PATCH 47/68] OP-3426 - add more details to web documentation --- website/docs/dev_testing.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/website/docs/dev_testing.md b/website/docs/dev_testing.md index 5e23afbf721..7136ceb4792 100644 --- a/website/docs/dev_testing.md +++ b/website/docs/dev_testing.md @@ -14,6 +14,11 @@ But many tests should yet be created! - installed DCC you want to test - `mongorestore` on a PATH +You could check that `mongorestore` is available by running this in console (or cmd), it shouldn't fail and you should see version of utility: +```commandline +mongorestore --version +``` + If you would like just to experiment with provided integration tests, and have particular DCC installed on your machine, you could run test for this host by: - From source: From 350685b4c6bd4c688da3483ef5989153ee0205db Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 4 Nov 2022 18:08:01 +0100 Subject: [PATCH 48/68] OP-3426 - changed APP to APP_GROUP APP_GROUP is more descriptive --- .../aftereffects/test_deadline_publish_in_aftereffects.py | 4 ++-- .../hosts/aftereffects/test_publish_in_aftereffects.py | 4 ++-- .../aftereffects/test_publish_in_aftereffects_multiframe.py | 4 ++-- .../integration/hosts/maya/test_deadline_publish_in_maya.py | 2 +- tests/integration/hosts/maya/test_publish_in_maya.py | 2 +- .../integration/hosts/nuke/test_deadline_publish_in_nuke.py | 2 +- tests/integration/hosts/nuke/test_publish_in_nuke.py | 4 ++-- .../hosts/photoshop/test_publish_in_photoshop.py | 4 ++-- tests/lib/testing_classes.py | 6 +++--- 9 files changed, 16 insertions(+), 16 deletions(-) diff --git a/tests/integration/hosts/aftereffects/test_deadline_publish_in_aftereffects.py b/tests/integration/hosts/aftereffects/test_deadline_publish_in_aftereffects.py index 103ce6fe5b8..2c88f8d8019 100644 --- a/tests/integration/hosts/aftereffects/test_deadline_publish_in_aftereffects.py +++ b/tests/integration/hosts/aftereffects/test_deadline_publish_in_aftereffects.py @@ -34,10 +34,10 @@ class TestDeadlinePublishInAfterEffects(AEDeadlinePublishTestClass): "") ] - APP = "aftereffects" + APP_GROUP = "aftereffects" APP_VARIANT = "" - APP_NAME = 
"{}/{}".format(APP, APP_VARIANT) + APP_NAME = "{}/{}".format(APP_GROUP, APP_VARIANT) TIMEOUT = 120 # publish timeout diff --git a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects.py b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects.py index 191e86ebafb..dc7c9c608d8 100644 --- a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects.py +++ b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects.py @@ -32,10 +32,10 @@ class TestPublishInAfterEffects(AELocalPublishTestClass): "") ] - APP = "aftereffects" + APP_GROUP = "aftereffects" APP_VARIANT = "" - APP_NAME = "{}/{}".format(APP, APP_VARIANT) + APP_NAME = "{}/{}".format(APP_GROUP, APP_VARIANT) TIMEOUT = 120 # publish timeout diff --git a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py index dd61e72c6f5..9c5a8de0d15 100644 --- a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py +++ b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py @@ -19,10 +19,10 @@ class TestPublishInAfterEffects(AELocalPublishTestClass): "") ] - APP = "aftereffects" + APP_GROUP = "aftereffects" APP_VARIANT = "" - APP_NAME = "{}/{}".format(APP, APP_VARIANT) + APP_NAME = "{}/{}".format(APP_GROUP, APP_VARIANT) TIMEOUT = 120 # publish timeout diff --git a/tests/integration/hosts/maya/test_deadline_publish_in_maya.py b/tests/integration/hosts/maya/test_deadline_publish_in_maya.py index 1c23129b562..f23357bf8f3 100644 --- a/tests/integration/hosts/maya/test_deadline_publish_in_maya.py +++ b/tests/integration/hosts/maya/test_deadline_publish_in_maya.py @@ -28,7 +28,7 @@ class TestDeadlinePublishInMaya(MayaDeadlinePublishTestClass): "test_maya_deadline_publish.zip", "") ] - APP = "maya" + APP_GROUP = "maya" # keep empty to locate latest installed variant or explicit APP_VARIANT = "" diff --git a/tests/integration/hosts/maya/test_publish_in_maya.py b/tests/integration/hosts/maya/test_publish_in_maya.py index bff7ccb2f7c..50e276872be 100644 --- a/tests/integration/hosts/maya/test_publish_in_maya.py +++ b/tests/integration/hosts/maya/test_publish_in_maya.py @@ -29,7 +29,7 @@ class TestPublishInMaya(MayaLocalPublishTestClass): ("1BTSIIULJTuDc8VvXseuiJV_fL6-Bu7FP", "test_maya_publish.zip", "") ] - APP = "maya" + APP_GROUP = "maya" # keep empty to locate latest installed variant or explicit APP_VARIANT = "" diff --git a/tests/integration/hosts/nuke/test_deadline_publish_in_nuke.py b/tests/integration/hosts/nuke/test_deadline_publish_in_nuke.py index bac3b6898e3..cd9cbb94f89 100644 --- a/tests/integration/hosts/nuke/test_deadline_publish_in_nuke.py +++ b/tests/integration/hosts/nuke/test_deadline_publish_in_nuke.py @@ -40,7 +40,7 @@ class TestDeadlinePublishInNuke(NukeDeadlinePublishTestClass): "test_nuke_deadline_publish.zip", "") ] - APP = "nuke" + APP_GROUP = "nuke" TIMEOUT = 180 # publish timeout diff --git a/tests/integration/hosts/nuke/test_publish_in_nuke.py b/tests/integration/hosts/nuke/test_publish_in_nuke.py index f6e5ad6921e..f84f13fa20d 100644 --- a/tests/integration/hosts/nuke/test_publish_in_nuke.py +++ b/tests/integration/hosts/nuke/test_publish_in_nuke.py @@ -39,9 +39,9 @@ class TestPublishInNuke(NukeLocalPublishTestClass): ("1SUurHj2aiQ21ZIMJfGVBI2KjR8kIjBGI", "test_Nuke_publish.zip", "") ] - APP = "nuke" + APP_GROUP = "nuke" - TIMEOUT = 180 # publish timeout + TIMEOUT = 50 # publish timeout # could be overwritten by command line arguments 
# keep empty to locate latest installed variant or explicit diff --git a/tests/integration/hosts/photoshop/test_publish_in_photoshop.py b/tests/integration/hosts/photoshop/test_publish_in_photoshop.py index d4ab3e77346..4aaf43234db 100644 --- a/tests/integration/hosts/photoshop/test_publish_in_photoshop.py +++ b/tests/integration/hosts/photoshop/test_publish_in_photoshop.py @@ -41,11 +41,11 @@ class TestPublishInPhotoshop(PhotoshopTestClass): ("1zD2v5cBgkyOm_xIgKz3WKn8aFB_j8qC-", "test_photoshop_publish.zip", "") ] - APP = "photoshop" + APP_GROUP = "photoshop" # keep empty to locate latest installed variant or explicit APP_VARIANT = "" - APP_NAME = "{}/{}".format(APP, APP_VARIANT) + APP_NAME = "{}/{}".format(APP_GROUP, APP_VARIANT) TIMEOUT = 120 # publish timeout diff --git a/tests/lib/testing_classes.py b/tests/lib/testing_classes.py index ca503765846..b7993ec4baf 100644 --- a/tests/lib/testing_classes.py +++ b/tests/lib/testing_classes.py @@ -194,7 +194,7 @@ class PublishTest(ModuleUnitTest): TODO: implement test on file size, file content """ - APP = "" + APP_GROUP = "" TIMEOUT = 120 # publish timeout @@ -216,10 +216,10 @@ def app_name(self, app_variant): if not app_variant: variant = ( application_manager.find_latest_available_variant_for_group( - self.APP)) + self.APP_GROUP)) app_variant = variant.name - yield "{}/{}".format(self.APP, app_variant) + yield "{}/{}".format(self.APP_GROUP, app_variant) @pytest.fixture(scope="module") def output_folder_url(self, download_test_data): From 058d4014a88cb538e729a0dc2482f232ccb4958a Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 7 Nov 2022 11:28:30 +0100 Subject: [PATCH 49/68] OP-3426 - refactor - cleaned up cli arguments --- openpype/cli.py | 13 ++++--------- .../plugins/publish/submit_publish_job.py | 2 +- .../repository/custom/plugins/GlobalJobPreLoad.py | 2 +- start.py | 15 +++++++-------- 4 files changed, 13 insertions(+), 19 deletions(-) diff --git a/openpype/cli.py b/openpype/cli.py index a7b1c67b75c..9034583a7a9 100644 --- a/openpype/cli.py +++ b/openpype/cli.py @@ -24,6 +24,8 @@ help=("Enable debug")) @click.option("--verbose", expose_value=False, help=("Change OpenPype log level (debug - critical or 0-50)")) +@click.option("--automatic-tests", is_flag=True, expose_value=False, + help=("Run in automatic tests mode")) def main(ctx): """Pype is main command serving as entry point to pipeline system. @@ -127,11 +129,7 @@ def webpublisherwebserver(executable, upload_dir, host=None, port=None): @click.option( "--envgroup", help="Environment group (e.g. \"farm\")", default=None ) -@click.option( - "--automatic_tests", help="Is this automatic test", default=None -) -def extractenvironments(output_json_path, project, asset, task, app, envgroup, - automatic_tests): +def extractenvironments(output_json_path, project, asset, task, app, envgroup): """Extract environment variables for entered context to a json file. Entered output filepath will be created if does not exists. @@ -152,10 +150,7 @@ def extractenvironments(output_json_path, project, asset, task, app, envgroup, multiple=True) @click.option("-g", "--gui", is_flag=True, help="Show Publish UI", default=False) -@click.option( - "--automatic_tests", help="Is this automatic test", default=None -) -def publish(paths, targets, gui, automatic_tests): +def publish(paths, targets, gui): """Start CLI publishing. Publish collects json from paths provided as an argument. 
diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index 32ad22f6b01..aabc4bf84cd 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -267,7 +267,7 @@ def _submit_deadline_post_job(self, instance, job, instances): ] if os.environ.get("IS_TEST"): - args.extend(["--automatic_tests", "1"]) + args.append("--automatic-tests") # Generate the payload for Deadline submission payload = { diff --git a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py index 84b435ccdd9..c0c7ee4d54b 100644 --- a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py +++ b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py @@ -181,7 +181,7 @@ def inject_openpype_environment(deadlinePlugin): add_args["envgroup"] = "farm" if job.GetJobEnvironmentKeyValue('IS_TEST'): - add_args["automatic_tests"] = "true" + args.append("--automatic-tests") if all(add_args.values()): for key, value in add_args.items(): diff --git a/start.py b/start.py index 5242a50f147..ec0465b0f52 100644 --- a/start.py +++ b/start.py @@ -242,6 +242,10 @@ def _print(message: str): sys.argv.remove("--debug") os.environ["OPENPYPE_DEBUG"] = "1" +if "--automatic-tests" in sys.argv: + sys.argv.remove("--automatic-tests") + os.environ["IS_TEST"] = "1" + import igniter # noqa: E402 from igniter import BootstrapRepos # noqa: E402 @@ -486,7 +490,6 @@ def _process_arguments() -> tuple: use_version = None use_staging = False commands = [] - automatic_tests = False # OpenPype version specification through arguments use_version_arg = "--use-version" @@ -571,10 +574,7 @@ def _process_arguments() -> tuple: sys.argv.pop(idx) sys.argv.insert(idx, "tray") - if "--automatic_tests" in sys.argv: - automatic_tests = True - - return use_version, use_staging, commands, automatic_tests + return use_version, use_staging, commands def _determine_mongodb() -> str: @@ -1001,7 +1001,7 @@ def boot(): # Process arguments # ------------------------------------------------------------------------ - use_version, use_staging, commands, automatic_tests = _process_arguments() + use_version, use_staging, commands = _process_arguments() if os.getenv("OPENPYPE_VERSION"): if use_version: @@ -1028,9 +1028,8 @@ def boot(): os.environ["OPENPYPE_DATABASE_NAME"] = \ os.environ.get("OPENPYPE_DATABASE_NAME") or "openpype" - if automatic_tests: + if os.environ.get("IS_TEST") == "1": # change source DBs to predefined ones set for automatic testing - os.environ["IS_TEST"] = "1" os.environ["OPENPYPE_DATABASE_NAME"] += "_tests" avalon_db = os.environ.get("AVALON_DB") or "avalon" os.environ["AVALON_DB"] = avalon_db + "_tests" From 66bea60bdd8206e14b0c3e729aa54a640a883499 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 7 Nov 2022 11:40:15 +0100 Subject: [PATCH 50/68] OP-3426 - refactor - replaced check of env var with function It would be better to modify single function if necessary. 
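The whole refactor boils down to one small helper plus mechanical call-site changes; a condensed sketch of the pattern (the real hunks follow below, including the helper added to openpype/tests/lib.py):

    # openpype/tests/lib.py - single owner of the "automatic tests" convention
    import os


    def is_in_tests():
        """Return True when the process runs in automatic tests mode."""
        return os.environ.get("IS_TEST") == "1"

    # call sites across hosts and Deadline plugins then change from
    #     if os.environ.get("IS_TEST"):
    # to
    #     if is_in_tests():
    # so the env var name and its expected value live in one place only.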
--- openpype/hosts/aftereffects/api/lib.py | 3 ++- openpype/hosts/photoshop/api/lib.py | 3 ++- .../photoshop/plugins/publish/collect_batch_data.py | 3 ++- .../plugins/publish/collect_color_coded_instances.py | 3 ++- .../plugins/publish/submit_aftereffects_deadline.py | 3 ++- .../deadline/plugins/publish/submit_harmony_deadline.py | 3 ++- .../plugins/publish/submit_houdini_remote_publish.py | 3 ++- .../plugins/publish/submit_houdini_render_deadline.py | 2 +- .../deadline/plugins/publish/submit_maya_deadline.py | 3 ++- .../publish/submit_maya_remote_publish_deadline.py | 3 ++- .../deadline/plugins/publish/submit_nuke_deadline.py | 3 ++- .../deadline/plugins/publish/submit_publish_job.py | 5 +++-- openpype/plugins/publish/cleanup.py | 4 +++- openpype/plugins/publish/collect_scene_version.py | 3 ++- openpype/tests/lib.py | 9 +++++++++ 15 files changed, 38 insertions(+), 15 deletions(-) diff --git a/openpype/hosts/aftereffects/api/lib.py b/openpype/hosts/aftereffects/api/lib.py index 8cdf9c407ec..b797bde78ec 100644 --- a/openpype/hosts/aftereffects/api/lib.py +++ b/openpype/hosts/aftereffects/api/lib.py @@ -13,6 +13,7 @@ from openpype.modules import ModulesManager from openpype.tools.utils import host_tools +from openpype.tests.lib import is_in_tests from .launch_logic import ProcessLauncher, get_stub log = logging.getLogger(__name__) @@ -46,7 +47,7 @@ def main(*subprocess_args): webpublisher_addon.headless_publish, log, "CloseAE", - os.environ.get("IS_TEST") + is_in_tests() ) ) diff --git a/openpype/hosts/photoshop/api/lib.py b/openpype/hosts/photoshop/api/lib.py index 221b4314e6f..bac30bc572b 100644 --- a/openpype/hosts/photoshop/api/lib.py +++ b/openpype/hosts/photoshop/api/lib.py @@ -9,6 +9,7 @@ from openpype.modules import ModulesManager from openpype.pipeline import install_host from openpype.tools.utils import host_tools +from openpype.tests.lib import is_in_tests from .launch_logic import ProcessLauncher, stub @@ -40,7 +41,7 @@ def main(*subprocess_args): webpublisher_addon.headless_publish, log, "ClosePS", - os.environ.get("IS_TEST") + is_in_tests ) elif env_value_to_bool("AVALON_PHOTOSHOP_WORKFILES_ON_LAUNCH", default=True): diff --git a/openpype/hosts/photoshop/plugins/publish/collect_batch_data.py b/openpype/hosts/photoshop/plugins/publish/collect_batch_data.py index 5d50a789148..a5fea7ac7d4 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_batch_data.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_batch_data.py @@ -22,6 +22,7 @@ get_batch_asset_task_info, parse_json ) +from openpype.tests.lib import is_in_tests class CollectBatchData(pyblish.api.ContextPlugin): @@ -39,7 +40,7 @@ class CollectBatchData(pyblish.api.ContextPlugin): def process(self, context): self.log.info("CollectBatchData") batch_dir = os.environ.get("OPENPYPE_PUBLISH_DATA") - if os.environ.get("IS_TEST"): + if is_in_tests(): self.log.debug("Automatic testing, no batch data, skipping") return diff --git a/openpype/hosts/photoshop/plugins/publish/collect_color_coded_instances.py b/openpype/hosts/photoshop/plugins/publish/collect_color_coded_instances.py index c157c932fd2..90fca8398f9 100644 --- a/openpype/hosts/photoshop/plugins/publish/collect_color_coded_instances.py +++ b/openpype/hosts/photoshop/plugins/publish/collect_color_coded_instances.py @@ -6,6 +6,7 @@ from openpype.lib import prepare_template_data from openpype.hosts.photoshop import api as photoshop from openpype.settings import get_project_settings +from openpype.tests.lib import is_in_tests class 
CollectColorCodedInstances(pyblish.api.ContextPlugin): @@ -46,7 +47,7 @@ class CollectColorCodedInstances(pyblish.api.ContextPlugin): def process(self, context): self.log.info("CollectColorCodedInstances") batch_dir = os.environ.get("OPENPYPE_PUBLISH_DATA") - if (os.environ.get("IS_TEST") and + if (is_in_tests() and (not batch_dir or not os.path.exists(batch_dir))): self.log.debug("Automatic testing, no batch data, skipping") return diff --git a/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py b/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py index c6b94c2ce80..f26047bb9de 100644 --- a/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_aftereffects_deadline.py @@ -11,6 +11,7 @@ from openpype.pipeline import legacy_io from openpype_modules.deadline import abstract_submit_deadline from openpype_modules.deadline.abstract_submit_deadline import DeadlineJobInfo +from openpype.tests.lib import is_in_tests @attr.s @@ -50,7 +51,7 @@ def get_job_info(self): context = self._instance.context batch_name = os.path.basename(self._instance.data["source"]) - if os.environ.get("IS_TEST"): + if is_in_tests(): batch_name += datetime.now().strftime("%d%m%Y%H%M%S") dln_job_info.Name = self._instance.data["name"] dln_job_info.BatchName = batch_name diff --git a/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py b/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py index f98649cb1ae..425883393fa 100644 --- a/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_harmony_deadline.py @@ -13,6 +13,7 @@ from openpype.pipeline import legacy_io from openpype_modules.deadline import abstract_submit_deadline from openpype_modules.deadline.abstract_submit_deadline import DeadlineJobInfo +from openpype.tests.lib import is_in_tests class _ZipFile(ZipFile): @@ -263,7 +264,7 @@ def get_job_info(self): job_info.SecondaryPool = self._instance.data.get("secondaryPool") job_info.ChunkSize = self.chunk_size batch_name = os.path.basename(self._instance.data["source"]) - if os.environ.get("IS_TEST"): + if is_in_tests: batch_name += datetime.now().strftime("%d%m%Y%H%M%S") job_info.BatchName = batch_name job_info.Department = self.department diff --git a/openpype/modules/deadline/plugins/publish/submit_houdini_remote_publish.py b/openpype/modules/deadline/plugins/publish/submit_houdini_remote_publish.py index dbd8645817c..6a62f83cae1 100644 --- a/openpype/modules/deadline/plugins/publish/submit_houdini_remote_publish.py +++ b/openpype/modules/deadline/plugins/publish/submit_houdini_remote_publish.py @@ -8,6 +8,7 @@ import pyblish.api from openpype.pipeline import legacy_io +from openpype.tests.lib import is_in_tests class HoudiniSubmitPublishDeadline(pyblish.api.ContextPlugin): @@ -61,7 +62,7 @@ def process(self, context): job_name = "{scene} [PUBLISH]".format(scene=scenename) batch_name = "{code} - {scene}".format(code=code, scene=scenename) - if os.environ.get("IS_TEST"): + if is_in_tests(): batch_name += datetime.now().strftime("%d%m%Y%H%M%S") deadline_user = "roy" # todo: get deadline user dynamically diff --git a/openpype/modules/deadline/plugins/publish/submit_houdini_render_deadline.py b/openpype/modules/deadline/plugins/publish/submit_houdini_render_deadline.py index 59472fdd547..7a232df43ed 100644 --- a/openpype/modules/deadline/plugins/publish/submit_houdini_render_deadline.py +++ 
b/openpype/modules/deadline/plugins/publish/submit_houdini_render_deadline.py @@ -46,7 +46,7 @@ def process(self, instance): if code: batch_name = "{0} - {1}".format(code, batch_name) - if os.environ.get("IS_TEST"): + if is_in_tests: batch_name += datetime.now().strftime("%d%m%Y%H%M%S") # Output driver to render diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py index 7c4b7c47c56..3dd324f4746 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_deadline.py @@ -37,6 +37,7 @@ from openpype_modules.deadline import abstract_submit_deadline from openpype_modules.deadline.abstract_submit_deadline import DeadlineJobInfo +from openpype.tests.lib import is_in_tests def _validate_deadline_bool_value(instance, attribute, value): @@ -121,7 +122,7 @@ def get_job_info(self): src_filepath = context.data["currentFile"] src_filename = os.path.basename(src_filepath) - if os.environ.get("IS_TEST"): + if is_in_tests(): src_filename += datetime.now().strftime("%d%m%Y%H%M%S") job_info.Name = "%s - %s" % (src_filename, instance.name) diff --git a/openpype/modules/deadline/plugins/publish/submit_maya_remote_publish_deadline.py b/openpype/modules/deadline/plugins/publish/submit_maya_remote_publish_deadline.py index 1023966af84..bab6591c7f3 100644 --- a/openpype/modules/deadline/plugins/publish/submit_maya_remote_publish_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_maya_remote_publish_deadline.py @@ -6,6 +6,7 @@ from openpype.pipeline import legacy_io, PublishXmlValidationError from openpype.settings import get_project_settings +from openpype.tests.lib import is_in_tests import pyblish.api @@ -58,7 +59,7 @@ def process(self, instance): job_name = "{scene} [PUBLISH]".format(scene=scenename) batch_name = "{code} - {scene}".format(code=project_name, scene=scenename) - if os.environ.get("IS_TEST"): + if is_in_tests(): batch_name += datetime.now().strftime("%d%m%Y%H%M%S") # Generate the payload for Deadline submission diff --git a/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py b/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py index 44abf12ef49..ccb5be75dc7 100644 --- a/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_nuke_deadline.py @@ -9,6 +9,7 @@ import nuke from openpype.pipeline import legacy_io +from openpype.tests.lib import is_in_tests class NukeSubmitDeadline(pyblish.api.InstancePlugin): @@ -144,7 +145,7 @@ def payload_submit( render_dir = os.path.normpath(os.path.dirname(render_path)) batch_name = os.path.basename(script_path) jobname = "%s - %s" % (batch_name, instance.name) - if os.environ.get("IS_TEST"): + if is_in_tests(): batch_name += datetime.now().strftime("%d%m%Y%H%M%S") diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index aabc4bf84cd..3d72ddb1f97 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -18,6 +18,7 @@ get_representation_path, legacy_io, ) +from openpype.tests.lib import is_in_tests from openpype.pipeline.farm.patterning import match_aov_pattern @@ -249,7 +250,7 @@ def _submit_deadline_post_job(self, instance, job, instances): environment["OPENPYPE_USERNAME"] = instance.context.data["user"] 
environment["OPENPYPE_PUBLISH_JOB"] = "1" environment["OPENPYPE_RENDER_JOB"] = "0" - environment["IS_TEST"] = os.environ.get("IS_TEST") + environment["IS_TEST"] = is_in_tests() # Add mongo url if it's enabled if instance.context.data.get("deadlinePassMongoUrl"): mongo_url = os.environ.get("OPENPYPE_MONGO") @@ -266,7 +267,7 @@ def _submit_deadline_post_job(self, instance, job, instances): "--targets", "farm" ] - if os.environ.get("IS_TEST"): + if is_in_tests(): args.append("--automatic-tests") # Generate the payload for Deadline submission diff --git a/openpype/plugins/publish/cleanup.py b/openpype/plugins/publish/cleanup.py index 34480dd1998..ef312e391f8 100644 --- a/openpype/plugins/publish/cleanup.py +++ b/openpype/plugins/publish/cleanup.py @@ -5,6 +5,8 @@ import pyblish.api import re +from openpype.tests.lib import is_in_tests + class CleanUp(pyblish.api.InstancePlugin): """Cleans up the staging directory after a successful publish. @@ -44,7 +46,7 @@ class CleanUp(pyblish.api.InstancePlugin): def process(self, instance): """Plugin entry point.""" - if os.environ.get("IS_TEST"): + if is_in_tests(): # let automatic test process clean up temporary data return # Get the errored instances diff --git a/openpype/plugins/publish/collect_scene_version.py b/openpype/plugins/publish/collect_scene_version.py index a7cea6093a4..fdbcb3cb9d0 100644 --- a/openpype/plugins/publish/collect_scene_version.py +++ b/openpype/plugins/publish/collect_scene_version.py @@ -2,6 +2,7 @@ import pyblish.api from openpype.lib import get_version_from_path +from openpype.tests.lib import is_in_tests class CollectSceneVersion(pyblish.api.ContextPlugin): @@ -36,7 +37,7 @@ def process(self, context): # tests should be close to regular publish as possible if ( os.environ.get("HEADLESS_PUBLISH") - and not os.environ.get("IS_TEST") + and not is_in_tests() and context.data["hostName"] in self.skip_hosts_headless_publish): self.log.debug("Skipping for headless publishing") return diff --git a/openpype/tests/lib.py b/openpype/tests/lib.py index 85b90328360..1fa5fb8054e 100644 --- a/openpype/tests/lib.py +++ b/openpype/tests/lib.py @@ -78,3 +78,12 @@ def tempdir(): yield tempdir finally: shutil.rmtree(tempdir) + + +def is_in_tests(): + """Returns if process is running in automatic tests mode. + + In tests mode different source DB is used, some plugins might be disabled + etc. 
+ """ + return os.environ.get("IS_TEST") == '1' From d395118a6c65df5673a6945e7f1f7dc4ed5ff7d2 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 7 Nov 2022 11:51:28 +0100 Subject: [PATCH 51/68] OP-3426 - fix - wrong function signature --- .../deadline/plugins/publish/submit_houdini_render_deadline.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_houdini_render_deadline.py b/openpype/modules/deadline/plugins/publish/submit_houdini_render_deadline.py index 7a232df43ed..a9d377069e7 100644 --- a/openpype/modules/deadline/plugins/publish/submit_houdini_render_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_houdini_render_deadline.py @@ -46,7 +46,7 @@ def process(self, instance): if code: batch_name = "{0} - {1}".format(code, batch_name) - if is_in_tests: + if is_in_tests(): batch_name += datetime.now().strftime("%d%m%Y%H%M%S") # Output driver to render From 23c70e393890ab2ff1e05c3cbf402f76e7be52f1 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 7 Nov 2022 11:51:59 +0100 Subject: [PATCH 52/68] OP-3426 - fix - wrong function signature --- .../deadline/plugins/publish/submit_houdini_render_deadline.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/modules/deadline/plugins/publish/submit_houdini_render_deadline.py b/openpype/modules/deadline/plugins/publish/submit_houdini_render_deadline.py index a9d377069e7..2b17b644b80 100644 --- a/openpype/modules/deadline/plugins/publish/submit_houdini_render_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_houdini_render_deadline.py @@ -9,6 +9,7 @@ # import hou ??? from openpype.pipeline import legacy_io +from openpype.tests.lib import is_in_tests class HoudiniSubmitRenderDeadline(pyblish.api.InstancePlugin): From d3769b3b737a36b6813fec38640a01179b5c014e Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 7 Nov 2022 12:50:05 +0100 Subject: [PATCH 53/68] OP-3426 - fix - add _tests suffix only if not present _tests suffix is added to avalon and openpype DB for testing run. env vars for these might be already injected, so do not add them again. --- start.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/start.py b/start.py index ec0465b0f52..e360f373a1e 100644 --- a/start.py +++ b/start.py @@ -1030,9 +1030,11 @@ def boot(): if os.environ.get("IS_TEST") == "1": # change source DBs to predefined ones set for automatic testing - os.environ["OPENPYPE_DATABASE_NAME"] += "_tests" + if "_tests" not in os.environ["OPENPYPE_DATABASE_NAME"]: + os.environ["OPENPYPE_DATABASE_NAME"] += "_tests" avalon_db = os.environ.get("AVALON_DB") or "avalon" - os.environ["AVALON_DB"] = avalon_db + "_tests" + if "_tests" not in avalon_db: + os.environ["AVALON_DB"] = avalon_db + "_tests" global_settings = get_openpype_global_settings(openpype_mongo) From e5a604d58f6b0d6b75540864374d8bdfbd5cda8a Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 7 Nov 2022 18:20:12 +0100 Subject: [PATCH 54/68] OP-3426 - fix - failed class variable wasn't changed Pytest classes cannot have __init__, so this ugly way is implemented for now to keep flag of failure to check before teardown. Marking all class variables didn't work for some reason. 
--- tests/lib/testing_classes.py | 24 +++++++++++------------- 1 file changed, 11 insertions(+), 13 deletions(-) diff --git a/tests/lib/testing_classes.py b/tests/lib/testing_classes.py index b7993ec4baf..77e1ec4eb73 100644 --- a/tests/lib/testing_classes.py +++ b/tests/lib/testing_classes.py @@ -51,8 +51,6 @@ class ModuleUnitTest(BaseTest): TEST_DATA_FOLDER = None - failed = False - @pytest.fixture(scope='session') def monkeypatch_session(self): """Monkeypatch couldn't be used with module or session fixtures.""" @@ -83,7 +81,7 @@ def download_test_data(self, test_data_folder, persist=False): print("Temporary folder created:: {}".format(tmpdir)) yield tmpdir - persist = persist or self.PERSIST or self.failed + persist = persist or self.PERSIST or ModuleUnitTest.failed if not persist: print("Removing {}".format(tmpdir)) shutil.rmtree(tmpdir) @@ -146,7 +144,7 @@ def db_setup(self, download_test_data, env_var, monkeypatch_session): yield db_handler - persist = self.PERSIST or self.failed + persist = self.PERSIST or ModuleUnitTest.failed if not persist: db_handler.teardown(self.TEST_DB_NAME) db_handler.teardown(self.TEST_OPENPYPE_NAME) @@ -308,7 +306,7 @@ def publish_finished(self, dbcon, launched_app, download_test_data, while launched_app.poll() is None: time.sleep(0.5) if time.time() - time_start > timeout: - self.failed = True + ModuleUnitTest.failed = True launched_app.terminate() raise ValueError("Timeout reached") @@ -347,7 +345,7 @@ def test_folder_structure_same(self, dbcon, publish_finished, not_matched = expected.symmetric_difference(filtered_published) if not_matched: - self.failed = True + ModuleUnitTest.failed = True raise AssertionError("Missing {} files".format( "\n".join(sorted(not_matched)))) @@ -364,7 +362,7 @@ def publish_finished(self, dbcon, launched_app, download_test_data, while launched_app.poll() is None: time.sleep(0.5) if time.time() - time_start > timeout: - self.failed = True + ModuleUnitTest.failed = True launched_app.terminate() raise ValueError("Timeout reached") @@ -373,11 +371,11 @@ def publish_finished(self, dbcon, launched_app, download_test_data, "**/*_metadata.json"), recursive=True) if not metadata_json: - self.failed = True + ModuleUnitTest.failed = True raise RuntimeError("No metadata file found. No job id.") if len(metadata_json) > 1: - self.failed = True + ModuleUnitTest.failed = True raise RuntimeError("Too many metadata files found.") with open(metadata_json[0]) as fp: @@ -390,7 +388,7 @@ def publish_finished(self, dbcon, launched_app, download_test_data, deadline_url = deadline_module.deadline_urls["default"] if not deadline_url: - self.failed = True + ModuleUnitTest.failed = True raise ValueError("Must have default deadline url.") url = "{}/api/jobs?JobId={}".format(deadline_url, deadline_job_id) @@ -400,17 +398,17 @@ def publish_finished(self, dbcon, launched_app, download_test_data, while not valid_date_finished: time.sleep(0.5) if time.time() - time_start > timeout: - self.failed = True + ModuleUnitTest.failed = True raise ValueError("Timeout for DL finish reached") response = requests.get(url, timeout=10) if not response.ok: - self.failed = True + ModuleUnitTest.failed = True msg = "Couldn't connect to {}".format(deadline_url) raise RuntimeError(msg) if not response.json(): - self.failed = True + ModuleUnitTest.failed = True raise ValueError("Couldn't find {}".format(deadline_job_id)) # '0001-...' 
returned until job is finished From 12ef7357471106d5fc68b15cd8f059acb063dd3c Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 7 Nov 2022 18:26:43 +0100 Subject: [PATCH 55/68] OP-3426 - fix - function must be called --- openpype/hosts/photoshop/api/lib.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/photoshop/api/lib.py b/openpype/hosts/photoshop/api/lib.py index 97322d3de97..e0fd0664ef3 100644 --- a/openpype/hosts/photoshop/api/lib.py +++ b/openpype/hosts/photoshop/api/lib.py @@ -43,7 +43,7 @@ def main(*subprocess_args): webpublisher_addon.headless_publish, log, "ClosePS", - is_in_tests + is_in_tests() ) elif env_value_to_bool("AVALON_PHOTOSHOP_WORKFILES_ON_LAUNCH", default=True): From b3c79c447ef3236c7d756b6fd6469873258896fc Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 8 Nov 2022 11:52:58 +0100 Subject: [PATCH 56/68] OP-3426 - fix - use failed as class variable failed must be used as class variable. Test classes cannot have __init__, so this weird approach used for now. --- tests/lib/testing_classes.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/lib/testing_classes.py b/tests/lib/testing_classes.py index 77e1ec4eb73..75e208175a5 100644 --- a/tests/lib/testing_classes.py +++ b/tests/lib/testing_classes.py @@ -51,6 +51,8 @@ class ModuleUnitTest(BaseTest): TEST_DATA_FOLDER = None + failed = False + @pytest.fixture(scope='session') def monkeypatch_session(self): """Monkeypatch couldn't be used with module or session fixtures.""" From 7b10f4d0093b0555659c2c275831ddb0ca9127fb Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 8 Nov 2022 16:39:58 +0100 Subject: [PATCH 57/68] OP-3426 - fix - create context if not injected create_context gets injected from Publisher, for automatic testing it is not. DCC shouldnt have separate plugins to collect instances, they should come from Publisher/this plugin. 
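Condensed, the fallback added here has this shape (the helper name is illustrative only; in the diff below the logic sits directly in CollectFromCreateContext.process):

    from openpype.host import IPublishHost
    from openpype.pipeline import registered_host
    from openpype.pipeline.create import CreateContext


    def _resolve_create_context(context):
        # The Publisher UI injects "create_context" into the pyblish context;
        # headless/automatic-test runs do not, so build one from the
        # registered host when it implements the publisher interface.
        create_context = context.data.pop("create_context", None)
        if not create_context:
            host = registered_host()
            if isinstance(host, IPublishHost):
                create_context = CreateContext(host)
        return create_context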
--- .../plugins/publish/collect_from_create_context.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/openpype/plugins/publish/collect_from_create_context.py b/openpype/plugins/publish/collect_from_create_context.py index ddb6908a4cb..9a740c10cd7 100644 --- a/openpype/plugins/publish/collect_from_create_context.py +++ b/openpype/plugins/publish/collect_from_create_context.py @@ -4,7 +4,9 @@ import os import pyblish.api -from openpype.pipeline import legacy_io +from openpype.host import IPublishHost +from openpype.pipeline import legacy_io, registered_host +from openpype.pipeline.create import CreateContext class CollectFromCreateContext(pyblish.api.ContextPlugin): @@ -15,7 +17,11 @@ class CollectFromCreateContext(pyblish.api.ContextPlugin): def process(self, context): create_context = context.data.pop("create_context", None) - # Skip if create context is not available + if not create_context: + host = registered_host() + if isinstance(host, IPublishHost): + create_context = CreateContext(host) + if not create_context: return @@ -31,6 +37,7 @@ def process(self, context): context.data["projectName"] = project_name for created_instance in create_context.instances: + self.log.info(f"created_instance:: {created_instance}") instance_data = created_instance.data_to_store() if instance_data["active"]: thumbnail_path = thumbnail_paths_by_instance_id.get( From 81a3cba02544bf8e6eee2c8ff0070919f6913b1c Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 11 Nov 2022 16:42:30 +0100 Subject: [PATCH 58/68] OP-4361 - cleaned up AE tests --- tests/integration/hosts/aftereffects/lib.py | 4 +-- .../test_deadline_publish_in_aftereffects.py | 26 +++++++++++++---- .../test_publish_in_aftereffects.py | 26 +++++++++++++---- ...test_publish_in_aftereffects_multiframe.py | 28 ++++++++++++++----- 4 files changed, 63 insertions(+), 21 deletions(-) diff --git a/tests/integration/hosts/aftereffects/lib.py b/tests/integration/hosts/aftereffects/lib.py index c47121a035d..08dda1d4997 100644 --- a/tests/integration/hosts/aftereffects/lib.py +++ b/tests/integration/hosts/aftereffects/lib.py @@ -19,7 +19,7 @@ def last_workfile_path(self, download_test_data, output_folder_url): src_path = os.path.join(download_test_data, "input", "workfile", - "test_project_test_asset_TestTask_v001.aep") + "test_project_test_asset_test_task_v001.aep") dest_folder = os.path.join(output_folder_url, self.PROJECT, self.ASSET, @@ -27,7 +27,7 @@ def last_workfile_path(self, download_test_data, output_folder_url): self.TASK) os.makedirs(dest_folder) dest_path = os.path.join(dest_folder, - "test_project_test_asset_TestTask_v001.aep") + "test_project_test_asset_test_task_v001.aep") shutil.copy(src_path, dest_path) yield dest_path diff --git a/tests/integration/hosts/aftereffects/test_deadline_publish_in_aftereffects.py b/tests/integration/hosts/aftereffects/test_deadline_publish_in_aftereffects.py index 2c88f8d8019..c820a3cab8f 100644 --- a/tests/integration/hosts/aftereffects/test_deadline_publish_in_aftereffects.py +++ b/tests/integration/hosts/aftereffects/test_deadline_publish_in_aftereffects.py @@ -51,27 +51,41 @@ def test_db_asserts(self, dbcon, publish_finished): failures.append( DBAssert.count_of_types(dbcon, "version", 0, name={"$ne": 1})) - failures.append( - DBAssert.count_of_types(dbcon, "subset", 1, - name="imageMainBackgroundcopy")) - failures.append( DBAssert.count_of_types(dbcon, "subset", 1, name="workfileTest_task")) failures.append( DBAssert.count_of_types(dbcon, "subset", 1, - name="reviewTesttask")) 
+ name="renderTest_taskMain")) failures.append( DBAssert.count_of_types(dbcon, "representation", 4)) - additional_args = {"context.subset": "renderTestTaskDefault", + additional_args = {"context.subset": "renderTest_taskMain", + "context.ext": "aep"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + additional_args = {"context.subset": "renderTest_taskMain", "context.ext": "png"} failures.append( DBAssert.count_of_types(dbcon, "representation", 1, additional_args=additional_args)) + additional_args = {"context.subset": "renderTest_taskMain", + "name": "thumbnail"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + additional_args = {"context.subset": "renderTest_taskMain", + "name": "h264_png"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + assert not any(failures) diff --git a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects.py b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects.py index dc7c9c608d8..b4c072a0e49 100644 --- a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects.py +++ b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects.py @@ -49,27 +49,41 @@ def test_db_asserts(self, dbcon, publish_finished): failures.append( DBAssert.count_of_types(dbcon, "version", 0, name={"$ne": 1})) - failures.append( - DBAssert.count_of_types(dbcon, "subset", 1, - name="imageMainBackgroundcopy")) - failures.append( DBAssert.count_of_types(dbcon, "subset", 1, name="workfileTest_task")) failures.append( DBAssert.count_of_types(dbcon, "subset", 1, - name="reviewTesttask")) + name="renderTest_taskMain")) failures.append( DBAssert.count_of_types(dbcon, "representation", 4)) - additional_args = {"context.subset": "renderTestTaskDefault", + additional_args = {"context.subset": "renderTest_taskMain", + "context.ext": "aep"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + additional_args = {"context.subset": "renderTest_taskMain", "context.ext": "png"} failures.append( DBAssert.count_of_types(dbcon, "representation", 1, additional_args=additional_args)) + additional_args = {"context.subset": "renderTest_taskMain", + "name": "thumbnail"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + additional_args = {"context.subset": "renderTest_taskMain", + "name": "h264_png"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + assert not any(failures) diff --git a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py index 9c5a8de0d15..2d95eada990 100644 --- a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py +++ b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py @@ -9,7 +9,7 @@ class TestPublishInAfterEffects(AELocalPublishTestClass): """Basic test case for publishing in AfterEffects - Should publish 5 frames + Should publish 10 frames """ PERSIST = True @@ -36,27 +36,41 @@ def test_db_asserts(self, dbcon, publish_finished): failures.append( DBAssert.count_of_types(dbcon, "version", 0, name={"$ne": 1})) - failures.append( - DBAssert.count_of_types(dbcon, "subset", 1, - name="imageMainBackgroundcopy")) - failures.append( 
DBAssert.count_of_types(dbcon, "subset", 1, name="workfileTest_task")) failures.append( DBAssert.count_of_types(dbcon, "subset", 1, - name="reviewTesttask")) + name="renderTest_taskMain")) failures.append( DBAssert.count_of_types(dbcon, "representation", 4)) - additional_args = {"context.subset": "renderTestTaskDefault", + additional_args = {"context.subset": "renderTest_taskMain", + "context.ext": "aep"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + additional_args = {"context.subset": "renderTest_taskMain", "context.ext": "png"} failures.append( DBAssert.count_of_types(dbcon, "representation", 1, additional_args=additional_args)) + additional_args = {"context.subset": "renderTest_taskMain", + "name": "thumbnail"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + additional_args = {"context.subset": "renderTest_taskMain", + "name": "h264_png"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + assert not any(failures) From 130e00e1951dd2817e131f327106c563510bc31b Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 11 Nov 2022 16:48:17 +0100 Subject: [PATCH 59/68] OP-3426 - fix files filtering Filter both expected and published folders and skip temporary files. Temporary files might be pulled into expected folder when directly copying result of one test run to source zip file. --- tests/lib/testing_classes.py | 32 +++++++++++++++++++++----------- 1 file changed, 21 insertions(+), 11 deletions(-) diff --git a/tests/lib/testing_classes.py b/tests/lib/testing_classes.py index 75e208175a5..85d82b15e04 100644 --- a/tests/lib/testing_classes.py +++ b/tests/lib/testing_classes.py @@ -336,21 +336,31 @@ def test_folder_structure_same(self, dbcon, publish_finished, glob.glob(expected_dir_base + "\\**", recursive=True) if f != expected_dir_base and os.path.exists(f)) - filtered_published = set() - for pub_path in published: - if skip_compare_folders: - if not any([re.search(val, pub_path) - for val in skip_compare_folders]): - filtered_published.add(pub_path) - else: - filtered_published.add(pub_path) + filtered_published = self._filter_files(published, + skip_compare_folders) + + # filter out temp files also in expected + # could be polluted by accident by copying 'output' to zip file + filtered_expected = self._filter_files(expected, skip_compare_folders) - not_matched = expected.symmetric_difference(filtered_published) - if not_matched: + not_mtched = filtered_expected.symmetric_difference(filtered_published) + if not_mtched: ModuleUnitTest.failed = True raise AssertionError("Missing {} files".format( - "\n".join(sorted(not_matched)))) + "\n".join(sorted(not_mtched)))) + + def _filter_files(self, source_files, skip_compare_folders): + """Filter list of files according to regex pattern.""" + filtered = set() + for file_path in source_files: + if skip_compare_folders: + if not any([re.search(val, file_path) + for val in skip_compare_folders]): + filtered.add(file_path) + else: + filtered.add(file_path) + return filtered class DeadlinePublishTest(PublishTest): @pytest.fixture(scope="module") From a48c76b003b0e69a5a1ed9b1eb7bbf55d19b0576 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 11 Nov 2022 18:18:48 +0100 Subject: [PATCH 60/68] OP-3426 - set failed when assertion error Failed is used to persist errrored run. 
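The change itself is a two-line addition at the end of each test's DB assert block, sketched here (mirrors the hunks below; note that later commits in this series drop the approach again in favour of the pytest request fixture):

    # end of a host test's test_db_asserts(); `failures` holds one entry per
    # unmet DB expectation (assumes ModuleUnitTest is importable in the test)
    if any(failures):
        # flip the shared flag before asserting so teardown keeps the
        # temporary publish folder and test DBs around for inspection
        ModuleUnitTest.failed = True
    assert not any(failures)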
--- .../aftereffects/test_deadline_publish_in_aftereffects.py | 4 +++- .../hosts/aftereffects/test_publish_in_aftereffects.py | 4 +++- .../aftereffects/test_publish_in_aftereffects_multiframe.py | 4 +++- tests/integration/hosts/maya/test_deadline_publish_in_maya.py | 4 ++++ tests/integration/hosts/maya/test_publish_in_maya.py | 4 ++++ tests/integration/hosts/nuke/test_deadline_publish_in_nuke.py | 4 +++- tests/integration/hosts/nuke/test_publish_in_nuke.py | 4 +++- .../integration/hosts/photoshop/test_publish_in_photoshop.py | 4 +++- 8 files changed, 26 insertions(+), 6 deletions(-) diff --git a/tests/integration/hosts/aftereffects/test_deadline_publish_in_aftereffects.py b/tests/integration/hosts/aftereffects/test_deadline_publish_in_aftereffects.py index c820a3cab8f..f64dfe08181 100644 --- a/tests/integration/hosts/aftereffects/test_deadline_publish_in_aftereffects.py +++ b/tests/integration/hosts/aftereffects/test_deadline_publish_in_aftereffects.py @@ -86,7 +86,9 @@ def test_db_asserts(self, dbcon, publish_finished): DBAssert.count_of_types(dbcon, "representation", 1, additional_args=additional_args)) - assert not any(failures) + if any(failures): + ModuleUnitTest.failed = True + assert not any(failures) if __name__ == "__main__": diff --git a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects.py b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects.py index b4c072a0e49..4c3a0034e40 100644 --- a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects.py +++ b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects.py @@ -84,7 +84,9 @@ def test_db_asserts(self, dbcon, publish_finished): DBAssert.count_of_types(dbcon, "representation", 1, additional_args=additional_args)) - assert not any(failures) + if any(failures): + ModuleUnitTest.failed = True + assert not any(failures) if __name__ == "__main__": diff --git a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py index 2d95eada990..39ae26e5a20 100644 --- a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py +++ b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py @@ -71,7 +71,9 @@ def test_db_asserts(self, dbcon, publish_finished): DBAssert.count_of_types(dbcon, "representation", 1, additional_args=additional_args)) - assert not any(failures) + if any(failures): + ModuleUnitTest.failed = True + assert not any(failures) if __name__ == "__main__": diff --git a/tests/integration/hosts/maya/test_deadline_publish_in_maya.py b/tests/integration/hosts/maya/test_deadline_publish_in_maya.py index f23357bf8f3..789d7824602 100644 --- a/tests/integration/hosts/maya/test_deadline_publish_in_maya.py +++ b/tests/integration/hosts/maya/test_deadline_publish_in_maya.py @@ -95,6 +95,10 @@ def test_db_asserts(self, dbcon, publish_finished): DBAssert.count_of_types(dbcon, "representation", 1, additional_args=additional_args)) + if any(failures): + ModuleUnitTest.failed = True + assert not any(failures) + if __name__ == "__main__": test_case = TestDeadlinePublishInMaya() diff --git a/tests/integration/hosts/maya/test_publish_in_maya.py b/tests/integration/hosts/maya/test_publish_in_maya.py index 50e276872be..643519ab526 100644 --- a/tests/integration/hosts/maya/test_publish_in_maya.py +++ b/tests/integration/hosts/maya/test_publish_in_maya.py @@ -72,6 +72,10 @@ def test_db_asserts(self, dbcon, publish_finished): DBAssert.count_of_types(dbcon, 
"representation", 1, additional_args=additional_args)) + if any(failures): + ModuleUnitTest.failed = True + assert not any(failures) + if __name__ == "__main__": test_case = TestPublishInMaya() diff --git a/tests/integration/hosts/nuke/test_deadline_publish_in_nuke.py b/tests/integration/hosts/nuke/test_deadline_publish_in_nuke.py index cd9cbb94f89..ff6abea56cc 100644 --- a/tests/integration/hosts/nuke/test_deadline_publish_in_nuke.py +++ b/tests/integration/hosts/nuke/test_deadline_publish_in_nuke.py @@ -77,7 +77,9 @@ def test_db_asserts(self, dbcon, publish_finished): DBAssert.count_of_types(dbcon, "representation", 1, additional_args=additional_args)) - assert not any(failures) + if any(failures): + ModuleUnitTest.failed = True + assert not any(failures) if __name__ == "__main__": diff --git a/tests/integration/hosts/nuke/test_publish_in_nuke.py b/tests/integration/hosts/nuke/test_publish_in_nuke.py index f84f13fa20d..36a5a6f4f6d 100644 --- a/tests/integration/hosts/nuke/test_publish_in_nuke.py +++ b/tests/integration/hosts/nuke/test_publish_in_nuke.py @@ -76,7 +76,9 @@ def test_db_asserts(self, dbcon, publish_finished): DBAssert.count_of_types(dbcon, "representation", 1, additional_args=additional_args)) - assert not any(failures) + if any(failures): + ModuleUnitTest.failed = True + assert not any(failures) if __name__ == "__main__": diff --git a/tests/integration/hosts/photoshop/test_publish_in_photoshop.py b/tests/integration/hosts/photoshop/test_publish_in_photoshop.py index 4aaf43234db..0d491e6bf71 100644 --- a/tests/integration/hosts/photoshop/test_publish_in_photoshop.py +++ b/tests/integration/hosts/photoshop/test_publish_in_photoshop.py @@ -98,7 +98,9 @@ def test_db_asserts(self, dbcon, publish_finished): DBAssert.count_of_types(dbcon, "representation", 1, additional_args=additional_args)) - assert not any(failures) + if any(failures): + ModuleUnitTest.failed = True + assert not any(failures) if __name__ == "__main__": From 51db29319aea6ab4f3578ec7f898e05a291279e7 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 11 Nov 2022 19:20:49 +0100 Subject: [PATCH 61/68] OP-3426 - allow multiple render deadline jobs Waits for publish job of last created one. --- tests/lib/testing_classes.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/lib/testing_classes.py b/tests/lib/testing_classes.py index 85d82b15e04..cc839240c76 100644 --- a/tests/lib/testing_classes.py +++ b/tests/lib/testing_classes.py @@ -387,8 +387,8 @@ def publish_finished(self, dbcon, launched_app, download_test_data, raise RuntimeError("No metadata file found. 
No job id.") if len(metadata_json) > 1: - ModuleUnitTest.failed = True - raise RuntimeError("Too many metadata files found.") + # depends on creation order of published jobs + metadata_json.sort(key=os.path.getmtime, reverse=True) with open(metadata_json[0]) as fp: job_info = json.load(fp) From 0c9c9c40b885c970c820992e4773b14159a60684 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 11 Nov 2022 19:21:52 +0100 Subject: [PATCH 62/68] OP-3426 - remove not working set of failed --- .../aftereffects/test_deadline_publish_in_aftereffects.py | 4 +--- .../hosts/aftereffects/test_publish_in_aftereffects.py | 4 +--- .../aftereffects/test_publish_in_aftereffects_multiframe.py | 4 +--- tests/integration/hosts/maya/test_deadline_publish_in_maya.py | 4 +--- tests/integration/hosts/maya/test_publish_in_maya.py | 4 +--- tests/integration/hosts/nuke/test_deadline_publish_in_nuke.py | 4 +--- tests/integration/hosts/nuke/test_publish_in_nuke.py | 4 +--- .../integration/hosts/photoshop/test_publish_in_photoshop.py | 4 +--- 8 files changed, 8 insertions(+), 24 deletions(-) diff --git a/tests/integration/hosts/aftereffects/test_deadline_publish_in_aftereffects.py b/tests/integration/hosts/aftereffects/test_deadline_publish_in_aftereffects.py index f64dfe08181..c820a3cab8f 100644 --- a/tests/integration/hosts/aftereffects/test_deadline_publish_in_aftereffects.py +++ b/tests/integration/hosts/aftereffects/test_deadline_publish_in_aftereffects.py @@ -86,9 +86,7 @@ def test_db_asserts(self, dbcon, publish_finished): DBAssert.count_of_types(dbcon, "representation", 1, additional_args=additional_args)) - if any(failures): - ModuleUnitTest.failed = True - assert not any(failures) + assert not any(failures) if __name__ == "__main__": diff --git a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects.py b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects.py index 4c3a0034e40..b4c072a0e49 100644 --- a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects.py +++ b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects.py @@ -84,9 +84,7 @@ def test_db_asserts(self, dbcon, publish_finished): DBAssert.count_of_types(dbcon, "representation", 1, additional_args=additional_args)) - if any(failures): - ModuleUnitTest.failed = True - assert not any(failures) + assert not any(failures) if __name__ == "__main__": diff --git a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py index 39ae26e5a20..2d95eada990 100644 --- a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py +++ b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py @@ -71,9 +71,7 @@ def test_db_asserts(self, dbcon, publish_finished): DBAssert.count_of_types(dbcon, "representation", 1, additional_args=additional_args)) - if any(failures): - ModuleUnitTest.failed = True - assert not any(failures) + assert not any(failures) if __name__ == "__main__": diff --git a/tests/integration/hosts/maya/test_deadline_publish_in_maya.py b/tests/integration/hosts/maya/test_deadline_publish_in_maya.py index 789d7824602..464f2462078 100644 --- a/tests/integration/hosts/maya/test_deadline_publish_in_maya.py +++ b/tests/integration/hosts/maya/test_deadline_publish_in_maya.py @@ -95,9 +95,7 @@ def test_db_asserts(self, dbcon, publish_finished): DBAssert.count_of_types(dbcon, "representation", 1, additional_args=additional_args)) - if any(failures): - ModuleUnitTest.failed = 
True - assert not any(failures) + assert not any(failures) if __name__ == "__main__": diff --git a/tests/integration/hosts/maya/test_publish_in_maya.py b/tests/integration/hosts/maya/test_publish_in_maya.py index 643519ab526..b7ee228aaec 100644 --- a/tests/integration/hosts/maya/test_publish_in_maya.py +++ b/tests/integration/hosts/maya/test_publish_in_maya.py @@ -72,9 +72,7 @@ def test_db_asserts(self, dbcon, publish_finished): DBAssert.count_of_types(dbcon, "representation", 1, additional_args=additional_args)) - if any(failures): - ModuleUnitTest.failed = True - assert not any(failures) + assert not any(failures) if __name__ == "__main__": diff --git a/tests/integration/hosts/nuke/test_deadline_publish_in_nuke.py b/tests/integration/hosts/nuke/test_deadline_publish_in_nuke.py index ff6abea56cc..cd9cbb94f89 100644 --- a/tests/integration/hosts/nuke/test_deadline_publish_in_nuke.py +++ b/tests/integration/hosts/nuke/test_deadline_publish_in_nuke.py @@ -77,9 +77,7 @@ def test_db_asserts(self, dbcon, publish_finished): DBAssert.count_of_types(dbcon, "representation", 1, additional_args=additional_args)) - if any(failures): - ModuleUnitTest.failed = True - assert not any(failures) + assert not any(failures) if __name__ == "__main__": diff --git a/tests/integration/hosts/nuke/test_publish_in_nuke.py b/tests/integration/hosts/nuke/test_publish_in_nuke.py index 36a5a6f4f6d..f84f13fa20d 100644 --- a/tests/integration/hosts/nuke/test_publish_in_nuke.py +++ b/tests/integration/hosts/nuke/test_publish_in_nuke.py @@ -76,9 +76,7 @@ def test_db_asserts(self, dbcon, publish_finished): DBAssert.count_of_types(dbcon, "representation", 1, additional_args=additional_args)) - if any(failures): - ModuleUnitTest.failed = True - assert not any(failures) + assert not any(failures) if __name__ == "__main__": diff --git a/tests/integration/hosts/photoshop/test_publish_in_photoshop.py b/tests/integration/hosts/photoshop/test_publish_in_photoshop.py index 0d491e6bf71..4aaf43234db 100644 --- a/tests/integration/hosts/photoshop/test_publish_in_photoshop.py +++ b/tests/integration/hosts/photoshop/test_publish_in_photoshop.py @@ -98,9 +98,7 @@ def test_db_asserts(self, dbcon, publish_finished): DBAssert.count_of_types(dbcon, "representation", 1, additional_args=additional_args)) - if any(failures): - ModuleUnitTest.failed = True - assert not any(failures) + assert not any(failures) if __name__ == "__main__": From de2bd09a4070855733d5de90dd5eec6b58adcc2f Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 11 Nov 2022 19:23:55 +0100 Subject: [PATCH 63/68] OP-3426 - remove failed class variable Didn't work properly, correct way is to use request fixture. 
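The request.node.rep_call lookup only works if something attaches the test report to the item; the usual recipe is a conftest hook along these lines (assumed to live in the repository's conftest.py, it is not part of this patch):

    # conftest.py - store each test phase's report on the item so fixtures
    # can read request.node.rep_setup / rep_call / rep_teardown
    import pytest


    @pytest.hookimpl(tryfirst=True, hookwrapper=True)
    def pytest_runtest_makereport(item, call):
        outcome = yield
        report = outcome.get_result()
        setattr(item, "rep_" + report.when, report)

With that hook in place, a module-scoped fixture can check request.node.rep_call.failed during teardown, which is what the diff below switches to.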
--- tests/lib/testing_classes.py | 21 +++++++-------------- 1 file changed, 7 insertions(+), 14 deletions(-) diff --git a/tests/lib/testing_classes.py b/tests/lib/testing_classes.py index cc839240c76..b6b4b0dd21e 100644 --- a/tests/lib/testing_classes.py +++ b/tests/lib/testing_classes.py @@ -51,8 +51,6 @@ class ModuleUnitTest(BaseTest): TEST_DATA_FOLDER = None - failed = False - @pytest.fixture(scope='session') def monkeypatch_session(self): """Monkeypatch couldn't be used with module or session fixtures.""" @@ -62,7 +60,7 @@ def monkeypatch_session(self): m.undo() @pytest.fixture(scope="module") - def download_test_data(self, test_data_folder, persist=False): + def download_test_data(self, test_data_folder, persist, request): test_data_folder = test_data_folder or self.TEST_DATA_FOLDER if test_data_folder: print("Using existing folder {}".format(test_data_folder)) @@ -83,7 +81,8 @@ def download_test_data(self, test_data_folder, persist=False): print("Temporary folder created:: {}".format(tmpdir)) yield tmpdir - persist = persist or self.PERSIST or ModuleUnitTest.failed + persist = (persist or self.PERSIST or + request.node.rep_call.failed) if not persist: print("Removing {}".format(tmpdir)) shutil.rmtree(tmpdir) @@ -130,7 +129,8 @@ def env_var(self, monkeypatch_session, download_test_data): monkeypatch_session.setenv("TEST_SOURCE_FOLDER", download_test_data) @pytest.fixture(scope="module") - def db_setup(self, download_test_data, env_var, monkeypatch_session): + def db_setup(self, download_test_data, env_var, monkeypatch_session, + request): """Restore prepared MongoDB dumps into selected DB.""" backup_dir = os.path.join(download_test_data, "input", "dumps") @@ -146,7 +146,7 @@ def db_setup(self, download_test_data, env_var, monkeypatch_session): yield db_handler - persist = self.PERSIST or ModuleUnitTest.failed + persist = self.PERSIST or request.node.rep_call.failed if not persist: db_handler.teardown(self.TEST_DB_NAME) db_handler.teardown(self.TEST_OPENPYPE_NAME) @@ -308,7 +308,6 @@ def publish_finished(self, dbcon, launched_app, download_test_data, while launched_app.poll() is None: time.sleep(0.5) if time.time() - time_start > timeout: - ModuleUnitTest.failed = True launched_app.terminate() raise ValueError("Timeout reached") @@ -345,7 +344,6 @@ def test_folder_structure_same(self, dbcon, publish_finished, not_mtched = filtered_expected.symmetric_difference(filtered_published) if not_mtched: - ModuleUnitTest.failed = True raise AssertionError("Missing {} files".format( "\n".join(sorted(not_mtched)))) @@ -362,6 +360,7 @@ def _filter_files(self, source_files, skip_compare_folders): return filtered + class DeadlinePublishTest(PublishTest): @pytest.fixture(scope="module") def publish_finished(self, dbcon, launched_app, download_test_data, @@ -374,7 +373,6 @@ def publish_finished(self, dbcon, launched_app, download_test_data, while launched_app.poll() is None: time.sleep(0.5) if time.time() - time_start > timeout: - ModuleUnitTest.failed = True launched_app.terminate() raise ValueError("Timeout reached") @@ -383,7 +381,6 @@ def publish_finished(self, dbcon, launched_app, download_test_data, "**/*_metadata.json"), recursive=True) if not metadata_json: - ModuleUnitTest.failed = True raise RuntimeError("No metadata file found. 
No job id.") if len(metadata_json) > 1: @@ -400,7 +397,6 @@ def publish_finished(self, dbcon, launched_app, download_test_data, deadline_url = deadline_module.deadline_urls["default"] if not deadline_url: - ModuleUnitTest.failed = True raise ValueError("Must have default deadline url.") url = "{}/api/jobs?JobId={}".format(deadline_url, deadline_job_id) @@ -410,17 +406,14 @@ def publish_finished(self, dbcon, launched_app, download_test_data, while not valid_date_finished: time.sleep(0.5) if time.time() - time_start > timeout: - ModuleUnitTest.failed = True raise ValueError("Timeout for DL finish reached") response = requests.get(url, timeout=10) if not response.ok: - ModuleUnitTest.failed = True msg = "Couldn't connect to {}".format(deadline_url) raise RuntimeError(msg) if not response.json(): - ModuleUnitTest.failed = True raise ValueError("Couldn't find {}".format(deadline_job_id)) # '0001-...' returned until job is finished From 40d2571487e2c2fcae08753b425f33711f82fd51 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Fri, 11 Nov 2022 19:30:05 +0100 Subject: [PATCH 64/68] OP-3426 - added test job for multi composition in AE in DL Depends on OP-4361 --- ...ublish_in_aftereffects_multicomposition.py | 121 ++++++++++++++++++ 1 file changed, 121 insertions(+) create mode 100644 tests/integration/hosts/aftereffects/test_deadline_publish_in_aftereffects_multicomposition.py diff --git a/tests/integration/hosts/aftereffects/test_deadline_publish_in_aftereffects_multicomposition.py b/tests/integration/hosts/aftereffects/test_deadline_publish_in_aftereffects_multicomposition.py new file mode 100644 index 00000000000..f009b45f4de --- /dev/null +++ b/tests/integration/hosts/aftereffects/test_deadline_publish_in_aftereffects_multicomposition.py @@ -0,0 +1,121 @@ +import logging + +from tests.lib.assert_classes import DBAssert +from tests.integration.hosts.aftereffects.lib import AEDeadlinePublishTestClass + +log = logging.getLogger("test_publish_in_aftereffects") + + +class TestDeadlinePublishInAfterEffectsMultiComposition(AEDeadlinePublishTestClass): # noqa + """est case for DL publishing in AfterEffects with multiple compositions. + + Uses generic TestCase to prepare fixtures for test data, testing DBs, + env vars. + + Opens AfterEffects, run DL publish on prepared workile. + + Test zip file sets 3 required env vars: + - HEADLESS_PUBLISH - this triggers publish immediately app is open + - IS_TEST - this differentiate between regular webpublish + - PYBLISH_TARGETS + + As there are multiple render and publish jobs, it waits for publish job + of later render job. Depends on date created of metadata.json. + + Then checks content of DB (if subset, version, representations were + created. + Checks tmp folder if all expected files were published. 
+ + """ + PERSIST = False + + TEST_FILES = [ + ("16xIm3U5P7WQJXpa9E06jWebMK9QKUATN", + "test_aftereffects_deadline_publish_multicomposition.zip", + "") + ] + + APP_GROUP = "aftereffects" + APP_VARIANT = "" + + APP_NAME = "{}/{}".format(APP_GROUP, APP_VARIANT) + + TIMEOUT = 120 # publish timeout + + def test_db_asserts(self, dbcon, publish_finished): + """Host and input data dependent expected results in DB.""" + print("test_db_asserts") + failures = [] + + failures.append(DBAssert.count_of_types(dbcon, "version", 2)) + + failures.append( + DBAssert.count_of_types(dbcon, "version", 0, name={"$ne": 1})) + + failures.append( + DBAssert.count_of_types(dbcon, "subset", 3)) + + failures.append( + DBAssert.count_of_types(dbcon, "subset", 1, + name="workfileTest_task")) + + failures.append( + DBAssert.count_of_types(dbcon, "subset", 1, + name="renderTest_taskMain")) + + failures.append( + DBAssert.count_of_types(dbcon, "subset", 1, + name="renderTest_taskMain2")) + + failures.append( + DBAssert.count_of_types(dbcon, "representation", 7)) + + additional_args = {"context.subset": "workfileTest_task", + "context.ext": "aep"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + # renderTest_taskMain + additional_args = {"context.subset": "renderTest_taskMain", + "context.ext": "png"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + additional_args = {"context.subset": "renderTest_taskMain", + "name": "thumbnail"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + additional_args = {"context.subset": "renderTest_taskMain", + "name": "png_png"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + # renderTest_taskMain2 + additional_args = {"context.subset": "renderTest_taskMain2", + "context.ext": "exr"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + additional_args = {"context.subset": "renderTest_taskMain2", + "name": "thumbnail"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + additional_args = {"context.subset": "renderTest_taskMain2", + "name": "png_exr"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + assert not any(failures) + + +if __name__ == "__main__": + test_case = TestDeadlinePublishInAfterEffectsMultiComposition() From 7f166ddf3fd09cbfc4dec3c5ff84e7d2c85088ed Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 14 Nov 2022 17:36:22 +0100 Subject: [PATCH 65/68] OP-3426 - fix AE tests ExtractReview for single frames was merged, therefore it is expected, that only png representation is expected. 
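
Roughly, the review representation the updated asserts now count is
shaped like this (an illustrative document only, limited to the fields
the test queries actually filter on):

    {
        "type": "representation",
        "name": "png_png",
        "context": {"subset": "renderTest_taskMain"}
    }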
--- .../hosts/aftereffects/test_deadline_publish_in_aftereffects.py | 2 +- .../hosts/aftereffects/test_publish_in_aftereffects.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/integration/hosts/aftereffects/test_deadline_publish_in_aftereffects.py b/tests/integration/hosts/aftereffects/test_deadline_publish_in_aftereffects.py index c820a3cab8f..04fe6cb9aae 100644 --- a/tests/integration/hosts/aftereffects/test_deadline_publish_in_aftereffects.py +++ b/tests/integration/hosts/aftereffects/test_deadline_publish_in_aftereffects.py @@ -81,7 +81,7 @@ def test_db_asserts(self, dbcon, publish_finished): additional_args=additional_args)) additional_args = {"context.subset": "renderTest_taskMain", - "name": "h264_png"} + "name": "png_png"} failures.append( DBAssert.count_of_types(dbcon, "representation", 1, additional_args=additional_args)) diff --git a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects.py b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects.py index b4c072a0e49..57d5a3e3f12 100644 --- a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects.py +++ b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects.py @@ -79,7 +79,7 @@ def test_db_asserts(self, dbcon, publish_finished): additional_args=additional_args)) additional_args = {"context.subset": "renderTest_taskMain", - "name": "h264_png"} + "name": "png_png"} failures.append( DBAssert.count_of_types(dbcon, "representation", 1, additional_args=additional_args)) From a20c102b854e129cc9b7dda02ffa9301ec9fc885 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 21 Nov 2022 13:13:27 +0100 Subject: [PATCH 66/68] OP-3426 - fix check for failed test If fixture fails before test is finished, there is no `rep_call` method, this way it should be more safe. 
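
For reference, this follows the standard pytest recipe of exposing the
test report to fixtures via `pytest_runtest_makereport`. A minimal,
self-contained sketch of the pattern (the fixture and variable names
here are illustrative, not part of this patch):

    import pytest


    @pytest.hookimpl(tryfirst=True, hookwrapper=True)
    def pytest_runtest_makereport(item, call):
        # let the other hooks build the report first
        outcome = yield
        rep = outcome.get_result()
        # rep.when is "setup", "call" or "teardown"
        setattr(item, "rep_" + rep.when, rep)


    @pytest.fixture
    def work_dir(request, tmp_path):
        yield tmp_path
        # "rep_call" is missing when setup itself already failed,
        # so treat a missing report as a failure
        rep_call = getattr(request.node, "rep_call", None)
        failed = rep_call is None or rep_call.failed
        if failed:
            print("Keeping {} for inspection".format(tmp_path))

The fixtures in testing_classes.py apply the same idea through
`is_test_failed`, which falls back to "failed" whenever the report
attribute is not there yet.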
--- tests/conftest.py | 12 ++++++++++++ tests/lib/testing_classes.py | 11 +++++++++-- 2 files changed, 21 insertions(+), 2 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index aa850be1a63..7b58b0314dd 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -43,3 +43,15 @@ def app_variant(request): @pytest.fixture(scope="module") def timeout(request): return request.config.getoption("--timeout") + + +@pytest.hookimpl(tryfirst=True, hookwrapper=True) +def pytest_runtest_makereport(item, call): + # execute all other hooks to obtain the report object + outcome = yield + rep = outcome.get_result() + + # set a report attribute for each phase of a call, which can + # be "setup", "call", "teardown" + + setattr(item, "rep_" + rep.when, rep) diff --git a/tests/lib/testing_classes.py b/tests/lib/testing_classes.py index b6b4b0dd21e..d804c43219d 100644 --- a/tests/lib/testing_classes.py +++ b/tests/lib/testing_classes.py @@ -82,7 +82,7 @@ def download_test_data(self, test_data_folder, persist, request): yield tmpdir persist = (persist or self.PERSIST or - request.node.rep_call.failed) + self.is_test_failed(request)) if not persist: print("Removing {}".format(tmpdir)) shutil.rmtree(tmpdir) @@ -146,7 +146,7 @@ def db_setup(self, download_test_data, env_var, monkeypatch_session, yield db_handler - persist = self.PERSIST or request.node.rep_call.failed + persist = self.PERSIST or self.is_test_failed(request) if not persist: db_handler.teardown(self.TEST_DB_NAME) db_handler.teardown(self.TEST_OPENPYPE_NAME) @@ -172,6 +172,13 @@ def dbcon_openpype(self, db_setup): mongo_client = OpenPypeMongoConnection.get_mongo_client() yield mongo_client[self.TEST_OPENPYPE_NAME]["settings"] + def is_test_failed(self, request): + # if request.node doesn't have rep_call, something failed + try: + return request.node.rep_call.failed + except AttributeError: + return True + class PublishTest(ModuleUnitTest): """Test class for publishing in hosts. From bd5e8285e975efe06bed49befe2b714176c36dba Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 21 Nov 2022 14:01:46 +0100 Subject: [PATCH 67/68] OP-3426 - added legacy publish in AE Instances creaed in Publisher are not working in old Pyblish, but old instances should be publishable automatically. --- .../test_publish_in_aftereffects_legacy.py | 93 +++++++++++++++++++ 1 file changed, 93 insertions(+) create mode 100644 tests/integration/hosts/aftereffects/test_publish_in_aftereffects_legacy.py diff --git a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_legacy.py b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_legacy.py new file mode 100644 index 00000000000..8c7a74c60e0 --- /dev/null +++ b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_legacy.py @@ -0,0 +1,93 @@ +import logging + +from tests.lib.assert_classes import DBAssert +from tests.integration.hosts.aftereffects.lib import AELocalPublishTestClass + +log = logging.getLogger("test_publish_in_aftereffects") + + +class TestPublishInAfterEffects(AELocalPublishTestClass): + """Basic test case for publishing in AfterEffects + + Uses old Pyblish schema of created instances. + + Uses generic TestCase to prepare fixtures for test data, testing DBs, + env vars. + + Opens AfterEffects, run publish on prepared workile. 
+ + Test zip file sets 3 required env vars: + - HEADLESS_PUBLISH - this triggers publish immediately app is open + - IS_TEST - this differentiate between regular webpublish + - PYBLISH_TARGETS + + Then checks content of DB (if subset, version, representations were + created. + Checks tmp folder if all expected files were published. + + """ + PERSIST = False + + TEST_FILES = [ + ("1jqI_uG2NusKFvZZF7C0ScHjxFJrlc9F-", + "test_aftereffects_publish_legacy.zip", + "") + ] + + APP_GROUP = "aftereffects" + APP_VARIANT = "" + + APP_NAME = "{}/{}".format(APP_GROUP, APP_VARIANT) + + TIMEOUT = 120 # publish timeout + + def test_db_asserts(self, dbcon, publish_finished): + """Host and input data dependent expected results in DB.""" + print("test_db_asserts") + failures = [] + + failures.append(DBAssert.count_of_types(dbcon, "version", 2)) + + failures.append( + DBAssert.count_of_types(dbcon, "version", 0, name={"$ne": 1})) + + failures.append( + DBAssert.count_of_types(dbcon, "subset", 1, + name="workfileTest_task")) + + failures.append( + DBAssert.count_of_types(dbcon, "subset", 1, + name="renderTest_taskMain")) + + failures.append( + DBAssert.count_of_types(dbcon, "representation", 4)) + + additional_args = {"context.subset": "renderTest_taskMain", + "context.ext": "aep"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + additional_args = {"context.subset": "renderTest_taskMain", + "context.ext": "png"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + additional_args = {"context.subset": "renderTest_taskMain", + "name": "thumbnail"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + additional_args = {"context.subset": "renderTest_taskMain", + "name": "png_png"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + assert not any(failures) + + +if __name__ == "__main__": + test_case = TestPublishInAfterEffects() From 02b7c644915d412fb6589e9261071803bb67e6ab Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 21 Nov 2022 17:12:39 +0100 Subject: [PATCH 68/68] OP-3426 - fix expected count of representation This changed as default for single frame is png, not mp4 now. --- tests/integration/hosts/maya/test_deadline_publish_in_maya.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/integration/hosts/maya/test_deadline_publish_in_maya.py b/tests/integration/hosts/maya/test_deadline_publish_in_maya.py index 464f2462078..c5bf526f524 100644 --- a/tests/integration/hosts/maya/test_deadline_publish_in_maya.py +++ b/tests/integration/hosts/maya/test_deadline_publish_in_maya.py @@ -90,7 +90,7 @@ def test_db_asserts(self, dbcon, publish_finished): additional_args=additional_args)) additional_args = {"context.subset": "renderTest_taskMain_beauty", - "context.ext": "h264_exr"} + "context.ext": "png"} failures.append( DBAssert.count_of_types(dbcon, "representation", 1, additional_args=additional_args))