From a2f1b8087c4f66909b1304064dcde50b140f43eb Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Tue, 4 Oct 2022 16:57:00 +0200 Subject: [PATCH 001/130] :construction: ground work for extractor --- openpype/hosts/maya/api/lib.py | 25 ++++ openpype/hosts/maya/api/mtoa.py | 179 ++++++++++++++++++++++++++++ openpype/hosts/maya/api/viewport.py | 19 +++ 3 files changed, 223 insertions(+) create mode 100644 openpype/hosts/maya/api/mtoa.py create mode 100644 openpype/hosts/maya/api/viewport.py diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index 6a8447d6adc..410bbb34166 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -3498,3 +3498,28 @@ def dgcontext(mtime): # If no more nodes to process break the frame iterations.. if not node_dependencies: break + + +@contextlib.contextmanager +def selection(*nodes): + """Execute something with a specific Maya selection. + + Example: + .. code-block:: python + + cmds.select('side') + print(cmds.ls(sl=True)) + # ['side'] + + with selection('top', 'lambert1'): + print(cmds.ls) + # ['top', 'lambert1'] + + print(cmds.ls(sl=True)) + # ['side'] + + """ + current = cmds.ls(sl=True) + cmds.select(*nodes, noExpand=True) + yield + cmds.select(current, noExpand=True) \ No newline at end of file diff --git a/openpype/hosts/maya/api/mtoa.py b/openpype/hosts/maya/api/mtoa.py new file mode 100644 index 00000000000..6b9b1d6d443 --- /dev/null +++ b/openpype/hosts/maya/api/mtoa.py @@ -0,0 +1,179 @@ +# -*- coding: utf-8 -*- +"""Library of classes and functions deadling with MtoA functionality.""" +import tempfile +import contextlib + +import clique +import pyblish.api + +from maya import cmds + +from openpype.pipeline import publish +from .viewport import vp2_paused_context +from .lib import selection + + +class _AssExtractor(publish.Extractor): + """Base class for ASS type extractors.""" + + order = pyblish.api.ExtractorOrder + 0.01 + hosts = ["maya"] + + def get_ass_export_mask(self, maya_set): + import arnold # noqa + mask = arnold.AI_NODE_ALL + + ai_masks = {"options": {"value": arnold.AI_NODE_OPTIONS, + "default": False}, + "camera": {"value": arnold.AI_NODE_CAMERA, + "default": False}, + "light": {"value": arnold.AI_NODE_LIGHT, + "default": False}, + "shape": {"value": arnold.AI_NODE_SHAPE, + "default": True}, + "shader": {"value": arnold.AI_NODE_SHADER, + "default": True}, + "override": {"value": arnold.AI_NODE_OVERRIDE, + "default": False}, + "driver": {"value": arnold.AI_NODE_DRIVER, + "default": False}, + "filter": {"value": arnold.AI_NODE_FILTER, + "default": False}, + "color_manager": {"value": arnold.AI_NODE_COLOR_MANAGER, + "default": True}, + "operator": {"value": arnold.AI_NODE_OPERATOR, + "default": True}} + + for mask_name, mask_data in ai_masks.items(): + attr = "inf_ass_export_{}".format(mask_name) + + submask = self.get_set_attr("{}.{}".format(maya_set, attr), + default=mask_data["default"]) + + if not submask: + mask = mask ^ mask_data["value"] + + return mask + + def process(self, instance): + + dry_run = instance.data.get("ass.rr") + + staging_dir = self.staging_dir(instance) + sequence = instance.data.get("exportSequence", False) + + if not cmds.pluginInfo("mtoa", query=True, loaded=True): + cmds.loadPlugin("mtoa") + + # Export to a temporal path + export_dir = instance.context.data["stagingDir"] + export_path = tempfile.NamedTemporaryFile(suffix=".ass", + dir=export_dir, + delete=False) + + set_ = instance.data["set"] + kwargs = {"shadowLinks": 1, + "lightLinks": 1, + "boundingBox": True, 
+ "selected": True, + "f": export_path.name} + + # Animation + + if sequence: + mask = self.get_ass_export_mask(set_) + start = instance.data.get("frameStartHandle", 1) + end = instance.data.get("frameEndHandle", 1) + step = instance.data.get("step", 1.0) + if start is not None: + kwargs["startFrame"] = float(start) + kwargs["endFrame"] = float(end) + kwargs["frameStep"] = float(step) + else: + mask = 44 + + # Generic options + if self.get_set_attr("{}.inf_ass_expand_procedurals".format(set_), + False): + kwargs["expandProcedurals"] = True + + if self.get_set_attr("{}.inf_ass_fullpath".format(set_), + True): + kwargs["fullPath"] = True + + kwargs["mask"] = mask + + # Motion blur + mb = self.get_set_attr("{}.inf_ass_motion_blur".format(set_), False) + keys = self.get_set_attr("{}.inf_ass_mb_keys".format(set_), -1) + length = self.get_set_attr("{}.inf_ass_mb_length".format(set_), -1) + + targets = self.get_targets(instance) + + _sorted_kwargs = sorted(kwargs.items(), key=lambda x: x[0]) + _sorted_kwargs = ["{}={!r}".format(x, y) for x, y in _sorted_kwargs] + + if not dry_run: + self.log.debug("Running command: cmds.arnoldExportAss({})" + .format(", ".join(_sorted_kwargs))) + with vp2_paused_context(): + with selection(targets): + with self.motion_blur_ctx(mb, keys, length): + result = cmds.arnoldExportAss(**kwargs) + else: + instance.data["assExportKwargs"] = kwargs + start = kwargs.get("startFrame") + end = kwargs.get("endFrame") + result = [] + + range_ = [0] + if start is not None: + range_ = range(int(start), int(end) + 1) + + for i in range_: + fp = "{}.{:03d}.ass".format(export_path.name, i) + with open(fp, "w"): + pass + result.append(fp) + + if len(result) == 1: + filepath = result[0] + else: + collection = clique.assemble(result)[0][0] + filepath = collection.format() + + # Register the file + if "representations" not in instance.data: + instance.data["representations"] = [] + + representation = { + 'name': 'ass', + 'ext': 'ass', + 'files': filepath, + "stagingDir": staging_dir + } + + instance.data["representations"].append(representation) + + @contextlib.contextmanager + def motion_blur_ctx(self, force, keys, length): + if not force: + yield + return + + cmb = cmds.getAttr("defaultArnoldRenderOptions.motion_blur_enable") + ckeys = cmds.getAttr("defaultArnoldRenderOptions.motion_steps") + clen = cmds.getAttr("defaultArnoldRenderOptions.motion_frames") + + cmds.setAttr("defaultArnoldRenderOptions.motion_blur_enable", 1) + if keys > 0: + cmds.setAttr("defaultArnoldRenderOptions.motion_steps", keys) + if length >= 0: + cmds.setAttr("defaultArnoldRenderOptions.motion_frames", length) + + try: + yield + finally: + cmds.setAttr("defaultArnoldRenderOptions.motion_blur_enable", cmb) + cmds.setAttr("defaultArnoldRenderOptions.motion_steps", ckeys) + cmds.setAttr("defaultArnoldRenderOptions.motion_frames", clen) diff --git a/openpype/hosts/maya/api/viewport.py b/openpype/hosts/maya/api/viewport.py new file mode 100644 index 00000000000..cbf78ab8157 --- /dev/null +++ b/openpype/hosts/maya/api/viewport.py @@ -0,0 +1,19 @@ +# -*- coding: utf-8 -*- +"""Tools for working with viewport in Maya.""" +import contextlib +from maya import cmds # noqa + + +@contextlib.contextmanager +def vp2_paused_context(): + """Context manager to stop updating of vp2 viewport.""" + state = cmds.ogs(pause=True, query=True) + + if not state: + cmds.ogs(pause=True) + + try: + yield + finally: + if cmds.ogs(pause=True, query=True) != state: + cmds.ogs(pause=True) From 7938f843f0bdd6c9ae56f6978d3e5f211b5495a3 Mon Sep 17 
00:00:00 2001 From: Kayla Man Date: Thu, 10 Nov 2022 20:52:40 +0800 Subject: [PATCH 002/130] adding removeAOVs in setting to allow users to choose whether keeping existing AOVs --- openpype/hosts/maya/api/lib_rendersettings.py | 6 +++++- openpype/settings/defaults/project_settings/maya.json | 1 + .../schemas/schema_maya_render_settings.json | 5 +++++ 3 files changed, 11 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index 2b996702c35..9acb65b84c2 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -108,8 +108,10 @@ def _set_arnold_settings(self, width, height): # function to revert render settings does not reset AOVs list in MtoA # Fetch current aovs in case there's any. current_aovs = AOVInterface().getAOVs() + remove_aovs = arnold_render_presets["remove_aovs"] + if remove_aovs: # Remove fetched AOVs - AOVInterface().removeAOVs(current_aovs) + AOVInterface().removeAOVs(current_aovs) mel.eval("unifiedRenderGlobalsRevertToDefault") img_ext = arnold_render_presets["image_format"] img_prefix = arnold_render_presets["image_prefix"] @@ -118,6 +120,8 @@ def _set_arnold_settings(self, width, height): multi_exr = arnold_render_presets["multilayer_exr"] additional_options = arnold_render_presets["additional_options"] for aov in aovs: + if aov in current_aovs and not remove_aovs: + continue AOVInterface('defaultArnoldRenderOptions').addAOV(aov) cmds.setAttr("defaultResolution.width", width) diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 988c0e777a2..958025baeb9 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -64,6 +64,7 @@ "image_prefix": "//_", "image_format": "exr", "multilayer_exr": true, + "remove_aovs": true, "tiled": true, "aov_list": [], "additional_options": [] diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json index 0cbb684fc6e..9beea16b97c 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json @@ -69,6 +69,11 @@ "label": "Multilayer (exr)", "type": "boolean" }, + { + "key": "remove_aovs", + "label": "Remove AOVs", + "type": "boolean" + }, { "key": "tiled", "label": "Tiled (tif, exr)", From 377e6e88e1182871a1fc0cfe293e8ae697777a33 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Thu, 10 Nov 2022 20:53:24 +0800 Subject: [PATCH 003/130] adding removeAOVs in setting to allow users to choose whether keeping existing AOVs --- openpype/settings/defaults/project_settings/maya.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 958025baeb9..09f3e613917 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -64,7 +64,7 @@ "image_prefix": "//_", "image_format": "exr", "multilayer_exr": true, - "remove_aovs": true, + "remove_aovs": false, "tiled": true, "aov_list": [], "additional_options": [] From 365feefb48189989976a95f65ec1d8a7fc6467cf Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 14 Nov 
2022 19:13:23 +0800 Subject: [PATCH 004/130] adding removeAOVs in setting to allow users to choose whether keeping existing AOVs --- openpype/settings/defaults/project_settings/maya.json | 2 +- .../schemas/schema_maya_render_settings.json | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 09f3e613917..54b70b4a445 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -64,8 +64,8 @@ "image_prefix": "//_", "image_format": "exr", "multilayer_exr": true, - "remove_aovs": false, "tiled": true, + "remove_aovs": false, "aov_list": [], "additional_options": [] }, diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json index 9beea16b97c..98d33ade91c 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json @@ -70,13 +70,13 @@ "type": "boolean" }, { - "key": "remove_aovs", - "label": "Remove AOVs", + "key": "tiled", + "label": "Tiled (tif, exr)", "type": "boolean" }, { - "key": "tiled", - "label": "Tiled (tif, exr)", + "key": "remove_aovs", + "label": "Remove existing AOVs", "type": "boolean" }, { From 6a1846a36b06184fa16122c4c75b872baf29a012 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 14 Nov 2022 21:10:29 +0800 Subject: [PATCH 005/130] adding removeAOVs in setting to allow users to choose whether keeping existing AOVs --- openpype/hosts/maya/api/lib_rendersettings.py | 5 +++-- openpype/settings/defaults/project_settings/maya.json | 2 +- .../schemas/schema_maya_render_settings.json | 10 +++++----- 3 files changed, 9 insertions(+), 8 deletions(-) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index 9acb65b84c2..24c183b9380 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -101,14 +101,15 @@ def _set_arnold_settings(self, width, height): from mtoa.core import createOptions # noqa from mtoa.aovs import AOVInterface # noqa createOptions() - arnold_render_presets = self._project_settings["maya"]["RenderSettings"]["arnold_renderer"] # noqa + render_settings = self._project_settings["maya"]["RenderSettings"] + arnold_render_presets = render_settings["arnold_renderer"] # noqa # Force resetting settings and AOV list to avoid having to deal with # AOV checking logic, for now. # This is a work around because the standard # function to revert render settings does not reset AOVs list in MtoA # Fetch current aovs in case there's any. 
current_aovs = AOVInterface().getAOVs() - remove_aovs = arnold_render_presets["remove_aovs"] + remove_aovs = render_settings["remove_aovs"] if remove_aovs: # Remove fetched AOVs AOVInterface().removeAOVs(current_aovs) diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 54b70b4a445..f97ea47b52b 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -59,13 +59,13 @@ "default_render_image_folder": "renders/maya", "enable_all_lights": true, "aov_separator": "underscore", + "remove_aovs": false, "reset_current_frame": false, "arnold_renderer": { "image_prefix": "//_", "image_format": "exr", "multilayer_exr": true, "tiled": true, - "remove_aovs": false, "aov_list": [], "additional_options": [] }, diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json index 98d33ade91c..c1bafc41082 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json @@ -31,6 +31,11 @@ {"dot": ". (dot)"} ] }, + { + "key": "remove_aovs", + "label": "Remove existing AOVs", + "type": "boolean" + }, { "key": "reset_current_frame", "label": "Reset Current Frame", @@ -74,11 +79,6 @@ "label": "Tiled (tif, exr)", "type": "boolean" }, - { - "key": "remove_aovs", - "label": "Remove existing AOVs", - "type": "boolean" - }, { "key": "aov_list", "label": "AOVs to create", From b17eb8df7d42f2b5fa5680785e4f5570f8f7fd9e Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 14 Nov 2022 21:24:57 +0800 Subject: [PATCH 006/130] adding removeAOVs in setting to allow users to choose whether keeping existing AOVs --- openpype/hosts/maya/api/lib_rendersettings.py | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index 24c183b9380..3e7e62a7a8d 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -146,12 +146,17 @@ def _set_arnold_settings(self, width, height): def _set_redshift_settings(self, width, height): """Sets settings for Redshift.""" - redshift_render_presets = ( - self._project_settings - ["maya"] - ["RenderSettings"] - ["redshift_renderer"] - ) + render_settings = self._project_settings["maya"]["RenderSettings"] + redshift_render_presets = render_settings["redshift_renderer"] + + remove_aovs = render_settings["remove_aovs"] + if remove_aovs: + aovs = cmds.ls(type='RedshiftAOV') + for aov in aovs: + enabled = cmds.getAttr("{}.enabled".format(aov)) + if enabled: + cmds.delete(aov) + additional_options = redshift_render_presets["additional_options"] ext = redshift_render_presets["image_format"] img_exts = ["iff", "exr", "tif", "png", "tga", "jpg"] From 85d8edcd00edac8ab5b198b014f0f86acde6f1b6 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 14 Nov 2022 21:25:51 +0800 Subject: [PATCH 007/130] adding removeAOVs in setting to allow users to choose whether keeping existing AOVs --- openpype/hosts/maya/api/lib_rendersettings.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index 3e7e62a7a8d..bc817c862ef 100644 --- 
a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -146,7 +146,7 @@ def _set_arnold_settings(self, width, height): def _set_redshift_settings(self, width, height): """Sets settings for Redshift.""" - render_settings = self._project_settings["maya"]["RenderSettings"] + render_settings = self._project_settings["maya"]["RenderSettings"] redshift_render_presets = render_settings["redshift_renderer"] remove_aovs = render_settings["remove_aovs"] From 2e2386a349c82d244ae5c0383a865c1d72237c56 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 14 Nov 2022 22:47:33 +0800 Subject: [PATCH 008/130] adding removeAOVs in setting to allow users to choose whether keeping the aovs --- openpype/hosts/maya/api/lib_rendersettings.py | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index bc817c862ef..fa09e26e9eb 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -173,12 +173,16 @@ def _set_vray_settings(self, aov_separator, width, height): """Sets important settings for Vray.""" settings = cmds.ls(type="VRaySettingsNode") node = settings[0] if settings else cmds.createNode("VRaySettingsNode") - vray_render_presets = ( - self._project_settings - ["maya"] - ["RenderSettings"] - ["vray_renderer"] - ) + render_settings = self._project_settings["maya"]["RenderSettings"] + vray_render_presets = render_settings["vray_renderer"] + # vrayRenderElement + remove_aovs = vray_render_presets["remove_aovs"] + if remove_aovs: + aovs = cmds.ls(type='VRayRenderElement') + for aov in aovs: + enabled = cmds.getAttr("{}.enabled".format(aov)) + if enabled: + cmds.delete(aov) # Set aov separator # First we need to explicitly set the UI items in Render Settings # because that is also what V-Ray updates to when that Render Settings From 6f65ea4f54590e0e3267880aa64454a56e490005 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Tue, 15 Nov 2022 02:39:31 +0800 Subject: [PATCH 009/130] adding removeAOVs in setting to allow users to choose whether keeping existing AOVs --- openpype/hosts/maya/api/lib_rendersettings.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index fa09e26e9eb..de849db21cd 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -176,10 +176,11 @@ def _set_vray_settings(self, aov_separator, width, height): render_settings = self._project_settings["maya"]["RenderSettings"] vray_render_presets = render_settings["vray_renderer"] # vrayRenderElement - remove_aovs = vray_render_presets["remove_aovs"] + remove_aovs = render_settings["remove_aovs"] if remove_aovs: aovs = cmds.ls(type='VRayRenderElement') for aov in aovs: + # remove all aovs except LightSelect enabled = cmds.getAttr("{}.enabled".format(aov)) if enabled: cmds.delete(aov) From 6190e6ba111f66bf3be8beb52b54191aa8fd272f Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Tue, 15 Nov 2022 03:07:22 +0800 Subject: [PATCH 010/130] adding removeAOVs in setting to allow users to choose whether keeping existing AOVs --- openpype/hosts/maya/api/lib_rendersettings.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index de849db21cd..f64a86ee078 100644 --- 
a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -184,6 +184,12 @@ def _set_vray_settings(self, aov_separator, width, height): enabled = cmds.getAttr("{}.enabled".format(aov)) if enabled: cmds.delete(aov) + # remove LightSelect + lightSelect_aovs =cmds.ls(type='VRayRenderElementSet') + for light_aovs in lightSelect_aovs: + light_enabled = cmds.getAttr("{}.enabled".format(light_aovs)) + if light_enabled: + cmds.delete(lightSelect_aovs) # Set aov separator # First we need to explicitly set the UI items in Render Settings # because that is also what V-Ray updates to when that Render Settings From 1b00dec8de85264a05dbb2cdfb5528c3864b094f Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Tue, 15 Nov 2022 03:08:08 +0800 Subject: [PATCH 011/130] adding removeAOVs in setting to allow users to choose whether keeping existing AOVs --- openpype/hosts/maya/api/lib_rendersettings.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index f64a86ee078..1293f1287d9 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -185,7 +185,7 @@ def _set_vray_settings(self, aov_separator, width, height): if enabled: cmds.delete(aov) # remove LightSelect - lightSelect_aovs =cmds.ls(type='VRayRenderElementSet') + lightSelect_aovs = cmds.ls(type='VRayRenderElementSet') for light_aovs in lightSelect_aovs: light_enabled = cmds.getAttr("{}.enabled".format(light_aovs)) if light_enabled: From 09be994ca11cf21c95e2e93e075e21c9ea190bbf Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Tue, 15 Nov 2022 20:48:14 +0800 Subject: [PATCH 012/130] layout publish more than one container issue --- .../maya/plugins/publish/extract_layout.py | 19 ++++++++++++------- 1 file changed, 12 insertions(+), 7 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_layout.py b/openpype/hosts/maya/plugins/publish/extract_layout.py index a801d99f42d..a5131efca98 100644 --- a/openpype/hosts/maya/plugins/publish/extract_layout.py +++ b/openpype/hosts/maya/plugins/publish/extract_layout.py @@ -15,6 +15,7 @@ class ExtractLayout(publish.Extractor): label = "Extract Layout" hosts = ["maya"] families = ["layout"] + project_container = "AVALON_CONTAINERS" optional = True def process(self, instance): @@ -33,13 +34,17 @@ def process(self, instance): for asset in cmds.sets(str(instance), query=True): # Find the container - grp_name = asset.split(':')[0] - containers = cmds.ls("{}*_CON".format(grp_name)) - - assert len(containers) == 1, \ - "More than one container found for {}".format(asset) - - container = containers[0] + project_container = self.project_container + container_list = cmds.ls(project_container) + assert len(container_list) == 1, \ + "No project container found for {} " \ + "Please create instance with loaded asset".format(asset) + containers = cmds.sets(project_container, query=True) + for con in containers: + if "_CON" not in con: + assert containers == [], \ + "No container found for {}".format(asset) + container = con representation_id = cmds.getAttr( "{}.representation".format(container)) From 0dff5e86c5f3f7b0e917c1538feb4ee23a6eb034 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Tue, 15 Nov 2022 21:08:48 +0800 Subject: [PATCH 013/130] layout publish more than one container issue --- openpype/hosts/maya/plugins/publish/extract_layout.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git 
a/openpype/hosts/maya/plugins/publish/extract_layout.py b/openpype/hosts/maya/plugins/publish/extract_layout.py index a5131efca98..c6eca0b05eb 100644 --- a/openpype/hosts/maya/plugins/publish/extract_layout.py +++ b/openpype/hosts/maya/plugins/publish/extract_layout.py @@ -37,13 +37,12 @@ def process(self, instance): project_container = self.project_container container_list = cmds.ls(project_container) assert len(container_list) == 1, \ - "No project container found for {} " \ - "Please create instance with loaded asset".format(asset) + "Please create instance with loaded asset" containers = cmds.sets(project_container, query=True) for con in containers: if "_CON" not in con: assert containers == [], \ - "No container found for {}".format(asset) + "No container found for {}".format(asset) container = con representation_id = cmds.getAttr( From d5b1f58fd25c4fac3bfdc1bc1311ee2913f70498 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Thu, 17 Nov 2022 19:16:13 +0800 Subject: [PATCH 014/130] layout publish more than one container issue --- openpype/hosts/maya/plugins/publish/extract_layout.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_layout.py b/openpype/hosts/maya/plugins/publish/extract_layout.py index c6eca0b05eb..f77835d47fa 100644 --- a/openpype/hosts/maya/plugins/publish/extract_layout.py +++ b/openpype/hosts/maya/plugins/publish/extract_layout.py @@ -39,10 +39,15 @@ def process(self, instance): assert len(container_list) == 1, \ "Please create instance with loaded asset" containers = cmds.sets(project_container, query=True) + load_asset = asset.split(':')[0] for con in containers: - if "_CON" not in con: + ass_transform = cmds.listRelatives(con, allParents=True)[0] + if load_asset not in ass_transform: assert containers == [], \ "No container found for {}".format(asset) + if "_CON" not in con: + assert containers == [], \ + "Container missing for {}".format(asset) container = con representation_id = cmds.getAttr( From bc35e8b3a3c879e74bd4b79c65d4d87ec8394c97 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Sat, 19 Nov 2022 15:51:48 +0800 Subject: [PATCH 015/130] instances for bb geometry and publisher for bb geometry --- .../maya/plugins/create/create_proxy_abc.py | 42 ++++++++ .../maya/plugins/publish/collect_proxy_abc.py | 14 +++ .../maya/plugins/publish/extract_proxy_abc.py | 96 +++++++++++++++++++ .../defaults/project_settings/maya.json | 6 ++ .../schemas/schema_maya_create.json | 4 + 5 files changed, 162 insertions(+) create mode 100644 openpype/hosts/maya/plugins/create/create_proxy_abc.py create mode 100644 openpype/hosts/maya/plugins/publish/collect_proxy_abc.py create mode 100644 openpype/hosts/maya/plugins/publish/extract_proxy_abc.py diff --git a/openpype/hosts/maya/plugins/create/create_proxy_abc.py b/openpype/hosts/maya/plugins/create/create_proxy_abc.py new file mode 100644 index 00000000000..f9671dfccfc --- /dev/null +++ b/openpype/hosts/maya/plugins/create/create_proxy_abc.py @@ -0,0 +1,42 @@ +from openpype.hosts.maya.api import ( + lib, + plugin +) + + +class CreateProxyAlembic(plugin.Creator): + """Proxy Alembic for animated data""" + + name = "proxyAbcMain" + label = "Proxy Alembic" + family = "proxyAbc" + icon = "gears" + write_color_sets = False + write_face_sets = False + + + def __init__(self, *args, **kwargs): + super(CreateProxyAlembic, self).__init__(*args, **kwargs) + + # Add animation data + self.data.update(lib.collect_animation_data()) + + # Vertex colors with the geometry. 
+ self.data["writeColorSets"] = self.write_color_sets + # Vertex colors with the geometry. + self.data["writeFaceSets"] = self.write_face_sets + # Include parent groups + self.data["includeParentHierarchy"] = False + # only nodes which are visible + self.data["visibleOnly"] = False + # Default to exporting world-space + self.data["worldSpace"] = True + + # Creating a single bounding box per shape selected + self.data["single"] = False + # name suffix for the bounding box + self.data["nameSuffix"] = "_BBox" + + # Add options for custom attributes + self.data["attr"] = "" + self.data["attrPrefix"] = "" diff --git a/openpype/hosts/maya/plugins/publish/collect_proxy_abc.py b/openpype/hosts/maya/plugins/publish/collect_proxy_abc.py new file mode 100644 index 00000000000..2a7890fcace --- /dev/null +++ b/openpype/hosts/maya/plugins/publish/collect_proxy_abc.py @@ -0,0 +1,14 @@ +import pyblish.api + +class CollectProxyAlembic(pyblish.api.InstancePlugin): + """Collect Proxy Alembic for instance.""" + + order = pyblish.api.CollectorOrder + 0.45 + families = ["proxyAbc"] + label = "Collect Proxy Alembic" + hosts = ["maya"] + + def process(self, instance): + """Collector entry point.""" + if not instance.data.get('families'): + instance.data["families"] = [] diff --git a/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py b/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py new file mode 100644 index 00000000000..b1306edac57 --- /dev/null +++ b/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py @@ -0,0 +1,96 @@ +import os + +from maya import cmds + +from openpype.pipeline import publish +from openpype.hosts.maya.api.lib import ( + extract_alembic, + suspended_refresh, + maintained_selection, + iter_visible_nodes_in_range +) + + +class ExtractAlembic(publish.Extractor): + """Produce an alembic for bounding box geometry + """ + + label = "Extract Proxy (Alembic)" + hosts = ["maya"] + families = ["proxyAbc"] + + def process(self, instance): + + nodes, roots = self.get_members_and_roots(instance) + start = float(instance.data.get("frameStartHandle", 1)) + end = float(instance.data.get("frameEndHandle", 1)) + + attrs = instance.data.get("attr", "").split(";") + attrs = [value for value in attrs if value.strip()] + attrs += ["cbId"] + + attr_prefixes = instance.data.get("attrPrefix", "").split(";") + attr_prefixes = [value for value in attr_prefixes if value.strip()] + + self.log.info("Extracting Proxy Meshes...") + + dirname = self.staging_dir(instance) + filename = "{name}.abc".format(**instance.data) + path = os.path.join(dirname, filename) + + options = { + "step": instance.data.get("step", 1.0), + "attr": attrs, + "attrPrefix": attr_prefixes, + "writeVisibility": True, + "writeCreases": True, + "writeColorSets": instance.data.get("writeColorSets", False), + "writeFaceSets": instance.data.get("writeFaceSets", False), + "uvWrite": True, + "selection": True, + "worldSpace": instance.data.get("worldSpace", True) + } + + if not instance.data.get("includeParentHierarchy", True): + + options["root"] = roots + if instance.data.get("visibleOnly", False): + nodes = list(iter_visible_nodes_in_range(nodes, + start=start, + end=end)) + with suspended_refresh(): + with maintained_selection(): + # TODO: select the bb geometry + self.create_proxy_geometry(instance, + start, + end) + extract_alembic(file=path, + startFrame=start, + endFrame=end, + **options) + + if "representations" not in instance.data: + instance.data["representations"] = [] + + representation = { + 'name': 'abc', + 'ext': 'abc', + 
'files': filename, + 'stagingDir': dirname + } + instance.data["representations"].append(representation) + + instance.context.data["cleanupFullPaths"].append(path) + + self.log.info("Extracted {} to {}".format(instance, dirname)) + #TODO: delete the bounding box + + def get_members_and_roots(self, instance): + return instance[:], instance.data.get("setMembers") + + def create_proxy_geometry(self, instance, start, end): + + inst_selection = cmds.ls(instance.name, long=True) + name_suffix = instance.data.get("nameSuffix") + if instance.data.get("single", True): + pass diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 988c0e777a2..cb9af2c2b61 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -197,6 +197,12 @@ "Main" ] }, + "CreateProxyAlembic": { + "enabled": true, + "defaults": [ + "Main" + ] + }, "CreateRenderSetup": { "enabled": true, "defaults": [ diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json index bc6520474d5..8512736211b 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json @@ -237,6 +237,10 @@ "key": "CreateMayaScene", "label": "Create Maya Scene" }, + { + "key": "CreateProxyAlembic", + "label": "Create Proxy Alembic" + }, { "key": "CreateRenderSetup", "label": "Create Render Setup" From 75175b8a747a06c14c52a8d95bb951bc083e406a Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 21 Nov 2022 16:02:13 +0800 Subject: [PATCH 016/130] create and publish bb geometry --- openpype/hosts/maya/plugins/load/actions.py | 2 + .../maya/plugins/publish/collect_proxy_abc.py | 14 ------ .../maya/plugins/publish/extract_proxy_abc.py | 43 +++++++++++++------ .../plugins/publish/validate_frame_range.py | 1 + .../defaults/project_settings/maya.json | 6 +++ .../schemas/schema_maya_publish.json | 20 +++++++++ 6 files changed, 58 insertions(+), 28 deletions(-) delete mode 100644 openpype/hosts/maya/plugins/publish/collect_proxy_abc.py diff --git a/openpype/hosts/maya/plugins/load/actions.py b/openpype/hosts/maya/plugins/load/actions.py index eca1b27f345..9cc9180d6e6 100644 --- a/openpype/hosts/maya/plugins/load/actions.py +++ b/openpype/hosts/maya/plugins/load/actions.py @@ -14,6 +14,7 @@ class SetFrameRangeLoader(load.LoaderPlugin): families = ["animation", "camera", + "proxyAbc", "pointcache"] representations = ["abc"] @@ -48,6 +49,7 @@ class SetFrameRangeWithHandlesLoader(load.LoaderPlugin): families = ["animation", "camera", + "proxyAbc", "pointcache"] representations = ["abc"] diff --git a/openpype/hosts/maya/plugins/publish/collect_proxy_abc.py b/openpype/hosts/maya/plugins/publish/collect_proxy_abc.py deleted file mode 100644 index 2a7890fcace..00000000000 --- a/openpype/hosts/maya/plugins/publish/collect_proxy_abc.py +++ /dev/null @@ -1,14 +0,0 @@ -import pyblish.api - -class CollectProxyAlembic(pyblish.api.InstancePlugin): - """Collect Proxy Alembic for instance.""" - - order = pyblish.api.CollectorOrder + 0.45 - families = ["proxyAbc"] - label = "Collect Proxy Alembic" - hosts = ["maya"] - - def process(self, instance): - """Collector entry point.""" - if not instance.data.get('families'): - instance.data["families"] = [] diff --git 
a/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py b/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py index b1306edac57..4607fd8a4b1 100644 --- a/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py +++ b/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py @@ -11,7 +11,7 @@ ) -class ExtractAlembic(publish.Extractor): +class ExtractProxyAlembic(publish.Extractor): """Produce an alembic for bounding box geometry """ @@ -22,6 +22,8 @@ class ExtractAlembic(publish.Extractor): def process(self, instance): nodes, roots = self.get_members_and_roots(instance) + + # Collect the start and end including handles start = float(instance.data.get("frameStartHandle", 1)) end = float(instance.data.get("frameEndHandle", 1)) @@ -32,9 +34,9 @@ def process(self, instance): attr_prefixes = instance.data.get("attrPrefix", "").split(";") attr_prefixes = [value for value in attr_prefixes if value.strip()] - self.log.info("Extracting Proxy Meshes...") - + self.log.info("Extracting pointcache..") dirname = self.staging_dir(instance) + filename = "{name}.abc".format(**instance.data) path = os.path.join(dirname, filename) @@ -52,18 +54,17 @@ def process(self, instance): } if not instance.data.get("includeParentHierarchy", True): - options["root"] = roots + + if instance.data.get("visibleOnly", False): nodes = list(iter_visible_nodes_in_range(nodes, start=start, end=end)) + with suspended_refresh(): with maintained_selection(): - # TODO: select the bb geometry - self.create_proxy_geometry(instance, - start, - end) + self.create_proxy_geometry(instance, nodes, start, end) extract_alembic(file=path, startFrame=start, endFrame=end, @@ -76,21 +77,35 @@ def process(self, instance): 'name': 'abc', 'ext': 'abc', 'files': filename, - 'stagingDir': dirname + "stagingDir": dirname } instance.data["representations"].append(representation) instance.context.data["cleanupFullPaths"].append(path) self.log.info("Extracted {} to {}".format(instance, dirname)) - #TODO: delete the bounding box def get_members_and_roots(self, instance): return instance[:], instance.data.get("setMembers") - def create_proxy_geometry(self, instance, start, end): - - inst_selection = cmds.ls(instance.name, long=True) + def create_proxy_geometry(self, instance, node, start, end): + inst_selection = cmds.ls(node, long=True) name_suffix = instance.data.get("nameSuffix") if instance.data.get("single", True): - pass + cmds.geomToBBox(inst_selection, + name=instance.name, + nameSuffix=name_suffix, + single=True, + keepOriginal=True, + bakeAnimation=True, + startTime=start, + endTime=end) + else: + cmds.geomToBBox(inst_selection, + name=instance.name, + nameSuffix=name_suffix, + single=False, + keepOriginal=True, + bakeAnimation=True, + startTime=start, + endTime=end) diff --git a/openpype/hosts/maya/plugins/publish/validate_frame_range.py b/openpype/hosts/maya/plugins/publish/validate_frame_range.py index b467a7c2326..5e50ae72cd1 100644 --- a/openpype/hosts/maya/plugins/publish/validate_frame_range.py +++ b/openpype/hosts/maya/plugins/publish/validate_frame_range.py @@ -25,6 +25,7 @@ class ValidateFrameRange(pyblish.api.InstancePlugin): families = ["animation", "pointcache", "camera", + "proxyAbc", "renderlayer", "review", "yeticache"] diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index cb9af2c2b61..bfa3c9f0fbe 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -575,6 +575,12 @@ 
"optional": false, "active": true }, + "ExtractProxyAlembic": { + "enabled": true, + "families": [ + "proxyAbc" + ] + }, "ExtractAlembic": { "enabled": true, "families": [ diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json index ab8c6b885ec..2c6260db306 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json @@ -638,6 +638,26 @@ "type": "label", "label": "Extractors" }, + { + "type": "dict", + "collapsible": true, + "key": "ExtractProxyAlembic", + "label": "Extract Proxy Alembic", + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "key": "families", + "label": "Families", + "type": "list", + "object_type": "text" + } + ] + }, { "type": "dict", "collapsible": true, From 302c5fe21456e318e24451d2a6df190bc2dce449 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Tue, 22 Nov 2022 18:52:26 +0800 Subject: [PATCH 017/130] create and publish bb-geometry --- .../maya/plugins/create/create_proxy_abc.py | 2 + .../maya/plugins/load/load_abc_to_standin.py | 2 +- .../hosts/maya/plugins/load/load_gpucache.py | 2 +- .../hosts/maya/plugins/load/load_reference.py | 1 + .../maya/plugins/publish/extract_proxy_abc.py | 43 +++++++++---------- .../plugins/publish/collect_resources_path.py | 1 + openpype/plugins/publish/integrate.py | 1 + openpype/plugins/publish/integrate_legacy.py | 1 + .../defaults/project_settings/maya.json | 3 +- .../schemas/template_publish_families.json | 1 + 10 files changed, 32 insertions(+), 25 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_proxy_abc.py b/openpype/hosts/maya/plugins/create/create_proxy_abc.py index f9671dfccfc..57978cb4d96 100644 --- a/openpype/hosts/maya/plugins/create/create_proxy_abc.py +++ b/openpype/hosts/maya/plugins/create/create_proxy_abc.py @@ -34,6 +34,8 @@ def __init__(self, *args, **kwargs): # Creating a single bounding box per shape selected self.data["single"] = False + # remove the bbBox after publish + #self.data["removeBoundingBoxAfterPublish"] = False # name suffix for the bounding box self.data["nameSuffix"] = "_BBox" diff --git a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py index 605a492e4d1..70866a3ba69 100644 --- a/openpype/hosts/maya/plugins/load/load_abc_to_standin.py +++ b/openpype/hosts/maya/plugins/load/load_abc_to_standin.py @@ -11,7 +11,7 @@ class AlembicStandinLoader(load.LoaderPlugin): """Load Alembic as Arnold Standin""" - families = ["animation", "model", "pointcache"] + families = ["animation", "model", "proxyAbc", "pointcache"] representations = ["abc"] label = "Import Alembic as Arnold Standin" diff --git a/openpype/hosts/maya/plugins/load/load_gpucache.py b/openpype/hosts/maya/plugins/load/load_gpucache.py index a09f924c7b5..07e5734f43d 100644 --- a/openpype/hosts/maya/plugins/load/load_gpucache.py +++ b/openpype/hosts/maya/plugins/load/load_gpucache.py @@ -10,7 +10,7 @@ class GpuCacheLoader(load.LoaderPlugin): """Load Alembic as gpuCache""" - families = ["model", "animation", "pointcache"] + families = ["model", "animation", "proxyAbc", "pointcache"] representations = ["abc"] label = "Import Gpu Cache" diff --git a/openpype/hosts/maya/plugins/load/load_reference.py b/openpype/hosts/maya/plugins/load/load_reference.py 
index c762a29326a..c6b07b036d7 100644 --- a/openpype/hosts/maya/plugins/load/load_reference.py +++ b/openpype/hosts/maya/plugins/load/load_reference.py @@ -16,6 +16,7 @@ class ReferenceLoader(openpype.hosts.maya.api.plugin.ReferenceLoader): families = ["model", "pointcache", + "proxyAbc", "animation", "mayaAscii", "mayaScene", diff --git a/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py b/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py index 4607fd8a4b1..ee389795771 100644 --- a/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py +++ b/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py @@ -22,7 +22,8 @@ class ExtractProxyAlembic(publish.Extractor): def process(self, instance): nodes, roots = self.get_members_and_roots(instance) - + self.log.info("nodes:{}".format(nodes)) + self.log.info("roots:{}".format(roots)) # Collect the start and end including handles start = float(instance.data.get("frameStartHandle", 1)) end = float(instance.data.get("frameEndHandle", 1)) @@ -34,7 +35,7 @@ def process(self, instance): attr_prefixes = instance.data.get("attrPrefix", "").split(";") attr_prefixes = [value for value in attr_prefixes if value.strip()] - self.log.info("Extracting pointcache..") + self.log.info("Extracting Proxy Alembic..") dirname = self.staging_dir(instance) filename = "{name}.abc".format(**instance.data) @@ -55,16 +56,22 @@ def process(self, instance): if not instance.data.get("includeParentHierarchy", True): options["root"] = roots + self.log.info("{}".format(options["root"])) + if int(cmds.about(version=True)) >= 2017: + # Since Maya 2017 alembic supports multiple uv sets - write them. + options["writeUVSets"] = True if instance.data.get("visibleOnly", False): nodes = list(iter_visible_nodes_in_range(nodes, start=start, end=end)) - with suspended_refresh(): with maintained_selection(): - self.create_proxy_geometry(instance, nodes, start, end) + self.create_proxy_geometry(instance, + nodes, + start, + end) extract_alembic(file=path, startFrame=start, endFrame=end, @@ -91,21 +98,13 @@ def get_members_and_roots(self, instance): def create_proxy_geometry(self, instance, node, start, end): inst_selection = cmds.ls(node, long=True) name_suffix = instance.data.get("nameSuffix") - if instance.data.get("single", True): - cmds.geomToBBox(inst_selection, - name=instance.name, - nameSuffix=name_suffix, - single=True, - keepOriginal=True, - bakeAnimation=True, - startTime=start, - endTime=end) - else: - cmds.geomToBBox(inst_selection, - name=instance.name, - nameSuffix=name_suffix, - single=False, - keepOriginal=True, - bakeAnimation=True, - startTime=start, - endTime=end) + bbox = cmds.geomToBBox(inst_selection, + name=instance.name, + nameSuffix=name_suffix, + single=instance.data.get("single", False), + keepOriginal=True, + bakeAnimation=True, + startTime=start, + endTime=end) + return cmds.select(bbox, noExpand=True) + diff --git a/openpype/plugins/publish/collect_resources_path.py b/openpype/plugins/publish/collect_resources_path.py index 00f65b8b672..90aa0f44bb0 100644 --- a/openpype/plugins/publish/collect_resources_path.py +++ b/openpype/plugins/publish/collect_resources_path.py @@ -21,6 +21,7 @@ class CollectResourcesPath(pyblish.api.InstancePlugin): order = pyblish.api.CollectorOrder + 0.495 families = ["workfile", "pointcache", + "proxyAbc", "camera", "animation", "model", diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index 0998e643e69..66f9a7aa598 100644 --- a/openpype/plugins/publish/integrate.py +++ 
b/openpype/plugins/publish/integrate.py @@ -81,6 +81,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): order = pyblish.api.IntegratorOrder families = ["workfile", "pointcache", + "proxyAbc", "camera", "animation", "model", diff --git a/openpype/plugins/publish/integrate_legacy.py b/openpype/plugins/publish/integrate_legacy.py index 536ab83f2c1..d05aea1e2ff 100644 --- a/openpype/plugins/publish/integrate_legacy.py +++ b/openpype/plugins/publish/integrate_legacy.py @@ -76,6 +76,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): order = pyblish.api.IntegratorOrder + 0.00001 families = ["workfile", "pointcache", + "proxyAbc", "camera", "animation", "model", diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index bfa3c9f0fbe..f4a9fdd9ed6 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -944,7 +944,8 @@ "subset_name_filters": [], "families": [ "animation", - "pointcache" + "pointcache", + "proxyAbc" ], "repre_names": [ "abc" diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/template_publish_families.json b/openpype/settings/entities/schemas/projects_schema/schemas/template_publish_families.json index f39ad31fbbc..43dd74cdf9d 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/template_publish_families.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/template_publish_families.json @@ -28,6 +28,7 @@ {"nukenodes": "nukenodes"}, {"plate": "plate"}, {"pointcache": "pointcache"}, + {"proxyAbc": "proxyAbc"}, {"prerender": "prerender"}, {"redshiftproxy": "redshiftproxy"}, {"reference": "reference"}, From 8d8c7d1149baff769ea55ba321998d77bc777cb3 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Wed, 23 Nov 2022 16:03:52 +0000 Subject: [PATCH 018/130] Load alembic animation in Unreal --- .../plugins/load/load_alembic_animation.py | 162 ++++++++++++++++++ 1 file changed, 162 insertions(+) create mode 100644 openpype/hosts/unreal/plugins/load/load_alembic_animation.py diff --git a/openpype/hosts/unreal/plugins/load/load_alembic_animation.py b/openpype/hosts/unreal/plugins/load/load_alembic_animation.py new file mode 100644 index 00000000000..496b6056ea6 --- /dev/null +++ b/openpype/hosts/unreal/plugins/load/load_alembic_animation.py @@ -0,0 +1,162 @@ +# -*- coding: utf-8 -*- +"""Load Alembic Animation.""" +import os + +from openpype.pipeline import ( + get_representation_path, + AVALON_CONTAINER_ID +) +from openpype.hosts.unreal.api import plugin +from openpype.hosts.unreal.api import pipeline as unreal_pipeline +import unreal # noqa + + +class AnimationAlembicLoader(plugin.Loader): + """Load Unreal SkeletalMesh from Alembic""" + + families = ["animation"] + label = "Import Alembic Animation" + representations = ["abc"] + icon = "cube" + color = "orange" + + def get_task(self, filename, asset_dir, asset_name, replace): + task = unreal.AssetImportTask() + options = unreal.AbcImportSettings() + sm_settings = unreal.AbcStaticMeshSettings() + conversion_settings = unreal.AbcConversionSettings( + preset=unreal.AbcConversionPreset.CUSTOM, + flip_u=False, flip_v=False, + rotation=[0.0, 0.0, 0.0], + scale=[1.0, 1.0, -1.0]) + + task.set_editor_property('filename', filename) + task.set_editor_property('destination_path', asset_dir) + task.set_editor_property('destination_name', asset_name) + task.set_editor_property('replace_existing', replace) + task.set_editor_property('automated', True) 
+ task.set_editor_property('save', True) + + options.set_editor_property( + 'import_type', unreal.AlembicImportType.SKELETAL) + + options.static_mesh_settings = sm_settings + options.conversion_settings = conversion_settings + task.options = options + + return task + + def load(self, context, name, namespace, data): + """Load and containerise representation into Content Browser. + + This is two step process. First, import FBX to temporary path and + then call `containerise()` on it - this moves all content to new + directory and then it will create AssetContainer there and imprint it + with metadata. This will mark this path as container. + + Args: + context (dict): application context + name (str): subset name + namespace (str): in Unreal this is basically path to container. + This is not passed here, so namespace is set + by `containerise()` because only then we know + real path. + data (dict): Those would be data to be imprinted. This is not used + now, data are imprinted by `containerise()`. + + Returns: + list(str): list of container content + """ + + # Create directory for asset and openpype container + root = "/Game/OpenPype/Assets" + asset = context.get('asset').get('name') + suffix = "_CON" + if asset: + asset_name = "{}_{}".format(asset, name) + else: + asset_name = "{}".format(name) + version = context.get('version').get('name') + + tools = unreal.AssetToolsHelpers().get_asset_tools() + asset_dir, container_name = tools.create_unique_asset_name( + f"{root}/{asset}/{name}_v{version:03d}", suffix="") + + container_name += suffix + + if not unreal.EditorAssetLibrary.does_directory_exist(asset_dir): + unreal.EditorAssetLibrary.make_directory(asset_dir) + + task = self.get_task(self.fname, asset_dir, asset_name, False) + + asset_tools = unreal.AssetToolsHelpers.get_asset_tools() + asset_tools.import_asset_tasks([task]) + + # Create Asset Container + unreal_pipeline.create_container( + container=container_name, path=asset_dir) + + data = { + "schema": "openpype:container-2.0", + "id": AVALON_CONTAINER_ID, + "asset": asset, + "namespace": asset_dir, + "container_name": container_name, + "asset_name": asset_name, + "loader": str(self.__class__.__name__), + "representation": context["representation"]["_id"], + "parent": context["representation"]["parent"], + "family": context["representation"]["context"]["family"] + } + unreal_pipeline.imprint( + "{}/{}".format(asset_dir, container_name), data) + + asset_content = unreal.EditorAssetLibrary.list_assets( + asset_dir, recursive=True, include_folder=True + ) + + for a in asset_content: + unreal.EditorAssetLibrary.save_asset(a) + + return asset_content + + def update(self, container, representation): + name = container["asset_name"] + source_path = get_representation_path(representation) + destination_path = container["namespace"] + + task = self.get_task(source_path, destination_path, name, True) + + # do import fbx and replace existing data + asset_tools = unreal.AssetToolsHelpers.get_asset_tools() + asset_tools.import_asset_tasks([task]) + + container_path = f"{container['namespace']}/{container['objectName']}" + + # update metadata + unreal_pipeline.imprint( + container_path, + { + "representation": str(representation["_id"]), + "parent": str(representation["parent"]) + }) + + asset_content = unreal.EditorAssetLibrary.list_assets( + destination_path, recursive=True, include_folder=True + ) + + for a in asset_content: + unreal.EditorAssetLibrary.save_asset(a) + + def remove(self, container): + path = container["namespace"] + parent_path = 
os.path.dirname(path) + + unreal.EditorAssetLibrary.delete_directory(path) + + asset_content = unreal.EditorAssetLibrary.list_assets( + parent_path, recursive=False + ) + + if len(asset_content) == 0: + unreal.EditorAssetLibrary.delete_directory(parent_path) From 20ad9dc7277671ef273a26c04f7e4c909397cffd Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Thu, 24 Nov 2022 15:50:06 +0800 Subject: [PATCH 019/130] create and publish bb geometry --- .../maya/plugins/create/create_proxy_abc.py | 6 +- .../maya/plugins/publish/extract_proxy_abc.py | 63 ++++++++++--------- 2 files changed, 36 insertions(+), 33 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_proxy_abc.py b/openpype/hosts/maya/plugins/create/create_proxy_abc.py index 57978cb4d96..1ef0529a81a 100644 --- a/openpype/hosts/maya/plugins/create/create_proxy_abc.py +++ b/openpype/hosts/maya/plugins/create/create_proxy_abc.py @@ -25,17 +25,13 @@ def __init__(self, *args, **kwargs): self.data["writeColorSets"] = self.write_color_sets # Vertex colors with the geometry. self.data["writeFaceSets"] = self.write_face_sets - # Include parent groups - self.data["includeParentHierarchy"] = False # only nodes which are visible self.data["visibleOnly"] = False # Default to exporting world-space self.data["worldSpace"] = True - # Creating a single bounding box per shape selected - self.data["single"] = False # remove the bbBox after publish - #self.data["removeBoundingBoxAfterPublish"] = False + self.data["removeBoundingBoxAfterPublish"] = False # name suffix for the bounding box self.data["nameSuffix"] = "_BBox" diff --git a/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py b/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py index ee389795771..7a2c91535f0 100644 --- a/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py +++ b/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py @@ -20,10 +20,7 @@ class ExtractProxyAlembic(publish.Extractor): families = ["proxyAbc"] def process(self, instance): - - nodes, roots = self.get_members_and_roots(instance) - self.log.info("nodes:{}".format(nodes)) - self.log.info("roots:{}".format(roots)) + name_suffix = instance.data.get("nameSuffix") # Collect the start and end including handles start = float(instance.data.get("frameStartHandle", 1)) end = float(instance.data.get("frameEndHandle", 1)) @@ -41,6 +38,11 @@ def process(self, instance): filename = "{name}.abc".format(**instance.data) path = os.path.join(dirname, filename) + proxy_root = self.create_proxy_geometry(instance, + name_suffix, + start, + end) + options = { "step": instance.data.get("step", 1.0), "attr": attrs, @@ -51,27 +53,17 @@ def process(self, instance): "writeFaceSets": instance.data.get("writeFaceSets", False), "uvWrite": True, "selection": True, - "worldSpace": instance.data.get("worldSpace", True) + "worldSpace": instance.data.get("worldSpace", True), + "root": proxy_root } - if not instance.data.get("includeParentHierarchy", True): - options["root"] = roots - self.log.info("{}".format(options["root"])) - if int(cmds.about(version=True)) >= 2017: # Since Maya 2017 alembic supports multiple uv sets - write them. 
options["writeUVSets"] = True - if instance.data.get("visibleOnly", False): - nodes = list(iter_visible_nodes_in_range(nodes, - start=start, - end=end)) with suspended_refresh(): with maintained_selection(): - self.create_proxy_geometry(instance, - nodes, - start, - end) + cmds.select(proxy_root, hi=True, noExpand=True) extract_alembic(file=path, startFrame=start, endFrame=end, @@ -91,20 +83,35 @@ def process(self, instance): instance.context.data["cleanupFullPaths"].append(path) self.log.info("Extracted {} to {}".format(instance, dirname)) - - def get_members_and_roots(self, instance): - return instance[:], instance.data.get("setMembers") - - def create_proxy_geometry(self, instance, node, start, end): - inst_selection = cmds.ls(node, long=True) - name_suffix = instance.data.get("nameSuffix") +#TODO: clean up the bounding box + remove_bb = instance.data.get("removeBoundingBoxAfterPublish") + if remove_bb: + for bbox in proxy_root: + bounding_box = cmds.listRelatives(bbox, parent=True) + cmds.delete(bounding_box) + + def create_proxy_geometry(self, instance, name_suffix, start, end): + nodes = instance[:] + if instance.data.get("visibleOnly", False): + nodes = list(iter_visible_nodes_in_range(nodes, + start=start, + end=end)) + inst_selection = cmds.ls(nodes, long=True) + proxy_root = [] bbox = cmds.geomToBBox(inst_selection, - name=instance.name, nameSuffix=name_suffix, - single=instance.data.get("single", False), keepOriginal=True, + single=False, bakeAnimation=True, startTime=start, endTime=end) - return cmds.select(bbox, noExpand=True) - + for b in bbox: + dep_node = cmds.ls(b, dag=True, shapes=False, + noIntermediate=True, sn=True) + + for dep in dep_node: + if "Shape" in dep: + continue + proxy_root.append(dep) + self.log.debug("proxy_root: {}".format(proxy_root)) + return proxy_root From a0721ba2555f890873ceb9454f2ce3d00f544686 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Thu, 24 Nov 2022 16:04:09 +0800 Subject: [PATCH 020/130] create and publish bb geometry --- .../maya/plugins/create/create_proxy_abc.py | 1 - .../maya/plugins/publish/extract_proxy_abc.py | 2 +- .../defaults/project_settings/maya.json | 14 ++++---- .../schemas/schema_maya_create.json | 34 ++++++++++++++++--- 4 files changed, 39 insertions(+), 12 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_proxy_abc.py b/openpype/hosts/maya/plugins/create/create_proxy_abc.py index 1ef0529a81a..4401f3c04ff 100644 --- a/openpype/hosts/maya/plugins/create/create_proxy_abc.py +++ b/openpype/hosts/maya/plugins/create/create_proxy_abc.py @@ -14,7 +14,6 @@ class CreateProxyAlembic(plugin.Creator): write_color_sets = False write_face_sets = False - def __init__(self, *args, **kwargs): super(CreateProxyAlembic, self).__init__(*args, **kwargs) diff --git a/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py b/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py index 7a2c91535f0..f65626e9154 100644 --- a/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py +++ b/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py @@ -83,7 +83,7 @@ def process(self, instance): instance.context.data["cleanupFullPaths"].append(path) self.log.info("Extracted {} to {}".format(instance, dirname)) -#TODO: clean up the bounding box + remove_bb = instance.data.get("removeBoundingBoxAfterPublish") if remove_bb: for bbox in proxy_root: diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index f4a9fdd9ed6..78126283d06 100644 --- 
a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -149,6 +149,14 @@ "Main" ] }, + "CreateProxyAlembic": { + "enabled": true, + "write_color_sets": false, + "write_face_sets": false, + "defaults": [ + "Main" + ] + }, "CreateMultiverseUsd": { "enabled": true, "defaults": [ @@ -197,12 +205,6 @@ "Main" ] }, - "CreateProxyAlembic": { - "enabled": true, - "defaults": [ - "Main" - ] - }, "CreateRenderSetup": { "enabled": true, "defaults": [ diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json index 8512736211b..198b399e75d 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json @@ -200,6 +200,36 @@ } ] }, + { + "type": "dict", + "collapsible": true, + "key": "CreateProxyAlembic", + "label": "Create Proxy Alembic", + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "type": "boolean", + "key": "write_color_sets", + "label": "Write Color Sets" + }, + { + "type": "boolean", + "key": "write_face_sets", + "label": "Write Face Sets" + }, + { + "type": "list", + "key": "defaults", + "label": "Default Subsets", + "object_type": "text" + } + ] + }, { "type": "schema_template", @@ -237,10 +267,6 @@ "key": "CreateMayaScene", "label": "Create Maya Scene" }, - { - "key": "CreateProxyAlembic", - "label": "Create Proxy Alembic" - }, { "key": "CreateRenderSetup", "label": "Create Render Setup" From 74ab26863c6063eaaaac17d3f005f2c692debf2c Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Sat, 26 Nov 2022 15:56:35 +0800 Subject: [PATCH 021/130] glTF-extractor-for-Maya --- openpype/hosts/maya/api/gltf.py | 88 +++++++++++++++++++ .../maya/plugins/publish/collect_gltf.py | 18 ++++ .../maya/plugins/publish/extract_gltf.py | 65 ++++++++++++++ .../plugins/publish/collect_resources_path.py | 1 + openpype/plugins/publish/integrate.py | 1 + openpype/plugins/publish/integrate_legacy.py | 1 + .../defaults/project_settings/maya.json | 4 + .../schemas/schema_maya_publish.json | 21 ++++- 8 files changed, 198 insertions(+), 1 deletion(-) create mode 100644 openpype/hosts/maya/api/gltf.py create mode 100644 openpype/hosts/maya/plugins/publish/collect_gltf.py create mode 100644 openpype/hosts/maya/plugins/publish/extract_gltf.py diff --git a/openpype/hosts/maya/api/gltf.py b/openpype/hosts/maya/api/gltf.py new file mode 100644 index 00000000000..dd2a95a6d9b --- /dev/null +++ b/openpype/hosts/maya/api/gltf.py @@ -0,0 +1,88 @@ +# -*- coding: utf-8 -*- +"""Tools to work with GLTF.""" +import logging + +from pyblish.api import Instance + +from maya import cmds, mel # noqa + +log = logging.getLogger(__name__) + +_gltf_options = { + "of": str, # outputFolder + "cpr": str, # copyright + "sno": bool, # selectedNodeOnly + "sn": str, # sceneName + "glb": bool, # binary + "nbu": bool, # niceBufferURIs + "hbu": bool, # hashBufferURI + "ext": bool, # externalTextures + "ivt": int, # initialValuesTime + "acn": str, # animationClipName + "ast": int, # animationClipStartTime + "aet": int, # animationClipEndTime + "afr": float, # animationClipFrameRate + "dsa": int, # detectStepAnimations + "mpa": str, # meshPrimitiveAttributes + "bpa": str, # blendPrimitiveAttributes + "i32": bool, # force32bitIndices + "ssm": bool, # skipStandardMaterials + "eut":bool, # 
excludeUnusedTexcoord + "dm": bool, # defaultMaterial + "cm": bool, # colorizeMaterials + "dmy": str, # dumpMaya + "dgl": str, # dumpGLTF + "imd": str, # ignoreMeshDeformers + "ssc": bool, # skipSkinClusters + "sbs": bool, # skipBlendShapes + "rvp": bool, # redrawViewport + "vno": bool # visibleNodesOnly +} + +def extract_gltf(parent_dir, + filename, + **kwargs): + + """Sets GLTF export options from data in the instance. + + """ + + cmds.loadPlugin('maya2glTF', quiet=True) + # load the UI to run mel command + mel.eval("maya2glTF_UI()") + + parent_dir = parent_dir.replace('\\', '/') + options = { + "dsa": 1, + "glb": True + } + options.update(kwargs) + + for key, value in options.copy().items(): + if key not in _gltf_options: + log.warning("extract_gltf() does not support option '%s'. " + "Flag will be ignored..", key) + options.pop(key) + continue + + job_args = list() + default_opt = "maya2glTF -of \"{0}\" -sn \"{1}\"".format(parent_dir, filename) # noqa + job_args.append(default_opt) + + for key, value in options.items(): + if isinstance(value, str): + job_args.append("-{0} \"{1}\"".format(key, value)) + elif isinstance(value, bool): + if value: + job_args.append("-{0}".format(key)) + else: + job_args.append("-{0} {1}".format(key, value)) + + job_str = " ".join(job_args) + log.info("{}".format(job_str)) + mel.eval(job_str) + + # close the gltf export after finish the export + gltf_UI = "maya2glTF_exporter_window" + if cmds.window(gltf_UI, q = True, exists =True): + cmds.deleteUI(gltf_UI) diff --git a/openpype/hosts/maya/plugins/publish/collect_gltf.py b/openpype/hosts/maya/plugins/publish/collect_gltf.py new file mode 100644 index 00000000000..dba06dca230 --- /dev/null +++ b/openpype/hosts/maya/plugins/publish/collect_gltf.py @@ -0,0 +1,18 @@ +# -*- coding: utf-8 -*- +import pyblish.api + + +class CollectGLTF(pyblish.api.InstancePlugin): + """Collect Assets for GLTF/GLB export.""" + + order = pyblish.api.CollectorOrder + 0.2 + label = "Collect Asset for GLTF/GLB export" + families = ["model", "animation", "pointcache"] + + def process(self, instance): + if not instance.data.get("families"): + instance.data["families"] = [] + + if "fbx" not in instance.data["families"]: + instance.data["families"].append("gltf") + diff --git a/openpype/hosts/maya/plugins/publish/extract_gltf.py b/openpype/hosts/maya/plugins/publish/extract_gltf.py new file mode 100644 index 00000000000..1cab40d8255 --- /dev/null +++ b/openpype/hosts/maya/plugins/publish/extract_gltf.py @@ -0,0 +1,65 @@ +import os + +from maya import cmds, mel +import pyblish.api + +from openpype.pipeline import publish +from openpype.hosts.maya.api import lib +from openpype.hosts.maya.api.gltf import extract_gltf + +class ExtractGLB(publish.Extractor): + + order = pyblish.api.ExtractorOrder + hosts = ["maya"] + label = "Extract GLB" + families = ["gltf"] + + def process(self, instance): + staging_dir = self.staging_dir(instance) + filename = "{0}.glb".format(instance.name) + path = os.path.join(staging_dir, filename) + + + self.log.info("Extracting GLB to: {}".format(path)) + + nodes = instance[:] + + self.log.info("Instance: {0}".format(nodes)) + + start_frame = instance.data('frameStart') or \ + int(cmds.playbackOptions(query=True, + animationStartTime=True)) + end_frame = instance.data('frameEnd') or \ + int(cmds.playbackOptions(query=True, + animationEndTime=True)) + fps = mel.eval('currentTimeUnitToFPS()') + + options = { + "sno": True, #selectedNodeOnly + "nbu": True, # .bin instead of .bin0 + "ast": start_frame, + "aet": end_frame, 
+ "afr": fps, + "dsa": 1, + "acn": instance.name, + "glb": True, + "vno": True #visibleNodeOnly + } + with lib.maintained_selection(): + cmds.select(nodes, hi=True, noExpand=True) + extract_gltf(staging_dir, + instance.name, + **options) + + if "representations" not in instance.data: + instance.data["representations"] = [] + + representation = { + 'name': 'glb', + 'ext': 'glb', + 'files': filename, + "stagingDir": staging_dir, + } + instance.data["representations"].append(representation) + + self.log.info("Extract GLB successful to: {0}".format(path)) diff --git a/openpype/plugins/publish/collect_resources_path.py b/openpype/plugins/publish/collect_resources_path.py index 00f65b8b672..70610da909f 100644 --- a/openpype/plugins/publish/collect_resources_path.py +++ b/openpype/plugins/publish/collect_resources_path.py @@ -50,6 +50,7 @@ class CollectResourcesPath(pyblish.api.InstancePlugin): "source", "assembly", "fbx", + "gltf", "textures", "action", "background", diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index 401270a788d..3c781099346 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -111,6 +111,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): "image", "assembly", "fbx", + "gltf", "textures", "action", "harmony.template", diff --git a/openpype/plugins/publish/integrate_legacy.py b/openpype/plugins/publish/integrate_legacy.py index 536ab83f2c1..2a3512471c2 100644 --- a/openpype/plugins/publish/integrate_legacy.py +++ b/openpype/plugins/publish/integrate_legacy.py @@ -106,6 +106,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): "image", "assembly", "fbx", + "gltf", "textures", "action", "harmony.template", diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 988c0e777a2..3413dee83bf 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -250,6 +250,10 @@ "CollectFbxCamera": { "enabled": false }, + "CollectGLTF": { + "enabled": true, + "glb" : true + }, "ValidateInstanceInContext": { "enabled": true, "optional": true, diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json index ab8c6b885ec..3aca9b20104 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json @@ -35,6 +35,25 @@ } ] }, + { + "type": "dict", + "collapsible": true, + "key": "CollectGLTF", + "label": "Collect Assets for GLTF/GLB export", + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "type": "boolean", + "key": "glb", + "label": "Export GLB" + } + ] + }, { "type": "splitter" }, @@ -62,7 +81,7 @@ } ] }, - { + { "type": "dict", "collapsible": true, "key": "ValidateFrameRange", From 13002a39491410e27e84fd02a9f46cc19510ed52 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Sat, 26 Nov 2022 16:14:29 +0800 Subject: [PATCH 022/130] glTF-extractor-for-Maya --- openpype/hosts/maya/api/gltf.py | 62 +++++++++---------- .../maya/plugins/publish/collect_gltf.py | 1 - .../maya/plugins/publish/extract_gltf.py | 10 +-- .../defaults/project_settings/maya.json | 2 +- 4 files changed, 37 insertions(+), 38 deletions(-) diff --git a/openpype/hosts/maya/api/gltf.py 
b/openpype/hosts/maya/api/gltf.py index dd2a95a6d9b..2519bd1220d 100644 --- a/openpype/hosts/maya/api/gltf.py +++ b/openpype/hosts/maya/api/gltf.py @@ -2,43 +2,42 @@ """Tools to work with GLTF.""" import logging -from pyblish.api import Instance - from maya import cmds, mel # noqa log = logging.getLogger(__name__) _gltf_options = { - "of": str, # outputFolder - "cpr": str, # copyright - "sno": bool, # selectedNodeOnly - "sn": str, # sceneName - "glb": bool, # binary - "nbu": bool, # niceBufferURIs - "hbu": bool, # hashBufferURI - "ext": bool, # externalTextures - "ivt": int, # initialValuesTime - "acn": str, # animationClipName - "ast": int, # animationClipStartTime - "aet": int, # animationClipEndTime - "afr": float, # animationClipFrameRate - "dsa": int, # detectStepAnimations - "mpa": str, # meshPrimitiveAttributes - "bpa": str, # blendPrimitiveAttributes - "i32": bool, # force32bitIndices - "ssm": bool, # skipStandardMaterials - "eut":bool, # excludeUnusedTexcoord - "dm": bool, # defaultMaterial - "cm": bool, # colorizeMaterials - "dmy": str, # dumpMaya - "dgl": str, # dumpGLTF - "imd": str, # ignoreMeshDeformers - "ssc": bool, # skipSkinClusters - "sbs": bool, # skipBlendShapes - "rvp": bool, # redrawViewport - "vno": bool # visibleNodesOnly + "of" : str, # outputFolder + "cpr" : str, # copyright + "sno" : bool, # selectedNodeOnly + "sn" : str, # sceneName + "glb" : bool, # binary + "nbu" : bool, # niceBufferURIs + "hbu" : bool, # hashBufferURI + "ext" : bool, # externalTextures + "ivt" : int, # initialValuesTime + "acn" : str, # animationClipName + "ast" : int, # animationClipStartTime + "aet" : int, # animationClipEndTime + "afr" : float, # animationClipFrameRate + "dsa" : int, # detectStepAnimations + "mpa" : str, # meshPrimitiveAttributes + "bpa" : str, # blendPrimitiveAttributes + "i32" : bool, # force32bitIndices + "ssm" : bool, # skipStandardMaterials + "eut": bool, # excludeUnusedTexcoord + "dm" : bool, # defaultMaterial + "cm" : bool, # colorizeMaterials + "dmy" : str, # dumpMaya + "dgl" : str, # dumpGLTF + "imd" : str, # ignoreMeshDeformers + "ssc" : bool, # skipSkinClusters + "sbs" : bool, # skipBlendShapes + "rvp" : bool, # redrawViewport + "vno" : bool # visibleNodesOnly } + def extract_gltf(parent_dir, filename, **kwargs): @@ -63,6 +62,7 @@ def extract_gltf(parent_dir, log.warning("extract_gltf() does not support option '%s'. 
" "Flag will be ignored..", key) options.pop(key) + options.pop(value) continue job_args = list() @@ -84,5 +84,5 @@ def extract_gltf(parent_dir, # close the gltf export after finish the export gltf_UI = "maya2glTF_exporter_window" - if cmds.window(gltf_UI, q = True, exists =True): + if cmds.window(gltf_UI, q=True, exists=True): cmds.deleteUI(gltf_UI) diff --git a/openpype/hosts/maya/plugins/publish/collect_gltf.py b/openpype/hosts/maya/plugins/publish/collect_gltf.py index dba06dca230..bbc4e31f92d 100644 --- a/openpype/hosts/maya/plugins/publish/collect_gltf.py +++ b/openpype/hosts/maya/plugins/publish/collect_gltf.py @@ -15,4 +15,3 @@ def process(self, instance): if "fbx" not in instance.data["families"]: instance.data["families"].append("gltf") - diff --git a/openpype/hosts/maya/plugins/publish/extract_gltf.py b/openpype/hosts/maya/plugins/publish/extract_gltf.py index 1cab40d8255..f5ceed5f33f 100644 --- a/openpype/hosts/maya/plugins/publish/extract_gltf.py +++ b/openpype/hosts/maya/plugins/publish/extract_gltf.py @@ -7,6 +7,7 @@ from openpype.hosts.maya.api import lib from openpype.hosts.maya.api.gltf import extract_gltf + class ExtractGLB(publish.Extractor): order = pyblish.api.ExtractorOrder @@ -19,7 +20,6 @@ def process(self, instance): filename = "{0}.glb".format(instance.name) path = os.path.join(staging_dir, filename) - self.log.info("Extracting GLB to: {}".format(path)) nodes = instance[:] @@ -28,14 +28,14 @@ def process(self, instance): start_frame = instance.data('frameStart') or \ int(cmds.playbackOptions(query=True, - animationStartTime=True)) + animationStartTime=True))# noqa end_frame = instance.data('frameEnd') or \ int(cmds.playbackOptions(query=True, - animationEndTime=True)) + animationEndTime=True)) # noqa fps = mel.eval('currentTimeUnitToFPS()') options = { - "sno": True, #selectedNodeOnly + "sno": True, # selectedNodeOnly "nbu": True, # .bin instead of .bin0 "ast": start_frame, "aet": end_frame, @@ -43,7 +43,7 @@ def process(self, instance): "dsa": 1, "acn": instance.name, "glb": True, - "vno": True #visibleNodeOnly + "vno": True # visibleNodeOnly } with lib.maintained_selection(): cmds.select(nodes, hi=True, noExpand=True) diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 3413dee83bf..e73f73161e3 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -251,7 +251,7 @@ "enabled": false }, "CollectGLTF": { - "enabled": true, + "enabled": false, "glb" : true }, "ValidateInstanceInContext": { From 6dadff2b58ebbc14c1e3622aa866fca5abe31490 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Sat, 26 Nov 2022 16:17:09 +0800 Subject: [PATCH 023/130] glTF-extractor-for-Maya --- openpype/hosts/maya/api/gltf.py | 54 ++++++++++++++++----------------- 1 file changed, 27 insertions(+), 27 deletions(-) diff --git a/openpype/hosts/maya/api/gltf.py b/openpype/hosts/maya/api/gltf.py index 2519bd1220d..2a983f1573b 100644 --- a/openpype/hosts/maya/api/gltf.py +++ b/openpype/hosts/maya/api/gltf.py @@ -7,34 +7,34 @@ log = logging.getLogger(__name__) _gltf_options = { - "of" : str, # outputFolder - "cpr" : str, # copyright - "sno" : bool, # selectedNodeOnly - "sn" : str, # sceneName - "glb" : bool, # binary - "nbu" : bool, # niceBufferURIs - "hbu" : bool, # hashBufferURI - "ext" : bool, # externalTextures - "ivt" : int, # initialValuesTime - "acn" : str, # animationClipName - "ast" : int, # animationClipStartTime - "aet" : int, # 
animationClipEndTime - "afr" : float, # animationClipFrameRate - "dsa" : int, # detectStepAnimations - "mpa" : str, # meshPrimitiveAttributes - "bpa" : str, # blendPrimitiveAttributes - "i32" : bool, # force32bitIndices - "ssm" : bool, # skipStandardMaterials + "of": str, # outputFolder + "cpr": str, # copyright + "sno": bool, # selectedNodeOnly + "sn": str, # sceneName + "glb": bool, # binary + "nbu": bool, # niceBufferURIs + "hbu": bool, # hashBufferURI + "ext": bool, # externalTextures + "ivt": int, # initialValuesTime + "acn": str, # animationClipName + "ast": int, # animationClipStartTime + "aet": int, # animationClipEndTime + "afr": float, # animationClipFrameRate + "dsa": int, # detectStepAnimations + "mpa": str, # meshPrimitiveAttributes + "bpa": str, # blendPrimitiveAttributes + "i32": bool, # force32bitIndices + "ssm": bool, # skipStandardMaterials "eut": bool, # excludeUnusedTexcoord - "dm" : bool, # defaultMaterial - "cm" : bool, # colorizeMaterials - "dmy" : str, # dumpMaya - "dgl" : str, # dumpGLTF - "imd" : str, # ignoreMeshDeformers - "ssc" : bool, # skipSkinClusters - "sbs" : bool, # skipBlendShapes - "rvp" : bool, # redrawViewport - "vno" : bool # visibleNodesOnly + "dm": bool, # defaultMaterial + "cm": bool, # colorizeMaterials + "dmy": str, # dumpMaya + "dgl": str, # dumpGLTF + "imd": str, # ignoreMeshDeformers + "ssc": bool, # skipSkinClusters + "sbs": bool, # skipBlendShapes + "rvp": bool, # redrawViewport + "vno": bool # visibleNodesOnly } From a9e2e7392295cf8edfcfd1345c31a14b41a12939 Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Sat, 26 Nov 2022 20:24:53 +0000 Subject: [PATCH 024/130] Maintain time connections on update. --- openpype/hosts/maya/api/plugin.py | 24 ++++++++++++++++++++---- 1 file changed, 20 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/maya/api/plugin.py b/openpype/hosts/maya/api/plugin.py index 39d821f6208..985cddaa080 100644 --- a/openpype/hosts/maya/api/plugin.py +++ b/openpype/hosts/maya/api/plugin.py @@ -217,7 +217,7 @@ def update(self, container, representation): # Need to save alembic settings and reapply, cause referencing resets # them to incoming data. - alembic_attrs = ["speed", "offset", "cycleType"] + alembic_attrs = ["speed", "offset", "cycleType", "time"] alembic_data = {} if representation["name"] == "abc": alembic_nodes = cmds.ls( @@ -226,7 +226,17 @@ def update(self, container, representation): if alembic_nodes: for attr in alembic_attrs: node_attr = "{}.{}".format(alembic_nodes[0], attr) - alembic_data[attr] = cmds.getAttr(node_attr) + connections = cmds.listConnections(node_attr, plugs=True) + data = { + "connected": False, + "attribute": None, + "value": cmds.getAttr(node_attr) + } + if connections: + data["connected"] = True + data["attribute"] = connections[0] + + alembic_data[attr] = data else: self.log.debug("No alembic nodes found in {}".format(members)) @@ -263,8 +273,14 @@ def update(self, container, representation): "{}:*".format(namespace), type="AlembicNode" ) if alembic_nodes: - for attr, value in alembic_data.items(): - cmds.setAttr("{}.{}".format(alembic_nodes[0], attr), value) + for attr, data in alembic_data.items(): + node_attr = "{}.{}".format(alembic_nodes[0], attr) + if data["connected"]: + cmds.connectAttr( + data["attribute"], node_attr, force=True + ) + else: + cmds.setAttr(node_attr, data["value"]) # Fix PLN-40 for older containers created with Avalon that had the # `.verticesOnlySet` set to True. 
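Note on the connection handling introduced in PATCH 024: the update step now records, for each Alembic attribute, whether it is driven by an incoming connection, and after the reference is swapped it restores either that connection or the previously captured static value. A minimal, self-contained sketch of that save/restore pattern follows; it assumes a running Maya session (maya.cmds), and the node and attribute names are illustrative only, not the plugin's actual API.

    from maya import cmds

    def save_plug_state(node, attrs):
        """Record the incoming connection (if any) and current value per attribute."""
        state = {}
        for attr in attrs:
            plug = "{}.{}".format(node, attr)
            inputs = cmds.listConnections(plug, plugs=True, destination=False)
            state[attr] = {
                "input": inputs[0] if inputs else None,
                "value": cmds.getAttr(plug),
            }
        return state

    def restore_plug_state(node, state):
        """Reconnect a saved input; otherwise break stale inputs and set the value."""
        for attr, data in state.items():
            plug = "{}.{}".format(node, attr)
            if data["input"]:
                cmds.connectAttr(data["input"], plug, force=True)
                continue
            inputs = cmds.listConnections(plug, plugs=True, destination=False)
            if inputs:
                cmds.disconnectAttr(inputs[0], plug)
            cmds.setAttr(plug, data["value"])

    # Hypothetical usage: preserve an animated time input across a reference update.
    # state = save_plug_state("alembicNode1", ["speed", "offset", "cycleType", "time"])
    # ... update the reference ...
    # restore_plug_state("alembicNode1", state)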
From b0e8490dd0e6a06b76df0a7b8b5b68b3766c5049 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 28 Nov 2022 14:52:44 +0800 Subject: [PATCH 025/130] add proxyAbc as the family of the validator of animation out set related node ids --- .../publish/validate_animation_out_set_related_node_ids.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/validate_animation_out_set_related_node_ids.py b/openpype/hosts/maya/plugins/publish/validate_animation_out_set_related_node_ids.py index 649913fff6f..5a527031bed 100644 --- a/openpype/hosts/maya/plugins/publish/validate_animation_out_set_related_node_ids.py +++ b/openpype/hosts/maya/plugins/publish/validate_animation_out_set_related_node_ids.py @@ -20,7 +20,7 @@ class ValidateOutRelatedNodeIds(pyblish.api.InstancePlugin): """ order = ValidateContentsOrder - families = ['animation', "pointcache"] + families = ['animation', "pointcache", "proxyAbc"] hosts = ['maya'] label = 'Animation Out Set Related Node Ids' actions = [ From 0c54d8fcad1babdd9c03891e695db8da46eb7a51 Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Mon, 28 Nov 2022 10:15:30 +0000 Subject: [PATCH 026/130] Enable thumbnail transparency on extraction. --- openpype/hosts/maya/plugins/publish/extract_thumbnail.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/openpype/hosts/maya/plugins/publish/extract_thumbnail.py b/openpype/hosts/maya/plugins/publish/extract_thumbnail.py index 712159c2bed..311278145a0 100644 --- a/openpype/hosts/maya/plugins/publish/extract_thumbnail.py +++ b/openpype/hosts/maya/plugins/publish/extract_thumbnail.py @@ -105,6 +105,11 @@ def process(self, instance): pm.currentTime(refreshFrameInt - 1, edit=True) pm.currentTime(refreshFrameInt, edit=True) + # Override transparency if requested. + transparency = instance.data.get("transparency", 0) + if transparency != 0: + preset["viewport2_options"]["transparencyAlgorithm"] = transparency + # Isolate view is requested by having objects in the set besides a # camera. 
if preset.pop("isolate_view", False) and instance.data.get("isolate"): From 37535f35bdc5792e5ac3b0c5acef52cc8ad8c5dd Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Tue, 29 Nov 2022 21:56:34 +0800 Subject: [PATCH 027/130] gltf extractor for Maya --- openpype/settings/defaults/project_settings/maya.json | 3 +-- .../schemas/projects_schema/schemas/schema_maya_publish.json | 5 ----- 2 files changed, 1 insertion(+), 7 deletions(-) diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index e73f73161e3..59e71b2e29e 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -251,8 +251,7 @@ "enabled": false }, "CollectGLTF": { - "enabled": false, - "glb" : true + "enabled": false }, "ValidateInstanceInContext": { "enabled": true, diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json index 3aca9b20104..e7a56d0749f 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json @@ -46,11 +46,6 @@ "type": "boolean", "key": "enabled", "label": "Enabled" - }, - { - "type": "boolean", - "key": "glb", - "label": "Export GLB" } ] }, From 9a439d408bbdb1ed2b32a61b1ab63f1740a72f65 Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Wed, 30 Nov 2022 17:36:51 +0000 Subject: [PATCH 028/130] Improvements - account for time attribute reconnection. - simpler data collection --- openpype/hosts/maya/api/plugin.py | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/openpype/hosts/maya/api/plugin.py b/openpype/hosts/maya/api/plugin.py index 985cddaa080..66b525bad15 100644 --- a/openpype/hosts/maya/api/plugin.py +++ b/openpype/hosts/maya/api/plugin.py @@ -226,15 +226,13 @@ def update(self, container, representation): if alembic_nodes: for attr in alembic_attrs: node_attr = "{}.{}".format(alembic_nodes[0], attr) - connections = cmds.listConnections(node_attr, plugs=True) + inputs = cmds.listConnections( + node_attr, plugs=True, destination=False + ) data = { - "connected": False, - "attribute": None, + "input": None if inputs is None else inputs[0], "value": cmds.getAttr(node_attr) } - if connections: - data["connected"] = True - data["attribute"] = connections[0] alembic_data[attr] = data else: @@ -275,11 +273,16 @@ def update(self, container, representation): if alembic_nodes: for attr, data in alembic_data.items(): node_attr = "{}.{}".format(alembic_nodes[0], attr) - if data["connected"]: + if data["input"]: cmds.connectAttr( - data["attribute"], node_attr, force=True + data["input"], node_attr, force=True ) else: + inputs = cmds.listConnections( + node_attr, plugs=True, destination=False + ) + if inputs: + cmds.disconnectAttr(inputs[0], node_attr) cmds.setAttr(node_attr, data["value"]) # Fix PLN-40 for older containers created with Avalon that had the From aa6425cbf1cabc6b988b9008ac7a36151a7fed77 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 1 Dec 2022 17:56:13 +0100 Subject: [PATCH 029/130] don't create qapplication if already exists --- openpype/tools/settings/__init__.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/tools/settings/__init__.py b/openpype/tools/settings/__init__.py index 3e77a8348a8..67e509f116a 100644 --- 
a/openpype/tools/settings/__init__.py +++ b/openpype/tools/settings/__init__.py @@ -24,7 +24,9 @@ def main(user_role=None): user_role, ", ".join(allowed_roles) )) - app = QtWidgets.QApplication(sys.argv) + app = QtWidgets.QApplication.instance() + if not app: + app = QtWidgets.QApplication(sys.argv) app.setWindowIcon(QtGui.QIcon(style.app_icon_path())) widget = MainWidget(user_role) From b5a5c72d896eae245adcf43e9a09c2e1f031ac44 Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Fri, 2 Dec 2022 10:03:56 +0000 Subject: [PATCH 030/130] Comments to resolve. --- openpype/hosts/maya/api/mtoa.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/mtoa.py b/openpype/hosts/maya/api/mtoa.py index 6b9b1d6d443..d19fecf6b5b 100644 --- a/openpype/hosts/maya/api/mtoa.py +++ b/openpype/hosts/maya/api/mtoa.py @@ -56,7 +56,8 @@ def get_ass_export_mask(self, maya_set): return mask def process(self, instance): - + #What is a dry run? + #ass.rr seems like an abstract variable. Needs clarification. dry_run = instance.data.get("ass.rr") staging_dir = self.staging_dir(instance) @@ -92,6 +93,7 @@ def process(self, instance): else: mask = 44 + #get/set should be plugin options. # Generic options if self.get_set_attr("{}.inf_ass_expand_procedurals".format(set_), False): @@ -108,6 +110,7 @@ def process(self, instance): keys = self.get_set_attr("{}.inf_ass_mb_keys".format(set_), -1) length = self.get_set_attr("{}.inf_ass_mb_length".format(set_), -1) + #Targets should already be collected targets = self.get_targets(instance) _sorted_kwargs = sorted(kwargs.items(), key=lambda x: x[0]) @@ -116,6 +119,8 @@ def process(self, instance): if not dry_run: self.log.debug("Running command: cmds.arnoldExportAss({})" .format(", ".join(_sorted_kwargs))) + #There should be a context for not updating the viewport from + #pointcache extraction. with vp2_paused_context(): with selection(targets): with self.motion_blur_ctx(mb, keys, length): @@ -131,11 +136,14 @@ def process(self, instance): range_ = range(int(start), int(end) + 1) for i in range_: + #padding amount should be configurable. 3 does not seems + #enough as default. fp = "{}.{:03d}.ass".format(export_path.name, i) with open(fp, "w"): pass result.append(fp) + #Whether its a sequence or not, should already have been determined. 
if len(result) == 1: filepath = result[0] else: From 103fd66282217ede3a69c04d9e251e23a6a4dbbd Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 2 Dec 2022 19:53:54 +0800 Subject: [PATCH 031/130] layout publish more than one container issue --- .../maya/plugins/publish/extract_layout.py | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_layout.py b/openpype/hosts/maya/plugins/publish/extract_layout.py index f77835d47fa..a11652feb30 100644 --- a/openpype/hosts/maya/plugins/publish/extract_layout.py +++ b/openpype/hosts/maya/plugins/publish/extract_layout.py @@ -39,15 +39,17 @@ def process(self, instance): assert len(container_list) == 1, \ "Please create instance with loaded asset" containers = cmds.sets(project_container, query=True) - load_asset = asset.split(':')[0] - for con in containers: - ass_transform = cmds.listRelatives(con, allParents=True)[0] - if load_asset not in ass_transform: - assert containers == [], \ - "No container found for {}".format(asset) - if "_CON" not in con: + # list the children of the containers + ass_transform = cmds.listRelatives(containers, allParents=True) + ass = cmds.listRelatives(asset, children=True, type="transform") + # compare the group of the asset with + # the children list of the container + # to find the content which is not loaded from the loader + for a in ass: + if a not in ass_transform: assert containers == [], \ - "Container missing for {}".format(asset) + "no container found in {}".format(a) + for con in containers: container = con representation_id = cmds.getAttr( From debcf19e68ac44f139ebf22038f487fbb790975a Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 2 Dec 2022 23:32:49 +0800 Subject: [PATCH 032/130] fix the ancestor issues for bbox selection --- .../maya/plugins/create/create_proxy_abc.py | 2 -- .../maya/plugins/publish/extract_proxy_abc.py | 22 ++++++------------- 2 files changed, 7 insertions(+), 17 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_proxy_abc.py b/openpype/hosts/maya/plugins/create/create_proxy_abc.py index 4401f3c04ff..2d81cb663b2 100644 --- a/openpype/hosts/maya/plugins/create/create_proxy_abc.py +++ b/openpype/hosts/maya/plugins/create/create_proxy_abc.py @@ -24,8 +24,6 @@ def __init__(self, *args, **kwargs): self.data["writeColorSets"] = self.write_color_sets # Vertex colors with the geometry. 
self.data["writeFaceSets"] = self.write_face_sets - # only nodes which are visible - self.data["visibleOnly"] = False # Default to exporting world-space self.data["worldSpace"] = True diff --git a/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py b/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py index f65626e9154..fd70c8506b4 100644 --- a/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py +++ b/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py @@ -92,12 +92,10 @@ def process(self, instance): def create_proxy_geometry(self, instance, name_suffix, start, end): nodes = instance[:] - if instance.data.get("visibleOnly", False): - nodes = list(iter_visible_nodes_in_range(nodes, - start=start, - end=end)) + nodes = list(iter_visible_nodes_in_range(nodes, + start=start, + end=end)) inst_selection = cmds.ls(nodes, long=True) - proxy_root = [] bbox = cmds.geomToBBox(inst_selection, nameSuffix=name_suffix, keepOriginal=True, @@ -105,13 +103,7 @@ def create_proxy_geometry(self, instance, name_suffix, start, end): bakeAnimation=True, startTime=start, endTime=end) - for b in bbox: - dep_node = cmds.ls(b, dag=True, shapes=False, - noIntermediate=True, sn=True) - - for dep in dep_node: - if "Shape" in dep: - continue - proxy_root.append(dep) - self.log.debug("proxy_root: {}".format(proxy_root)) - return proxy_root + bbox_sel = cmds.ls(sl=True, long=True) + # bbox_sel = cmds.listRelatives(allDescendents=True, fullPath=True, type="mesh") + self.log.debug("proxy_root: {}".format(bbox_sel)) + return bbox_sel From f96e4f162633425c00b077043624c09982a44d3d Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Sat, 3 Dec 2022 00:20:18 +0800 Subject: [PATCH 033/130] layout publish more than one container issue --- .../maya/plugins/publish/extract_layout.py | 17 ++++++----------- 1 file changed, 6 insertions(+), 11 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_layout.py b/openpype/hosts/maya/plugins/publish/extract_layout.py index a11652feb30..e0bf1588518 100644 --- a/openpype/hosts/maya/plugins/publish/extract_layout.py +++ b/openpype/hosts/maya/plugins/publish/extract_layout.py @@ -37,19 +37,14 @@ def process(self, instance): project_container = self.project_container container_list = cmds.ls(project_container) assert len(container_list) == 1, \ - "Please create instance with loaded asset" - containers = cmds.sets(project_container, query=True) + "Please create instance with loaded asset!" # list the children of the containers - ass_transform = cmds.listRelatives(containers, allParents=True) - ass = cmds.listRelatives(asset, children=True, type="transform") - # compare the group of the asset with - # the children list of the container - # to find the content which is not loaded from the loader - for a in ass: - if a not in ass_transform: + grp_name = asset.split(':')[0] + con_sel = cmds.ls("{}*_CON".format(grp_name)) + if not con_sel: assert containers == [], \ - "no container found in {}".format(a) - for con in containers: + "Use all loaded contents without renaming and grouping!" 
# noqa + for con in con_sel: container = con representation_id = cmds.getAttr( From f50fef2be8829e273ac08d85a431c1c1352e2b39 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Sat, 3 Dec 2022 00:21:31 +0800 Subject: [PATCH 034/130] layout publish more than one container issue --- openpype/hosts/maya/plugins/publish/extract_layout.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_layout.py b/openpype/hosts/maya/plugins/publish/extract_layout.py index e0bf1588518..67a4bc564ea 100644 --- a/openpype/hosts/maya/plugins/publish/extract_layout.py +++ b/openpype/hosts/maya/plugins/publish/extract_layout.py @@ -40,11 +40,11 @@ def process(self, instance): "Please create instance with loaded asset!" # list the children of the containers grp_name = asset.split(':')[0] - con_sel = cmds.ls("{}*_CON".format(grp_name)) - if not con_sel: + containers = cmds.ls("{}*_CON".format(grp_name)) + if not containers: assert containers == [], \ "Use all loaded contents without renaming and grouping!" # noqa - for con in con_sel: + for con in containers: container = con representation_id = cmds.getAttr( From 99c7faf78ff9e1ab2d7692a16f3769074ed895f4 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Sat, 3 Dec 2022 00:24:29 +0800 Subject: [PATCH 035/130] layer publish more than one container issue --- openpype/hosts/maya/plugins/publish/extract_layout.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_layout.py b/openpype/hosts/maya/plugins/publish/extract_layout.py index 67a4bc564ea..bf41ca65ba5 100644 --- a/openpype/hosts/maya/plugins/publish/extract_layout.py +++ b/openpype/hosts/maya/plugins/publish/extract_layout.py @@ -40,11 +40,11 @@ def process(self, instance): "Please create instance with loaded asset!" # list the children of the containers grp_name = asset.split(':')[0] - containers = cmds.ls("{}*_CON".format(grp_name)) - if not containers: - assert containers == [], \ + container_sel = cmds.ls("{}*_CON".format(grp_name)) + if not container_sel: + assert container_sel == [], \ "Use all loaded contents without renaming and grouping!" # noqa - for con in containers: + for con in container_sel: container = con representation_id = cmds.getAttr( From 632ee268e172256b63eb88d081abcfe1bbbdee00 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Sat, 3 Dec 2022 00:26:25 +0800 Subject: [PATCH 036/130] layer publish more than one container issue --- openpype/hosts/maya/plugins/publish/extract_layout.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_layout.py b/openpype/hosts/maya/plugins/publish/extract_layout.py index bf41ca65ba5..4ad2248d625 100644 --- a/openpype/hosts/maya/plugins/publish/extract_layout.py +++ b/openpype/hosts/maya/plugins/publish/extract_layout.py @@ -42,8 +42,8 @@ def process(self, instance): grp_name = asset.split(':')[0] container_sel = cmds.ls("{}*_CON".format(grp_name)) if not container_sel: - assert container_sel == [], \ - "Use all loaded contents without renaming and grouping!" # noqa + assert container_sel == [], \ + "Use all loaded contents without renaming and grouping!" 
# noqa for con in container_sel: container = con From d57fdcf7971bd99422b676a2a9074b2f7eb4e767 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Sun, 4 Dec 2022 19:16:01 +0800 Subject: [PATCH 037/130] layout publish more than one container issue --- .../maya/plugins/create/create_layout.py | 7 +++++++ .../maya/plugins/publish/extract_layout.py | 21 ++++++++++++------- 2 files changed, 20 insertions(+), 8 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_layout.py b/openpype/hosts/maya/plugins/create/create_layout.py index 6dc87430aa0..9fc0c5c4bce 100644 --- a/openpype/hosts/maya/plugins/create/create_layout.py +++ b/openpype/hosts/maya/plugins/create/create_layout.py @@ -8,3 +8,10 @@ class CreateLayout(plugin.Creator): label = "Layout" family = "layout" icon = "cubes" + def __init__(self, *args, **kwargs): + super(CreateLayout, self).__init__(*args, **kwargs) + + + # enable this when you want to + # publish group of loaded asset + self.data["groupLoadedAssets"] = False diff --git a/openpype/hosts/maya/plugins/publish/extract_layout.py b/openpype/hosts/maya/plugins/publish/extract_layout.py index 4ad2248d625..6b9af68997e 100644 --- a/openpype/hosts/maya/plugins/publish/extract_layout.py +++ b/openpype/hosts/maya/plugins/publish/extract_layout.py @@ -38,14 +38,19 @@ def process(self, instance): container_list = cmds.ls(project_container) assert len(container_list) == 1, \ "Please create instance with loaded asset!" - # list the children of the containers - grp_name = asset.split(':')[0] - container_sel = cmds.ls("{}*_CON".format(grp_name)) - if not container_sel: - assert container_sel == [], \ - "Use all loaded contents without renaming and grouping!" # noqa - for con in container_sel: - container = con + + grp_loaded_ass = instance.data.get("groupLoadedAssets", False) + if grp_loaded_ass: + asset_list = cmds.listRelatives(asset, children=True) + for asset in asset_list: + grp_name = asset.split(':')[0] + else: + grp_name = asset.split(':')[0] + containers = cmds.ls("{}*_CON".format(grp_name)) + assert len(containers) > 0, \ + "Use all loaded contents without renaming" \ + "(and/or grouping if groupLoadedAssets disabled)" # noqa + container = containers[0] representation_id = cmds.getAttr( "{}.representation".format(container)) From 9f7377c304b20dc954f5d0d91c1d1831cdfd1c2e Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Sun, 4 Dec 2022 19:16:51 +0800 Subject: [PATCH 038/130] layout publish more than one container issue --- openpype/hosts/maya/plugins/create/create_layout.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/hosts/maya/plugins/create/create_layout.py b/openpype/hosts/maya/plugins/create/create_layout.py index 9fc0c5c4bce..6f5b3636933 100644 --- a/openpype/hosts/maya/plugins/create/create_layout.py +++ b/openpype/hosts/maya/plugins/create/create_layout.py @@ -8,6 +8,7 @@ class CreateLayout(plugin.Creator): label = "Layout" family = "layout" icon = "cubes" + def __init__(self, *args, **kwargs): super(CreateLayout, self).__init__(*args, **kwargs) From 5ed958056a079e5090b75ba1050f6c6bb6c3bcac Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Sun, 4 Dec 2022 19:18:04 +0800 Subject: [PATCH 039/130] layout publish more than one container issue --- openpype/hosts/maya/plugins/create/create_layout.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_layout.py b/openpype/hosts/maya/plugins/create/create_layout.py index 6f5b3636933..1768a3d49ef 100644 --- a/openpype/hosts/maya/plugins/create/create_layout.py +++ 
b/openpype/hosts/maya/plugins/create/create_layout.py @@ -11,8 +11,6 @@ class CreateLayout(plugin.Creator): def __init__(self, *args, **kwargs): super(CreateLayout, self).__init__(*args, **kwargs) - - # enable this when you want to # publish group of loaded asset self.data["groupLoadedAssets"] = False From 7772ac7ea9125e2d06d5e31cf5771bb7e2211ca1 Mon Sep 17 00:00:00 2001 From: 2-REC Date: Mon, 5 Dec 2022 15:58:37 +0700 Subject: [PATCH 040/130] Added missing parenthesis --- .../event_handlers_server/event_first_version_status.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/modules/ftrack/event_handlers_server/event_first_version_status.py b/openpype/modules/ftrack/event_handlers_server/event_first_version_status.py index ecc6c95d90f..8ef333effd3 100644 --- a/openpype/modules/ftrack/event_handlers_server/event_first_version_status.py +++ b/openpype/modules/ftrack/event_handlers_server/event_first_version_status.py @@ -135,9 +135,9 @@ def launch(self, session, event): new_status = asset_version_statuses.get(found_item["status"]) if not new_status: - self.log.warning( + self.log.warning(( "AssetVersion doesn't have status `{}`." - ).format(found_item["status"]) + ).format(found_item["status"])) continue try: From f9680ccb78ebf2a1addd4f6b76a3cc776f09aff4 Mon Sep 17 00:00:00 2001 From: 2-REC Date: Mon, 5 Dec 2022 16:26:01 +0700 Subject: [PATCH 041/130] Removed extra quotes --- openpype/settings/defaults/project_settings/maya.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 988c0e777a2..f0a38eee296 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -915,7 +915,7 @@ "current_context": [ { "subset_name_filters": [ - "\".+[Mm]ain\"" + ".+[Mm]ain" ], "families": [ "model" From 605432cc5db24201a124a4bf220200f672f502bf Mon Sep 17 00:00:00 2001 From: 2-REC Date: Mon, 5 Dec 2022 16:30:01 +0700 Subject: [PATCH 042/130] Removed duplicate command --- openpype/tools/standalonepublish/widgets/widget_family.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/openpype/tools/standalonepublish/widgets/widget_family.py b/openpype/tools/standalonepublish/widgets/widget_family.py index eab66d75b39..bd984942b80 100644 --- a/openpype/tools/standalonepublish/widgets/widget_family.py +++ b/openpype/tools/standalonepublish/widgets/widget_family.py @@ -194,9 +194,6 @@ def _on_data_changed(self): project_name, asset_name, fields=["_id"] ) - # Get plugin and family - plugin = item.data(PluginRole) - # Early exit if no asset name if not asset_name.strip(): self._build_menu([]) From ba149de263a5edef4468e9822ee6e44b2f557789 Mon Sep 17 00:00:00 2001 From: 2-REC Date: Mon, 5 Dec 2022 16:55:07 +0700 Subject: [PATCH 043/130] Check asset is 'not selected' --- .../standalonepublish/widgets/widget_family.py | 15 ++++++--------- 1 file changed, 6 insertions(+), 9 deletions(-) diff --git a/openpype/tools/standalonepublish/widgets/widget_family.py b/openpype/tools/standalonepublish/widgets/widget_family.py index bd984942b80..e1cbb8d3971 100644 --- a/openpype/tools/standalonepublish/widgets/widget_family.py +++ b/openpype/tools/standalonepublish/widgets/widget_family.py @@ -186,16 +186,11 @@ def _on_data_changed(self): if item is None: return - asset_doc = None - if asset_name != self.NOT_SELECTED: - # Get the assets from the database which match with the name - project_name = 
self.dbcon.active_project() - asset_doc = get_asset_by_name( - project_name, asset_name, fields=["_id"] - ) - # Early exit if no asset name - if not asset_name.strip(): + if ( + asset_name == self.NOT_SELECTED + or not asset_name.strip() + ): self._build_menu([]) item.setData(ExistsRole, False) print("Asset name is required ..") @@ -207,8 +202,10 @@ def _on_data_changed(self): asset_doc = get_asset_by_name( project_name, asset_name, fields=["_id"] ) + # Get plugin plugin = item.data(PluginRole) + if asset_doc and plugin: asset_id = asset_doc["_id"] task_name = self.dbcon.Session["AVALON_TASK"] From a4fc2913b49e7430638701ed89cff1eae77248c1 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 5 Dec 2022 18:43:13 +0800 Subject: [PATCH 044/130] layout publish more than one container issue --- .../hosts/maya/plugins/publish/extract_layout.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_layout.py b/openpype/hosts/maya/plugins/publish/extract_layout.py index 6b9af68997e..7921fca069d 100644 --- a/openpype/hosts/maya/plugins/publish/extract_layout.py +++ b/openpype/hosts/maya/plugins/publish/extract_layout.py @@ -36,8 +36,10 @@ def process(self, instance): # Find the container project_container = self.project_container container_list = cmds.ls(project_container) - assert len(container_list) == 1, \ - "Please create instance with loaded asset!" + if len(container_list) == 0: + self.log.warning("Project container is not found!") + self.log.warning("The asset(s) may not be properly loaded after published") # noqa + continue grp_loaded_ass = instance.data.get("groupLoadedAssets", False) if grp_loaded_ass: @@ -47,9 +49,10 @@ def process(self, instance): else: grp_name = asset.split(':')[0] containers = cmds.ls("{}*_CON".format(grp_name)) - assert len(containers) > 0, \ - "Use all loaded contents without renaming" \ - "(and/or grouping if groupLoadedAssets disabled)" # noqa + if len(containers) == 0: + self.log.warning("{} isn't from the loader".format(asset)) + self.log.warning("It may not be properly loaded after published") # noqa + continue container = containers[0] representation_id = cmds.getAttr( From ef02b58ea9903a77b33466706dc0eea00e98f80c Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Mon, 5 Dec 2022 12:36:46 +0000 Subject: [PATCH 045/130] Renaming some unreal loaders --- ...{load_alembic_geometrycache.py => load_geometrycache_abc.py} | 0 .../{load_alembic_skeletalmesh.py => load_skeletalmesh_abc.py} | 2 +- .../plugins/load/{load_rig.py => load_skeletalmesh_fbx.py} | 0 .../load/{load_alembic_staticmesh.py => load_staticmesh_abc.py} | 2 +- .../load/{load_staticmeshfbx.py => load_staticmesh_fbx.py} | 0 5 files changed, 2 insertions(+), 2 deletions(-) rename openpype/hosts/unreal/plugins/load/{load_alembic_geometrycache.py => load_geometrycache_abc.py} (100%) rename openpype/hosts/unreal/plugins/load/{load_alembic_skeletalmesh.py => load_skeletalmesh_abc.py} (99%) rename openpype/hosts/unreal/plugins/load/{load_rig.py => load_skeletalmesh_fbx.py} (100%) rename openpype/hosts/unreal/plugins/load/{load_alembic_staticmesh.py => load_staticmesh_abc.py} (99%) rename openpype/hosts/unreal/plugins/load/{load_staticmeshfbx.py => load_staticmesh_fbx.py} (100%) diff --git a/openpype/hosts/unreal/plugins/load/load_alembic_geometrycache.py b/openpype/hosts/unreal/plugins/load/load_geometrycache_abc.py similarity index 100% rename from openpype/hosts/unreal/plugins/load/load_alembic_geometrycache.py rename to 
openpype/hosts/unreal/plugins/load/load_geometrycache_abc.py diff --git a/openpype/hosts/unreal/plugins/load/load_alembic_skeletalmesh.py b/openpype/hosts/unreal/plugins/load/load_skeletalmesh_abc.py similarity index 99% rename from openpype/hosts/unreal/plugins/load/load_alembic_skeletalmesh.py rename to openpype/hosts/unreal/plugins/load/load_skeletalmesh_abc.py index 9fe5f3ab4bb..e316d255e9d 100644 --- a/openpype/hosts/unreal/plugins/load/load_alembic_skeletalmesh.py +++ b/openpype/hosts/unreal/plugins/load/load_skeletalmesh_abc.py @@ -14,7 +14,7 @@ class SkeletalMeshAlembicLoader(plugin.Loader): """Load Unreal SkeletalMesh from Alembic""" - families = ["pointcache"] + families = ["pointcache", "skeletalMesh"] label = "Import Alembic Skeletal Mesh" representations = ["abc"] icon = "cube" diff --git a/openpype/hosts/unreal/plugins/load/load_rig.py b/openpype/hosts/unreal/plugins/load/load_skeletalmesh_fbx.py similarity index 100% rename from openpype/hosts/unreal/plugins/load/load_rig.py rename to openpype/hosts/unreal/plugins/load/load_skeletalmesh_fbx.py diff --git a/openpype/hosts/unreal/plugins/load/load_alembic_staticmesh.py b/openpype/hosts/unreal/plugins/load/load_staticmesh_abc.py similarity index 99% rename from openpype/hosts/unreal/plugins/load/load_alembic_staticmesh.py rename to openpype/hosts/unreal/plugins/load/load_staticmesh_abc.py index a5b9cbd1fc5..c7841cef53a 100644 --- a/openpype/hosts/unreal/plugins/load/load_alembic_staticmesh.py +++ b/openpype/hosts/unreal/plugins/load/load_staticmesh_abc.py @@ -14,7 +14,7 @@ class StaticMeshAlembicLoader(plugin.Loader): """Load Unreal StaticMesh from Alembic""" - families = ["model"] + families = ["model", "staticMesh"] label = "Import Alembic Static Mesh" representations = ["abc"] icon = "cube" diff --git a/openpype/hosts/unreal/plugins/load/load_staticmeshfbx.py b/openpype/hosts/unreal/plugins/load/load_staticmesh_fbx.py similarity index 100% rename from openpype/hosts/unreal/plugins/load/load_staticmeshfbx.py rename to openpype/hosts/unreal/plugins/load/load_staticmesh_fbx.py From 012f1097862da05150b26e49bec86f66c7f142a1 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Mon, 5 Dec 2022 12:38:31 +0000 Subject: [PATCH 046/130] Renaming current SkeletalMesh extractor to be FBX specific --- ...al_skeletalmesh.py => extract_unreal_skeletalmesh_fbx.py} | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) rename openpype/hosts/maya/plugins/publish/{extract_unreal_skeletalmesh.py => extract_unreal_skeletalmesh_fbx.py} (95%) diff --git a/openpype/hosts/maya/plugins/publish/extract_unreal_skeletalmesh.py b/openpype/hosts/maya/plugins/publish/extract_unreal_skeletalmesh_fbx.py similarity index 95% rename from openpype/hosts/maya/plugins/publish/extract_unreal_skeletalmesh.py rename to openpype/hosts/maya/plugins/publish/extract_unreal_skeletalmesh_fbx.py index 258120db2fe..b162ce47f77 100644 --- a/openpype/hosts/maya/plugins/publish/extract_unreal_skeletalmesh.py +++ b/openpype/hosts/maya/plugins/publish/extract_unreal_skeletalmesh_fbx.py @@ -21,12 +21,13 @@ def renamed(original_name, renamed_name): cmds.rename(renamed_name, original_name) -class ExtractUnrealSkeletalMesh(publish.Extractor): +class ExtractUnrealSkeletalMeshFbx(publish.Extractor): """Extract Unreal Skeletal Mesh as FBX from Maya. 
""" order = pyblish.api.ExtractorOrder - 0.1 - label = "Extract Unreal Skeletal Mesh" + label = "Extract Unreal Skeletal Mesh - FBX" families = ["skeletalMesh"] + optional = True def process(self, instance): fbx_exporter = fbx.FBXExtractor(log=self.log) From 84689e086604058d93a88564ad2b669c50b5de68 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Mon, 5 Dec 2022 12:39:40 +0000 Subject: [PATCH 047/130] Implemented the extractor for Alembic SkeletalMesh --- .../create/create_unreal_skeletalmesh.py | 16 +++ .../extract_unreal_skeletalmesh_abc.py | 111 ++++++++++++++++++ 2 files changed, 127 insertions(+) create mode 100644 openpype/hosts/maya/plugins/publish/extract_unreal_skeletalmesh_abc.py diff --git a/openpype/hosts/maya/plugins/create/create_unreal_skeletalmesh.py b/openpype/hosts/maya/plugins/create/create_unreal_skeletalmesh.py index 1a8e84c80d3..424f4563100 100644 --- a/openpype/hosts/maya/plugins/create/create_unreal_skeletalmesh.py +++ b/openpype/hosts/maya/plugins/create/create_unreal_skeletalmesh.py @@ -48,3 +48,19 @@ def process(self): cmds.sets(node, forceElement=joints_set) else: cmds.sets(node, forceElement=geometry_set) + + # Add animation data + self.data.update(lib.collect_animation_data()) + + # Only renderable visible shapes + self.data["renderableOnly"] = False + # only nodes that are visible + self.data["visibleOnly"] = False + # Include parent groups + self.data["includeParentHierarchy"] = False + # Default to exporting world-space + self.data["worldSpace"] = True + + # Add options for custom attributes + self.data["attr"] = "" + self.data["attrPrefix"] = "" diff --git a/openpype/hosts/maya/plugins/publish/extract_unreal_skeletalmesh_abc.py b/openpype/hosts/maya/plugins/publish/extract_unreal_skeletalmesh_abc.py new file mode 100644 index 00000000000..9ce904b86d6 --- /dev/null +++ b/openpype/hosts/maya/plugins/publish/extract_unreal_skeletalmesh_abc.py @@ -0,0 +1,111 @@ +# -*- coding: utf-8 -*- +"""Create Unreal Skeletal Mesh data to be extracted as FBX.""" +import os +from contextlib import contextmanager + +from maya import cmds # noqa + +import pyblish.api + +from openpype.pipeline import publish +from openpype.hosts.maya.api.lib import ( + extract_alembic, + suspended_refresh, + maintained_selection, + iter_visible_nodes_in_range +) + + +@contextmanager +def renamed(original_name, renamed_name): + # type: (str, str) -> None + try: + cmds.rename(original_name, renamed_name) + yield + finally: + cmds.rename(renamed_name, original_name) + + +class ExtractUnrealSkeletalMeshAbc(publish.Extractor): + """Extract Unreal Skeletal Mesh as FBX from Maya. 
""" + + label = "Extract Unreal Skeletal Mesh - Alembic" + hosts = ["maya"] + families = ["skeletalMesh"] + optional = True + + def process(self, instance): + self.log.info("Extracting pointcache..") + + geo = cmds.listRelatives( + instance.data.get("geometry"), allDescendents=True, fullPath=True) + joints = cmds.listRelatives( + instance.data.get("joints"), allDescendents=True, fullPath=True) + + nodes = geo + joints + + attrs = instance.data.get("attr", "").split(";") + attrs = [value for value in attrs if value.strip()] + attrs += ["cbId"] + + attr_prefixes = instance.data.get("attrPrefix", "").split(";") + attr_prefixes = [value for value in attr_prefixes if value.strip()] + + # Define output path + staging_dir = self.staging_dir(instance) + filename = "{0}.abc".format(instance.name) + path = os.path.join(staging_dir, filename) + + # The export requires forward slashes because we need + # to format it into a string in a mel expression + path = path.replace('\\', '/') + + self.log.info("Extracting ABC to: {0}".format(path)) + self.log.info("Members: {0}".format(nodes)) + self.log.info("Instance: {0}".format(instance[:])) + + options = { + "step": instance.data.get("step", 1.0), + "attr": attrs, + "attrPrefix": attr_prefixes, + "writeVisibility": True, + "writeCreases": True, + "writeColorSets": instance.data.get("writeColorSets", False), + "writeFaceSets": instance.data.get("writeFaceSets", False), + "uvWrite": True, + "selection": True, + "worldSpace": instance.data.get("worldSpace", True) + } + + self.log.info("Options: {}".format(options)) + + if int(cmds.about(version=True)) >= 2017: + # Since Maya 2017 alembic supports multiple uv sets - write them. + options["writeUVSets"] = True + + if not instance.data.get("includeParentHierarchy", True): + # Set the root nodes if we don't want to include parents + # The roots are to be considered the ones that are the actual + # direct members of the set + options["root"] = instance.data.get("setMembers") + + with suspended_refresh(suspend=instance.data.get("refresh", False)): + with maintained_selection(): + cmds.select(nodes, noExpand=True) + extract_alembic(file=path, + # startFrame=start, + # endFrame=end, + **options) + + if "representations" not in instance.data: + instance.data["representations"] = [] + + representation = { + 'name': 'abc', + 'ext': 'abc', + 'files': filename, + "stagingDir": staging_dir, + } + instance.data["representations"].append(representation) + + self.log.info("Extract ABC successful to: {0}".format(path)) From 7061b44d236baad6bc6a4a9f3b814e273dbb00bd Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Mon, 5 Dec 2022 12:40:07 +0000 Subject: [PATCH 048/130] Fixed validator for the hierarchy to consider both geometry and joints --- .../maya/plugins/publish/validate_skeletalmesh_hierarchy.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/validate_skeletalmesh_hierarchy.py b/openpype/hosts/maya/plugins/publish/validate_skeletalmesh_hierarchy.py index 8221c18b177..398b6fb7bff 100644 --- a/openpype/hosts/maya/plugins/publish/validate_skeletalmesh_hierarchy.py +++ b/openpype/hosts/maya/plugins/publish/validate_skeletalmesh_hierarchy.py @@ -28,7 +28,9 @@ def process(self, instance): parent.split("|")[1] for parent in (joints_parents + geo_parents) } - if len(set(parents_set)) != 1: + self.log.info(parents_set) + + if len(set(parents_set)) > 2: raise PublishXmlValidationError( self, "Multiple roots on geometry or joints." 
From 1eba935287fea5a876b56af583eec47d99ad7252 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Mon, 5 Dec 2022 12:40:35 +0000 Subject: [PATCH 049/130] Implemented validator to check if the mesh is triangulated --- .../validate_skeletalmesh_triangulated.py | 60 +++++++++++++++++++ 1 file changed, 60 insertions(+) create mode 100644 openpype/hosts/maya/plugins/publish/validate_skeletalmesh_triangulated.py diff --git a/openpype/hosts/maya/plugins/publish/validate_skeletalmesh_triangulated.py b/openpype/hosts/maya/plugins/publish/validate_skeletalmesh_triangulated.py new file mode 100644 index 00000000000..c0a9ddcf69e --- /dev/null +++ b/openpype/hosts/maya/plugins/publish/validate_skeletalmesh_triangulated.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +import pyblish.api + +from openpype.hosts.maya.api.action import ( + SelectInvalidAction, +) +from openpype.pipeline.publish import ( + RepairAction, + ValidateContentsOrder, +) + +from maya import cmds + + +class ValidateSkeletalMeshTriangulated(pyblish.api.InstancePlugin): + """Validates that the geometry has been triangulated.""" + + order = ValidateContentsOrder + hosts = ["maya"] + families = ["skeletalMesh"] + label = "Skeletal Mesh Triangulated" + optional = True + actions = [ + SelectInvalidAction, + RepairAction + ] + + def process(self, instance): + invalid = self.get_invalid(instance) + if invalid: + raise RuntimeError( + "The following objects needs to be triangulated: " + "{}".format(invalid)) + + @classmethod + def get_invalid(cls, instance): + geo = instance.data.get("geometry") + + invalid = [] + + for obj in cmds.listRelatives( + cmds.ls(geo), allDescendents=True, fullPath=True): + n_triangles = cmds.polyEvaluate(obj, triangle=True) + n_faces = cmds.polyEvaluate(obj, face=True) + + if not (isinstance(n_triangles, int) and isinstance(n_faces, int)): + continue + + # We check if the number of triangles is equal to the number of + # faces for each transform node. + # If it is, the object is triangulated. 
+ if cmds.objectType(obj, i="transform") and n_triangles != n_faces: + invalid.append(obj) + + return invalid + + @classmethod + def repair(cls, instance): + for node in cls.get_invalid(instance): + cmds.polyTriangulate(node) From 9a14dff1fa1274ccffcaa921e9818fa1b5ff0ada Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Mon, 5 Dec 2022 12:51:10 +0000 Subject: [PATCH 050/130] Hound fixes --- .../maya/plugins/publish/extract_unreal_skeletalmesh_abc.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_unreal_skeletalmesh_abc.py b/openpype/hosts/maya/plugins/publish/extract_unreal_skeletalmesh_abc.py index 9ce904b86d6..e1f847f31ae 100644 --- a/openpype/hosts/maya/plugins/publish/extract_unreal_skeletalmesh_abc.py +++ b/openpype/hosts/maya/plugins/publish/extract_unreal_skeletalmesh_abc.py @@ -5,14 +5,11 @@ from maya import cmds # noqa -import pyblish.api - from openpype.pipeline import publish from openpype.hosts.maya.api.lib import ( extract_alembic, suspended_refresh, - maintained_selection, - iter_visible_nodes_in_range + maintained_selection ) From 63508cf495fdf2a1f66234a855197fd56a0a072c Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Mon, 5 Dec 2022 12:58:27 +0000 Subject: [PATCH 051/130] Fixed missing attribute --- .../hosts/maya/plugins/create/create_unreal_skeletalmesh.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/openpype/hosts/maya/plugins/create/create_unreal_skeletalmesh.py b/openpype/hosts/maya/plugins/create/create_unreal_skeletalmesh.py index 424f4563100..6e72bf5324f 100644 --- a/openpype/hosts/maya/plugins/create/create_unreal_skeletalmesh.py +++ b/openpype/hosts/maya/plugins/create/create_unreal_skeletalmesh.py @@ -60,6 +60,8 @@ def process(self): self.data["includeParentHierarchy"] = False # Default to exporting world-space self.data["worldSpace"] = True + # Default to suspend refresh. 
+ self.data["refresh"] = False # Add options for custom attributes self.data["attr"] = "" From 8c1a63d15c96c10949b3a8146f77b26bd017819c Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 5 Dec 2022 21:59:00 +0800 Subject: [PATCH 052/130] fix the ancestor issues for bbox selection --- .../maya/plugins/publish/extract_proxy_abc.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py b/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py index fd70c8506b4..decaf410ac3 100644 --- a/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py +++ b/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py @@ -96,13 +96,13 @@ def create_proxy_geometry(self, instance, name_suffix, start, end): start=start, end=end)) inst_selection = cmds.ls(nodes, long=True) - bbox = cmds.geomToBBox(inst_selection, - nameSuffix=name_suffix, - keepOriginal=True, - single=False, - bakeAnimation=True, - startTime=start, - endTime=end) + cmds.geomToBBox(inst_selection, + nameSuffix=name_suffix, + keepOriginal=True, + single=False, + bakeAnimation=True, + startTime=start, + endTime=end) bbox_sel = cmds.ls(sl=True, long=True) # bbox_sel = cmds.listRelatives(allDescendents=True, fullPath=True, type="mesh") self.log.debug("proxy_root: {}".format(bbox_sel)) From b868e4be1ab4e4d72b83c3840f7b86c223383bfd Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 5 Dec 2022 22:00:36 +0800 Subject: [PATCH 053/130] fix the ancestor issues for bbox selection --- openpype/hosts/maya/plugins/publish/extract_proxy_abc.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py b/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py index decaf410ac3..f348712d7c9 100644 --- a/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py +++ b/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py @@ -95,6 +95,7 @@ def create_proxy_geometry(self, instance, name_suffix, start, end): nodes = list(iter_visible_nodes_in_range(nodes, start=start, end=end)) + inst_selection = cmds.ls(nodes, long=True) cmds.geomToBBox(inst_selection, nameSuffix=name_suffix, @@ -104,6 +105,6 @@ def create_proxy_geometry(self, instance, name_suffix, start, end): startTime=start, endTime=end) bbox_sel = cmds.ls(sl=True, long=True) - # bbox_sel = cmds.listRelatives(allDescendents=True, fullPath=True, type="mesh") + self.log.debug("proxy_root: {}".format(bbox_sel)) return bbox_sel From 22e664c96e20bbf428b90b46d88383ac84e5d7e0 Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Mon, 5 Dec 2022 14:45:18 +0000 Subject: [PATCH 054/130] Indicate sequence or single frame. --- openpype/hosts/maya/plugins/publish/collect_ass.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/openpype/hosts/maya/plugins/publish/collect_ass.py b/openpype/hosts/maya/plugins/publish/collect_ass.py index 7c9a1b76fbf..3ce1f2ccf15 100644 --- a/openpype/hosts/maya/plugins/publish/collect_ass.py +++ b/openpype/hosts/maya/plugins/publish/collect_ass.py @@ -1,3 +1,5 @@ +import re + from maya import cmds import pyblish.api @@ -27,4 +29,10 @@ def process(self, instance): instance.data['proxy'] = members self.log.debug('proxy members: {}'.format(members)) + # Indicate to user that it'll be a single frame. 
+ sequence = instance.data.get("exportSequence", False) + if not sequence: + group = re.compile(r" \[.*\]") + instance.data["label"] = group.sub("", instance.data["label"]) + self.log.debug("data: {}".format(instance.data)) From 89c5fdfb27c40e1a9797730830ef5ec8e38c4af7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?F=C3=A9lix=20David?= Date: Tue, 6 Dec 2022 14:31:00 +0100 Subject: [PATCH 055/130] Fix: Template path wrong normpath for cross platform --- openpype/pipeline/load/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/pipeline/load/utils.py b/openpype/pipeline/load/utils.py index 784d4628f3a..bfa9fe07c7b 100644 --- a/openpype/pipeline/load/utils.py +++ b/openpype/pipeline/load/utils.py @@ -555,7 +555,7 @@ def get_representation_path_with_anatomy(repre_doc, anatomy): """ try: - template = repre_doc["data"]["template"] + template = repre_doc["data"]["template"].replace("\\", "/") except KeyError: raise InvalidRepresentationContext(( From 505cf706f2041bdeecf6ff04c572276e3d446391 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 6 Dec 2022 15:16:38 +0100 Subject: [PATCH 056/130] DL: refactory env var processing --- .../plugins/publish/submit_publish_job.py | 91 ++++++++++--------- 1 file changed, 48 insertions(+), 43 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index 45688e85849..3e3ef03e66f 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -126,22 +126,19 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): "harmony": [r".*"], # for everything from AE "celaction": [r".*"]} - enviro_filter = [ + enviro_job_filter = [ + "OPENPYPE_METADATA_FILE", + "OPENPYPE_PUBLISH_JOB", + "OPENPYPE_RENDER_JOB", + "OPENPYPE_LOG_NO_COLORS" + ] + + enviro_keys = [ "FTRACK_API_USER", "FTRACK_API_KEY", "FTRACK_SERVER", - "OPENPYPE_METADATA_FILE", - "AVALON_PROJECT", - "AVALON_ASSET", - "AVALON_TASK", "AVALON_APP_NAME", - "OPENPYPE_PUBLISH_JOB" - - "OPENPYPE_LOG_NO_COLORS", "OPENPYPE_USERNAME", - "OPENPYPE_RENDER_JOB", - "OPENPYPE_PUBLISH_JOB", - "OPENPYPE_MONGO", "OPENPYPE_VERSION" ] @@ -223,29 +220,41 @@ def _submit_deadline_post_job(self, instance, job, instances): instance_version = instance.data.get("version") # take this if exists if instance_version != 1: override_version = instance_version - output_dir = self._get_publish_folder(instance.context.data['anatomy'], - deepcopy( - instance.data["anatomyData"]), - instance.data.get("asset"), - instances[0]["subset"], - 'render', - override_version) + output_dir = self._get_publish_folder( + instance.context.data['anatomy'], + deepcopy(instance.data["anatomyData"]), + instance.data.get("asset"), + instances[0]["subset"], + 'render', + override_version + ) # Transfer the environment from the original job to this dependent # job so they use the same environment metadata_path, roothless_metadata_path = \ - self._create_metadata_path(instance) - - environment = job["Props"].get("Env", {}) - environment["AVALON_PROJECT"] = legacy_io.Session["AVALON_PROJECT"] - environment["AVALON_ASSET"] = legacy_io.Session["AVALON_ASSET"] - environment["AVALON_TASK"] = legacy_io.Session["AVALON_TASK"] - environment["AVALON_APP_NAME"] = os.environ.get("AVALON_APP_NAME") - environment["OPENPYPE_VERSION"] = os.environ.get("OPENPYPE_VERSION") - environment["OPENPYPE_LOG_NO_COLORS"] = "1" - environment["OPENPYPE_USERNAME"] = 
instance.context.data["user"] - environment["OPENPYPE_PUBLISH_JOB"] = "1" - environment["OPENPYPE_RENDER_JOB"] = "0" + self._create_metadata_path(instance) + + environment = { + "AVALON_PROJECT": legacy_io.Session["AVALON_PROJECT"], + "AVALON_ASSET": legacy_io.Session["AVALON_ASSET"], + "AVALON_TASK": legacy_io.Session["AVALON_TASK"], + "OPENPYPE_LOG_NO_COLORS": "1", + "OPENPYPE_USERNAME": instance.context.data["user"], + "OPENPYPE_PUBLISH_JOB": "1", + "OPENPYPE_RENDER_JOB": "0" + } + + # add environments from self.enviro_keys + for env_key in self.enviro_keys: + if os.getenv(env_key): + environment[env_key] = os.environ[env_key] + + # pass environment keys from self.enviro_job_filter + job_environ = job["Props"].get("Env", {}) + for env_j_key in self.enviro_job_filter: + if job_environ.get(env_j_key): + environment[env_j_key] = job_environ[env_j_key] + # Add mongo url if it's enabled if instance.context.data.get("deadlinePassMongoUrl"): mongo_url = os.environ.get("OPENPYPE_MONGO") @@ -309,19 +318,15 @@ def _submit_deadline_post_job(self, instance, job, instances): if instance.data.get("suspend_publish"): payload["JobInfo"]["InitialStatus"] = "Suspended" - index = 0 - for key in environment: - if key.upper() in self.enviro_filter: - payload["JobInfo"].update( - { - "EnvironmentKeyValue%d" - % index: "{key}={value}".format( - key=key, value=environment[key] - ) - } - ) - index += 1 - + for index, (key_, value_) in enumerate(environment.items()): + payload["JobInfo"].update( + { + "EnvironmentKeyValue%d" + % index: "{key}={value}".format( + key=key_, value=value_ + ) + } + ) # remove secondary pool payload["JobInfo"].pop("SecondaryPool", None) From e989db4e004a6fbc9487d74659787e02c7e2bad7 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 6 Dec 2022 15:27:39 +0100 Subject: [PATCH 057/130] pep8 --- .../deadline/plugins/publish/submit_publish_job.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index 3e3ef03e66f..5ed8c834127 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -126,14 +126,14 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): "harmony": [r".*"], # for everything from AE "celaction": [r".*"]} - enviro_job_filter = [ + environ_job_filter = [ "OPENPYPE_METADATA_FILE", "OPENPYPE_PUBLISH_JOB", "OPENPYPE_RENDER_JOB", "OPENPYPE_LOG_NO_COLORS" ] - enviro_keys = [ + environ_keys = [ "FTRACK_API_USER", "FTRACK_API_KEY", "FTRACK_SERVER", @@ -232,7 +232,7 @@ def _submit_deadline_post_job(self, instance, job, instances): # Transfer the environment from the original job to this dependent # job so they use the same environment metadata_path, roothless_metadata_path = \ - self._create_metadata_path(instance) + self._create_metadata_path(instance) environment = { "AVALON_PROJECT": legacy_io.Session["AVALON_PROJECT"], @@ -244,14 +244,14 @@ def _submit_deadline_post_job(self, instance, job, instances): "OPENPYPE_RENDER_JOB": "0" } - # add environments from self.enviro_keys - for env_key in self.enviro_keys: + # add environments from self.environ_keys + for env_key in self.environ_keys: if os.getenv(env_key): environment[env_key] = os.environ[env_key] - # pass environment keys from self.enviro_job_filter + # pass environment keys from self.environ_job_filter job_environ = job["Props"].get("Env", {}) - for env_j_key in 
self.enviro_job_filter: + for env_j_key in self.environ_job_filter: if job_environ.get(env_j_key): environment[env_j_key] = job_environ[env_j_key] From 4c102b51d6a50ed362ed164b76a72a67349e88f5 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Tue, 6 Dec 2022 15:41:03 +0100 Subject: [PATCH 058/130] :memo: add documentation about testing on deadline --- website/docs/assets/deadline_job_version.png | Bin 0 -> 32810 bytes website/docs/dev_deadline.md | 38 +++++++++++++++++++ website/sidebars.js | 3 +- 3 files changed, 40 insertions(+), 1 deletion(-) create mode 100644 website/docs/assets/deadline_job_version.png create mode 100644 website/docs/dev_deadline.md diff --git a/website/docs/assets/deadline_job_version.png b/website/docs/assets/deadline_job_version.png new file mode 100644 index 0000000000000000000000000000000000000000..0b78d6a35c9d1a9a26885511a8c171695285293e GIT binary patch literal 32810 zcmZ^~b9f|C`z@N8*qVuLV`AH!*iOf`CpHGLZQB#uw#|;y(dqfV-?``B=eg&Pdb+B* zYOD9&YrX4T9jUA+g^YlQ009AkEF&$h0s--<9(>Tl!GQ0mE!&lXKR&srNQpvJO%j}d zFQ6<$6ht5(YU2@KjiJHU@Q%{jE)WnXegBS6Lk=Zo5D)>~GU6g?o(314h(=!*8b3Y+ zU=0~pLH@vRBRo)FR;{l(7ociHC*t{`I*g!-?AI%p4Y;-qodf_ z!mz59)%koFxbvAYp`?3lO(S>dmkZq2*T$y%83SN!#)pp2RVtf{vjMzUM#z1S9pK@7 zX*_ksW-+aEX)x6y3-|DvIbI0c|NZ%>FaBdJb*El_J=O48#M4Jgs>N^b=2~4N1D;$e zE(W*iWea)7@N$#TAGBG@8t;MnyLU_HW9<5JV=64bajuo>a{XFC$TMlzJ^ZYjW928O zNQ$?^la*}u3s7RmrLx7v0pVj?J8JFC($lz?(?P|}DSax&iC;S{G~Su8y)T~0b)NM^ zcjvyl;K`!&vX*Mn9<`Xmi+IOn=|!XU=Z$=qTHktj@03c{hENyz>oZ$^-2KXZDqh#4 z)#R<~)23C%5cYe3bwF;MgMSW-ihE4LsGEDZ%L(Xw)?A32&)2Df-PJ$|=oaT;Fxfy} zjS_Gq6Ssv2esuG>=+_|OQ`QyGOTfAcH*-mXb*|qs+`;PUBcK0tRVlV0)B9?!=-tX) z&s91DpyGM*F|_oIy|Y@FJoJWax{=I!^(OvkEE@<4@n%YX3AvRgxQ9n|P<2}fuu-vI zYL$8n<13hrm+*hQTw*_DllK!9FidXo3+f7Et#*m=Co5!W{?-wDRcQxf@L$OWW; zvq@4l?PGs_Cv@*j<6=6!shc{kSae0eAI0>CtTdZm3T4EiO9cdnjb`s1cqq(@`yThr zYyWoac}^yKIak`7y^Ll{eeG=S+4?ysFxX9=H;OqMN;U(W?+mJtD3Y3vb z|6R<3`oZ!d=?lF5&FXbyW)}N_H0YmZ?L{4<6rbBNK39A7?4I%1iqCQ~E9WRN36;U- z{28z%i2aeXDz@~@CDar50mGE-#8%b8T{opCxM$OHL*}6;OR(6zRDxLN$njy2O%xk2 zzSO;RX_&bpwRB{nCt$<-ZORuP*28|_HozOd`)%wwg>xc2nt7}rI&*5bRmeYM)=hD8 z=;?}UYN&AuZ?t>xD=+L&;rE$;$=t#~ryvH$|v>_+mqQ zP>2noUoTIalIO97=ZdvAS&5SL!DfovQ2t%|yIAwq{7U?QTzmHy2D-V9{%)Qah008 zI@jLW(1iR1AO=li5X{UF$gSFUSNY^}Q2nY;$<<-<1aHP;?~ zUr!wFa)jFCZ@?CMZVs<`RvMBoqv&|VcN89&*3 zkUjBN+u=`#1otW*y7(6WxU=+7|p*y-?p z3v%J^F&}K5eYN#>S0&cZmXl7=50CjNHPd*8ivdu(vgsbkW=@T=VQ$C4#bo;Xd#WQS zfbKNi2eP{JN7EM3Kr6OkL^ShA!I`vOHaE&qz^;5pfG3;##{=bgfDeHn7c1u8Ue=PN zDvslRbM=nuZz_{>n$wEv>z&@8#bmF^*fX!M=)cLnr^UaVXDN9bWX{JKc1Ee5bPVeA z!|X|Bhoc5OLb-Q5d&##Pf5~ujqT=j&ka_LCt)?b*Yis+^k_NtUoi0}c$8RMqropJEjdT+8wU!( zYzJ>|>(>%y86$q55!uR;K_N>_seng#{AQKxgzJ?Zn-sFUbK7{}x#zN(<*()Ic#=EV zJ>@zO`WO8Nu7(ezPeiMBdC(%92#Xj+SO5GLxHyqtP zu%HMMIBIy!tsBBq%<;H8ct2U$UilF9E1I@j`0@0D&=skxSHi3Jw|GydBdU2J{Qht{ z4YZKok$DIBELRAfjDDh9c_G>jHN21OE)z*4a6u#mLQbJ8JAL63S^v4>GGHY|2qJ+K zBb-vovO+ZUU9|ceE(IL?3AtR~iWSS;MW2=mevrlN+ThNB+MV+iN8#hB`r5#or>gr| z!*d*hUjz+G1P8&>ms~1rB@)tQW?Z7>veCPNP*9GY4g8()@U}5*EI_wsYkHFJiXR;S zuBoh+s}SU-=*4ufHa_2Lg%#@#MY;@U^N|FAb&?+k$F=&54eDp}Vvpw3(wrUQpnw zbK@PY0E;Ydx0Co%Y1$@Ws==if81NEs*_&Zi?;0}nCI1g&x%6r+XP*w@%Scmp-c5M? 
z_BPv#I2$JTbx;U*@3>O$d9yVi5yLo9=b7>O@*_jjN7YyEJ@Sy)*6&tkdh_9f9Zy~F zd`HAltX_ACS_b#s5-ITD9{NcVu$dwl^lHuJUBAN;Zy)nXk?$PyrSKjG>|`@_YS+#cmV4dSzUTxvSm7{OoaJiEBiz*pGSM*f7F!`)OWbG*BFyhee3d-ManRDoXL zucR|BpF?IB)rcn3F~DHOJVbH&cSRfrMZ+{8@D?8vKJ4pXP<(v3DUI$Asb9;eV&$4S zoJaID*V;vsDCgNNS)%}z2}fx%j`3k`x0{ks;Jr`)Mq|2v2z3*G zWLlWp^7Xh~yq5=Qy-<`InGu zBa+X)EBJv&ZF^Gpc)8bnINQ4u{l}l?DfnC$p|=?o>ZoA|w93GH=$ZS(yyq5mSm@af zX{!4Kj0UY|1zZO9N0rulEfRnvcxH0n8L+)9M`v@g?{IZLJG@@p=es|>7;g1!87}QO zG`w-W+r)KV2N?8E;b$+bYV+&hfA^Wt%=tsP_^P@9lB;rQD7@4n>k7m7Qw3Djgh3!U+3ZsGxLQ}X3#dW(Poch3X z6MCgR^}kb{uL2LhlH+m&WDirvUHew~#i}>L8SMmWwu;rq?Gkh9>&Exn9)t(e1<+b9 za=^3Jns(?L^x)m$k%O$_1Rd<}cb`ukr&_pFjnlPR>9D=LHjvdX@cG}5S^V!! z(E#z^t}Y^t4VoUO0pkf=aInQ&(&BYtHbh+4gU|WK6WrqEJKJRYjU0D}N3;ytzh_Qj zv0s|<50hul;Qz1k{s*O`LjHq& z|IgR_X91gEx{k-^(B8LcHo2W4E~Z}5CSJCZbDzOY?M>+MnRy|7HM#w5`L=L|!+Pp0 zpUp)~6wgEVZw$mBlop478r$60A>Lw{tl9|mOlnSHO#G)!-y&eq{xxWr98&mSn@Qv0 z{)?TMQYo2M@BaW=6!h;L@PE$zEdS?x?|(zXKcEP?8=8B#B@PRC>kZgFSW@g6z^rHW z?~i_JmTJ0xo__8Ac)J3~p@HI*0zh-%li6sGEkA7HfLB&Q&@nkC`5ko@HMo<4=^NUF zFAxshKq>>i213-Ow>ra+6$k%Y{Ca9Q>d8b+1KM7BX*sQgH(-1~e|dG6wMoc3;4hUz zGz%sO4;>)dcJr|2u-E;5);*rZf;P-wpF!v9u0c9<4(>|^jfoC{CyaW^ff{ror)8*7 zV#F74?{klk(pbi{7a||>sP?`2uS*i5i_Tl~NdJL}03abmoA#3FHpg3phgTlSvb^K( z`B8=F^x|@mGD12ltVW|adux)4;`q?7LI;r_P(`|!>?i*j%BgL*`i#iy@U$9QvJrwm zd0tmgYC3qjmuxg;Tw4vk=EL^vz(A~hO-En46GUaP&lUOW)pI4UnKbP~zrunN08g^l z{g{;3srq0P{tW{u>|9fJ+p?GxL=mDjO=<{F{B_9yC7T)lH%%e6TI|MJqQKGCt2B7OSlXbp*)5F~pzU&EAIUQS=ZBq&Su{Ph1g0^g}wC*+BEX&VujiHaqg$b7^14D11;v z*h>p6RLy+Q)0`F3*2|>*dTWL>Z=l>a4|;-^^uy2OJ<;YAVsmUQUoHi#D4jGCOV!6n4WgPvV)5#Bi1SSqEK1 zeL`3?k+DnWrOO}LVo!H%HqvRy_9DL~@R$?J4x3zbFiqi0d5>NxG4QPHyfuNt_W+px zzK%&L=&84gY^7d-UiwYLXaF;*N$fdm*{VsM5oom*`@p)FU~Hwqx@vKC`=D5EkFYMO z?y>JM5;Y7gaI1$f4SaC(;L*!w8gla2$ZXGo(1#Ht(pUx_c3ic?n+|F%MTyH zFZ3*ptr##Rp8|{X>=CHxg8_hZEl#3V1qsXMiC}Z#IjY$?>+jr!c<*?aQ-KrlRE8cZ z@>{h?i9O%iyFl=yUT6G~U~0x`weCaMhUbEP0VGNtXxu79UbC%v_h z33IEw6E~cA-b$7;Zs6uc0hJ%XV9*-CYo&8B+<7Lr{f_IM4keK%xT1OEwol{*9sY|m zk@u-B-N}q{!MLHUZ^lbHB4*95q3X-2MujnYqV4?j)#6;zQQXdc#UA#q%5KaH(6kn!h`7)&X>6=vx?aaM4OH@d z%{XxEJZ;0dhzgm*!Se4qWBD5AT6^r?`LHWp)n!0rL5O5i;(G4=3*a^9Jk!-)>0yVh zh}W^)Ovu>nPq=wo?__sk_jKe!^H3=S1_c&*Zhin=m7JAYhy?y+>SS84y}V0WKM3-4 z_~J^V)W3L?Ae{BCnA+kGhU|92wh{$T#)W!dBdtrtlx5%LgPwf5AnKg+nn;~I8pf{v zgAwxS|8KSmf&T_NioD{TiN{v6cqw2*QQ{bOwQA8S48!TRguCpQ<(jLIz z*Hcq8_{KBY8sIffOnn!!_DQ15C4hlF6jYTS#Y))TZsSNDRqm$-E5~wgXGhnyO#Ueo z$*zt6br0$@$7zP0XnPGT>uzexQgoGDdzV42nKAyGrD@VuXf6KI;~U42Rx)j?2s-hH4jADXZ%=mqI^f}5_05)9Umph-79<#@zlMRy#WO`Dm%iWOZ`CJ z%N^P`rBxR-5VdTZqhTr0r{R-Rd%>8*#|Niox%-VP>n&qTp>ql|Vt)D0I;afBE?di+ z!yMs}E=yI_dx;qDi{^0yA*USqL~cX~O4rTYZvr#84zW?MtX5heZ$jsT%JyeCB6&4j z_~A||6w@iD=Qc))p@%e~m(5~Uhh5fJ_dW%!#~{xM>{()9+7*cdw7cS3>`5f$%yiJy z$ys%dE1V*_=j{X+a8`~vRlt}82TFheR$XLhSWF7ee))${jqC(2K1;t%7mN6a3sU=* z8#c&*p1{m=TWTnNX{5xIloCcH#Vu#uLGtU3_=z${k!PoO$j zBS}n8=+sr#j=PFz_fG@E;pL4lHh|XGW@tvqU>n+4OBeV|M2~2PKv|+nzm*p?dp}%w zWr2^F{Yv3esbSNJT-AXvDCQy>`M0O7$fq#i0 zdTTg)GdwmSkRgl}V)o~IfnyC;GpmMh6^2ST(bOFEMe0{h4$L`w6bx?IvjcYuvF*S6$=MQ?lvC(%d{=(DMhn?9&RafH9si(w;j z`OvzHCnHqzutHB?^vMrtRP$L?!SH*Rpya!Y$$eH)?|9jIB#ZNdg*{Z8|ETvna5UA? 
zCLhZgpp8Ri!8E|v$a{ukWi@fSr^{$%(OQB4V)-ukFSIKV5L07|KGItz+%g~%&G{j& zEyaVS@p8^Fby^?HZ7I8JirPG>HWcKt6;@oa@ni!Gr-a=rNtrV{a?l6Hmyug}|fQ zLCj@>F2#xD4OU4_&|Z=>O!v=FY|C~3QI8}2fL(*05l{0!jiI6MOfT$qYU&*>-Qacl z{GOcf07s}$mIrt8{!585&4xYZr!62nVTI_oG*o9yY69?lnd~XMH@|*nkowyy zD;MR?a_)5CAX3Wu()wV^gQsyqh7zmZ`Dqa&p8zer(8~4Qc74NZy`UK6Vwysrpv?q3 z{Ey*v{}jx}l`%5s!dp^n65Byx&@D-h{2upXtXf#kr>U>U@wYq6I6(IImd&%;{u|?Y z8eL>My}MB@k=Cf|D4N4K}AoM_4W_H36c@q(&#KuI*&Ni*tMVHw^;& z_#RyA4|YqQ*OaayVmfVu_C8_BgdKV?h?$pjZa|m&hX?6hNI$ihk0KVabkqOLH@5EE z+ye+pA=j9_eiN{sIr-r^@?2nCI$w2nA)%0>qh*1@;^zLM=0$KGNd9lA19q%lo>BX{ z{da_bH#WuFyOAq6Lc0>LYvgN+D5xa-H`A@$B`gJKs3buoTAo{fL!ZuL@L3%Uh^p}1 zu;RKNvv?@}!|5TlKTG1&MF7Ctg#>8)Ke|zf!8hL!ZL4U3V^JSebZ~zZv0$W9fR=-M z{Jb-)lS-{gvfpl|PF$A{ezbgHg+MNAfHJX4P7nA^H2%qJdSMbs?;Ry_yRzk{4 z1wv@3!F(P){49fo!(!g{EC@ z#$AGLTx@HG#sv;fGs3;O2nT$|7^-U=c6ZE4@6TA8RDZ&%_DO{19Ht`3X{eA>$1z4X zIAqFDfd)h$R_G<|*!+@ZYyPQ8Z?ST_ z;yyxT1-OGL)>UoP;nZkR@zGi*YaNc~9|2?wgh%SSjg-58wRo_KnrK$eBAN-w>-p#l zNv)Bqnrf6^zZJ1b&e3V3&n#=~1EUcJu5>Zk4?#kDl#Yg3X{n?KXcW5EbTg2)hoGagSP0iu4eRb)ZY&> zz~+R}6Ez~SYJ$NOr&Mb`2Wuvj?z}6I^2TTVIJ&d1>mAe5D(xtO&BL-^a@%xSHWUMJ zxENauh__u@4Vz2N$Q|c8*QpS@I9Vq{q1HRO2z!F5-ocDrv{@O=duSVxEzfkNPZ&4g zahK42v3=fsFZfDyh?PYv2HGWj<;Oq?Yfc{-xPE9l}7_QD%lY8t8tpY=Df*^SuTxQahh%#fy>rcE9yyl=G&wi|Lfhw9hew#o4eHgo-qNo%f=SnoRue z-uB&!)sub=R`kCmmZ%n63HEJzfI883?T-eJV_Syx&DRds;9@R+%p)`>3wHNQ#O-i03l!=J{6>%MU>>`?c+zi1y_s7St@Ko&vY zXA;!8K@|%>manBV;R(XVjAK|`l5(encFP-rSbH~Bee>j!eP%ssd|!QJ<(~^wXhrZ) zv3%WBYq^XXL}dfG7vwO`^m^@Dq&oG^2iYl)W2`jLyd(cF(osVP@BH=i>Sxai z(tG}nO{BaC1JsHp;h~qKsd%Iw50s(!&8D##Na2dtvY4RHOK84=gNIV(wqK|E?i9S+ z87Fyhd}xGh%;Ijw))YP)ANOpf4H~URS>d!>U}KEdP1AGp9|E*Ac(>eCS&RI*OlY}i zMGgp^{v>}(dcX>9+6Z^L)#uu~whs@pvI+^gYNIVcrs~NJEOm(iu@9Db7Rw~Dvm7IKYZn7~*_xTURPjIArEf zi$Jvf!Niv7dauMmc_SAkl|8Z6RHune+URn*p?-ekb(4N< zteqTC5}kBW&5SJXoA0Fe7Hm^PBUp|Lv)FoJKUClk+p=$gKQlm+>JtT#$Av+Q)JR8v z0%IdwkRutApK+3E=%gaBffRiCjg7lE26`IE()<1LLT*Li4;XI5PY_9uoB_7djd~Wm z=q=#a-bS~r0J$0sAbPU6u+qNj>u``ghD!zdZ$FW;g9pvGe;CaLGc)c95vw3PqsKTQ zdqpc&6G?_tOZb`X`(&_%3O`{A{=wbSV%V_*fAQZ8s$KDAZK==B0-0}ZVCC} zmm#kCFBfZtB(d#`R+^x`p6XC1P&xP~^7C0O->Px+#6t{sv#{WtdT zTjttn&OZrM8L5ti+^H!KFV~SCkI}gA`fs$q8=Nf(X>n3)tymz2J$M9rr%k3qQ}Jka z+}bNt`kmp{YvC0lxZedx^D^Kb`2-d~|H#%KTwi0X=s0sHg3m(5T|(M%0SiWmY!C%U zEtzois-_iIpD3;?+_mJ|?pk;;mchh+9_5!g1UM}znr+%%Z_?5SnB-Gh*M>idZ>h(C z>Jbe|WdZ96EPF3XXdye%|2IBQHXhPYT-IDfnWGNnQ|C z>}KlrgzaO{-;Qk%Y*W<+SKM(=*5zkN_>B#mU^`Jz3SCKImC-!M2oX67LS*`zD;Dl} zm}-%RL;6*czuRuA`@$FK@uVfu{l0K( zcvMt$s7$dWA@6@8hTlndxM^G8QM!dVV7oHn!p9Irm8R$DDWq`*cj}%!&ugDskXYl8 zZxaN2$7jd?EFL;;;}1(Dc3#X6Hb5F?S0nkv6Kmc!FY`F{@|XrCI3W@RykePYH4kE6 z4JK9UIz3`fbdexIEE)JzBFN;v+}92fJko!295apINkXcpU+zZ-1i$Y5Bz$7JrYY`Z zTdIJqxNxX;P9eRAI#n*gQ|&N#;+l}~(?*N7O4nC~4q9pHE%*|WQe&jci6BIu9t-Pl zybG0^T??fx`}ty;sN)B-(W$v)nz?*oCw9$I!sb`9|JE?mQddXtw7M3_5PhB%MF>mm zpkcskjfkKuhnGg8MH8#fBc1QK;>+I!vgU7gg+E0-E%fQdy#+2ORW8e>tu9u^oBC57 zjr@HbYTo55Xf0*mVcxA`bY%!`rj$O`PT{mmLjXRshclgf5k2+(8M}LLnfZI`GcB=( zdrPdT0loBg+R;s%9gkE_(AK;lSiEg#(Piw+JcJH_WPvM~DwX(TWk#e#epvF`Mnrf# zCJ54R?sk!*M9U93tKE+k{@a=H6WOJK0F2TUo#=P;(6#Kog{|GmZIaV$Zz99tR*m8p z6O=0d9a9&N^->9Gr!bc2C)KCB-G0FG2sT9CExSt=Df$9GS&sIE_{*Udfki^krRVXeW<|gOIM>Fq;zT{4ggxl){Fht`-azP^$QQF{K|Yb=k_a%MDskXnBLV9W3ib zI9Gvn&)s`wCV6Z-OYmvTXG0J96nNGB9mi)wP4Cv4Z~j6m$o7JKY5TbjKPIdZUP`5t z@ma9>%E=`gHCT7`1%|2L!eTwb>-NTRp`_mNuJds`*I%6R#iWvSFc+e6aoSgqa}#MM zzR_-0TkO^b@h;8c{pKYm^*gNgjJC9-=9}0uJ4jcno+R2GWp81|#pxE?G`+|}VCBcm z76?nyVYP%0Qv^rY-K`_4bt^4ebIqf)+62L$_H5QCRkgb3_*i!-*Qs>xZpmeM{m2%8 z1MtM2$PVAdcj#YlH5{l6q-?P{n3^CJ!MSwJ^#`Il7I<JNA78l7*I6_?hxX}& zmTMWq 
zLxzRNELL8=+41j5twR%jSsX}V*bz&<#>2C!TQSK7-lZ#N&`TS!MPAjr4+HlUpP_R< z(6(Ya$^EDl5JmA^^d}cQ-)Yg!f)c7C0E0wQi;#$w4lZDCMRD570Jfp+0~D#B@x(c5 zz*Dd@H$%bXHXIukiO<-!$Fh|!B+-}ow9LQ)=1}cPI;b-Nt%Rs}nVpc6QkIvez4(9L zkiOU+On~C2UeTORf}S2U%e+)%ZEw2q{q#x_B*WRgbnT&UPcG#<)hu#>kwHWcG(a@&T$sh$QH+LK-Y!D%S^q@M{NGS2KP56?A!f-1$M1GV| zx?rOTa8(E8$YS8OJ22E&ptN4ay%am}`sfZUmjTEgc*t`sHY=4vOX>pgV#Oxk8%(SsxA&b$l-qzZrl*IfC_Y?Uk*Y1j z0KJ~UDJ(;y1pAi;NHYYy>8;NEpsKI-$4xAPM_!csv$_R1lDRuOs5lk(`Tl!v6NyA? zF>X@*@DpkynN~yB5uvvt>q6C<{`vTsul;A^d97Y517H4Mjr5CdrZk4--Ji>y$q3h) z-(c_MDCRC&AM*7C)ru*9*J&rs#Eaqg3}^^p2_LEW8UAS!uTW`S@UO69s%0CLbr`5` zrg|fom%98d*!!|P2xGgs{sY-Y!wA?A?RvgUf4}ufEd4zw@!(kbaImn2dgqM%J1_oo zMQLRKZl*SEL9_CK6mMWgtHd>sLmg<|!gt`z`5jy8WWwQ?%s~6jF`8}vE-=7{y;9he z1yKUsDojCP`o-7qxH!$^zWnFt+$+QZP-3>f;EPAHmvZM{o|*{%$93xv#OPQSo*%Gu z!8CHuhYQEe0DG>+9?A)VnuFdty7{ZOc3RFrUbGK*mY*t z3xsrKy_X);`Jz}F);-88+2j4&^{^C3k`wlqTACR1RQaxOpYp|Yny#+;>cO@;7KP>_xU4G-#eT#rh`gkW!Z*0oRYypVUuhj@{qj4+ccd%`Mt?^n%EAWX8}8OX1|8U{P=4o zpBfmq#{gIb*yAMP|6?9_gXTkiNJA_Y{nHvK#kD#_qjlS)2S5miXVLI3{%sOAr*$x` z8?-ck#Y_{?FV#HuXV2BvmXE)wiSPvRl5O&rDZ$*)Up#FJ#SBtZvQf3-Pz5b3dX#!& zJ93@dqnwbo$Vh*0$U=ODv2R0S!&U@%?~%&wy)pZRK4i3I8%4p}oMt@oekjH>{$gy- zl|HNnz8EG=^fPX%^Cog<^7C3rSVzc2b^v*Bu<_Hj$K;5sG|jiIDXB>Oad@1kK<)&) z-Y$vvD_iILSN#*)Wp7E3&B`&A*NNT2C7^Y%X-;YPg`y+*pa!yHQ>S{75W39d- z-t3K$B@$xAdSgaB-M17i(O!BSFEMp0tI>K*zb8(wT^Y9fCcJxvNGD(Xrd;f7Xa$KU z#o866AJ)x6vzMRat+@8SU@3~AGYjIfu6})N;~7|iT!8BVVvyW1Q=|kja2KUHs6W8j zNp|av)4HX=U=|%=x1gcCPM*_AL(rb35|6E#zUFOQo_Lx$k6-!dU zQA4Gl`i`E(c&G3{tpoa22T78uC@+G1%%IbkT(cs)sJc?)`F_Hd_0iN9MbbP~HXvmk zhS@(lNqJG$xY9<3i66T=AacWCLF=@ofs~n(qR$QH=oWNsl@wj!YAuNSQ z5K(3LmcY8ZnXSTL+Hvmt6+xsH_+5x8}%q;!0g_%L%T=O}r z*a7(-1QNd}*O%FR%;!Ue1OwDdCsfaypAUwTcuDu|z#7nM5JazykjBe}xd;R8JePUL zo%`$NF!$%ByLiFB?iL*XpFEpr8ZX{qE5q)KGD`#>y@+(+EFgrQR~4k8vx+g8A&H8R z$!sSU(layuXB7A6Ixj`a_z35wkx;9D)C|_zg*usHfx@Ir$p%#cLeQFm4uv>st{^&0 z9ttTTO=f1i22qUZ!=7nZIkJezAW|G)@N)szq~D7eTeLmml5qwRwJ)EsvBuPM678H! 
zJQ{Mn(s3!^VNus1RRC=1!EXXI9lTd>7p2?xKI+Q%lc2lY7&c(_e=eEJozPm1ja*U; zjP*dCJY!%^Mc8-sCZO&Px2`HSdjSKk3^S-8Y7P8xzZ2(7dvdOm{IXv=SQ=yw#n3 zzh6F?#0(!9^Dwe8zz3L{k%)yNxI#CXgj4=Sxe(Sg;P7UkCb(d#@u52{X;PN6;5p61 zE_(v@=i*7Nam)847pGllbIk8Z#3>oJDjlKEhV8l~Ua$Ww^v~AjNPY6(NS;9VyCURO zi^OfP+aiUZMTMP}OJRkl`e=2ulpduITydWP`qcCpJ|w4s_#(2F%k+Q^^{uQT{?@@d z@$#vZrrcN>M9X{o9u0=MqY>Ql8b|YGok|hX=fqUlho9`qYb(#b?MoEkrmf{XEI1@7 zcxdRdk=4cA*an2$n2Ce&9;Z3kTe;qqGSPD8hXe&h?-Dl71$~GV(Uqu?6+)E|fjm>k zW<&a_^7fw)eYlmx@A}aULlMB;(A*#_MM+hSRUcNsq$7&&LeP{wS^bu(3nUF<3T?pi=AkQD3h7|_;!25mx0Mg zNQLi0lYGWcUEF7u`-pR!i@}LJWg->UXS-a3%m*0nPcF52G@o5#p#64=?U6>ngM1K~ z^6ahDcZz4WdHCA&6TG;P9jZLda6^KYV?;`7%cv6 z>|bsLhQKRr(P|p7EqI8NKQwG2z4oLSU-|aBv*lp*J6NkjGF(GIXWR#I~S^M><8}GfL#+U92`95Un9&EYb>g7mpUb_5pIT*dBrE;5S!7P zs5{u{+2Uu_2160wlTAS#&jnvoz@8TdV^w_^cQx*YUkJ@BHO9Kwvcw2E(F@<8({bFg z8;k3eBKF1DSN66WLnoY zk@P?V3*DXE+DTCU*)o6pxsAZ;r04c0mrbwWW6PAI)ce7OM8{du%L=fm8fSebrjXHl zKl^LH(r*&9$&t$MX{r^eD_{?379gp9JfvikM z|BFs9Wi+#BF19}DqBBfnLo#C*{Bj~{ywc(o)GprZxKfX397l(>@ZoS;%Vjz0A9~_< z&Oh7P)> z{kv_h2TH0}<#D=&kxz&VFKg%g_CI$d{jiy=ow>_U{u^`_1^*}LG=Pu% z<0Odr$w;BNR*-vdC91YqyRzjkg&iz!jV=E{p~Z+hqSS%JtjlzkQ&nzVueEyqa4p39 zCuSG_&7&h$oRKqT2QFv?(U0B#|Gzk#D*e~%cmen%3yBJ`kKDf zatBzGI3d{Q8~qw4(C9SQ(a8xY?x)2|Lt2frT2DFcDum4I7LxA9g~>%#l6j`0`k?K_NW;ug!J zP98#9Uh<}#JH!$qcR1U^<}JZYl(1*JxgC%1IG6jvBXtUdB{sI9)s|HowH_S=1&_px zWU@#5~r`^m6=jP(i_TSa8_(hi1)t^(S+SX3s&autgpW-szcwP4zeuW zMyGzww&u%o;@RSyGLYSB3+*Z{Mtp3W^O5G}e(4zA9Wds27cTt%%=f!nE7b|c-H(6r zM_#25*Mnym${)=plS6{l!w}zqCJhs9SZ4oFiG20i9%fAAmXLa{l5mDnQ?Aje4}2>L)5O4w&4E`^GqVz z7HXd86@7VJD`FWRq18APyPs%qN7RX3(G~bp0oP`2xFQFt)IIj`{JE3G zu#Vq2U5#krJy?pn@C9N7OU%6s2j~JRL={(Tcedzetvm@tdhHnBw;0VgK+}EBZ3~;C zpWGdo^mG&a2}C1IKYlH(u&m{4`CSBUoS=L*@DpNsRSNlLhaG=0uQrir76ZdT#E#ME zPvj~V9+eWm71mXIjWiEfp+h7 zh{Df{JMSbLD@hdODy|x%7{07rEL|N76Ga)?!~JyZewme8q363_zco`vROqN#UoVEj zHO7$oNVhrh7dc^SUF~p#Ugv3-`%7-c$7w2u>7pJtMtrfWJmM7R4>6Qn#F}5K@;flJ z+V1*^?VFULel!y6LJE6N#}#DCfhWDIK#0F6yt7en|{$}hqkzBWs}hMl5~`d02mnebOc|3(M1Y~AjVat^plm6qywlRF;z z9gt#KPkJ@i9l>B)Z}vwfWX^1+Zy118oI2&(ps^Jyvlr#-qtXY#DtHLjt@DM+o@0$# z+U`?P$$-s0?AsAWQ?Srr`V(F^5sm1Yui(!+0s`}ar$v42cHOU_kSwP4u}2l^PS@i* zCv4_JiJ;)0TRiV%Lns-K60uUTY&C`v#}afqo-jnT1PEnU^JzL?pn}RWt*7BSjRvra zE`gZ;iRLO)?|FvA3bONeEVaH?JsOEPG}37JovkUA;^93}6yrQYhI} z;*MR>TG!0D9_E8;5U&Q7)p&urgJ*u>v0Af@VrO&50-09d44f{Ry=XPlqcO_ zF`s6yUlnD;&UHc&eAhzc5E!k%L!D0*$C`w9os0}Tk651lr3Gmk4+99&ACL4b#i;JW zWYm;qqcVM-6yx4MhiG$y%NT5>p}P1NWl=A-rT$L7a%HS(b$K?cJa8du{J>3A{8_{H z&Cc!50mED(RU2awlcsl+i(A5g+bIrnHSr#Mh$~5tU(y$gO-x$qSfF52?Wc4uE~CD* zuUh;UWQ2SVvTq(FC5%6o@Rf{_|J0!yl5J{ z@BJn$T`tSn?JQR;F6mdhiTF0Wc3HfhQEVq{`YK|O%u194tNv=7*FY`j5+0A1hcQND z)S=QBr#k}{61Gcg$1=N|vlVx_vx7!7l+Eg}wTTp1SzwNg#V$aAH%KDu=id_Nbk?zD0GeB;(zdv_HKYc#nW0bq>uX;ixcUl$9 z6WaQ3(wH!MEu%34y$U#ZvSB|>$DgNaY~h%%5KqHh4N8!3D>|ntHF%vrwAo1IT-(?? z{)u$=mcJvpR>Uvy;CL237`fxk! 
zTEVcrM?V>b?%}&gG3cr}vFyrLCO|4{!b3pOf8$rXl?hA-^cB{4MfSH0xJr^IpWmmt zEnEDjf=fgXzt6lZ43O(Vw(TDg)qQLO2|+HjCoVQ8E{YWd7&?1RfRafideo0X7pBqx zV9$?Vsi;XcmLAwV%milrVYW+5CKO$=ag7Yla7|i89L4{vkP%$f_pB9FLPbX|vZD5i z`;({pJ|(s(ViU!=WGe2`t7bb}LUePVaLHb*Bw2L%Ub?bBTF-q~MphqUM&Q{+4f%;g zqVW{W&6kwzSfW0FO8&vo;%zFQ0q}qjjFj7-6AO^KQ**y}E0xdw98?OWfyDTcn@ zs)a&7h4`qd3CJ<+nt&3ScF>(|9zE2d=Y;yM^4Clg@0z~hop+#>j@cidu0PYD-8>_7 zW)_;n95cs#qjAT*uVZ;__{X%*KY!ds1B^`ApF?e^{o%XTA{dV$J8q&Y(}sUCiQupg zn;M3jiZfB*7XRPUi^%jRljqm{4hy^sNn1@zzWCi0-apG5I}fo%%gtAgAu=-F%Tk9; zrxlENsaC802iM!Zgm>X+eI&A#_cgvWDS7O>Bts>VB$bb8O^7buO48zCsJkjmW(j89 zv$@NtSYU%2fOv%Lv#EF_JN^~_E4bas&Lp1ZM(a%Ux3Jo+k+e?*@hY5Mc+)$Zpz5ke_umqZNi?K8%UQ3AoWRkVfs3W)8mx8C2U5|4+NF`|t0 zmnuJKp3sSovvt@+gAt?*LJSojI<-ETj)Xsn@e%zH(_uBd#8xSe3sn-fqL^8<}s0vdN0J8_mRq=&lf zi5*e2{^S_d*g}8pgcg-{9~LL0Dep)>iV-$_?gQSRd@M*@d4Y<9M#g;a5@&ndm#Rqx z?`gdBpcfoFJz;@2riDnC8)!p>cOv-xsEj)~15J?ZOk)e%w4`24*29`^qDi;VU$<=7$1DX;em;ldOc zXl_R{=pOru&rR2A_$jNGINcvicY*x}uvYq-r2A9zibc}SqMVV3`vqRqh>~rkw)z|4 zu?m+~3~X;~)uaS21(UuX#7>p&jn(Osu)^R)ZbFce?LG9~d@#Snbe<&c^NBULoQjkxd zz!iEh#%H#GJwzK$D)kcDIWIOWp9&cKqKJuC0XLEoL-W8eKd+o5f=Yq}W<ZnXPYKnhKWEk}_3?LUnc$m{`Ll8^{4GkZRrA2dg1YJiwt%Rt+VEf*B?E&H!yE`# z7uf?H@fbYYrV!1O<<+tZ@;5erwda4=Z#xovpOJ_S(<}Pgj2o(_a2YlApM*#Ek}ynB zMjBHj?gZ5fHTU}FKw_kl4s2he&YBEtYl!|Q{+v?o2xfhfby0HB`Pqs})CylEz5#Xd z@F`0MtG>C*(XXd6u;w}kUQ41GxP%}*a6R$bErnfy^&wQ?{gE%z?QYpDE@WBkJpAJO zxI*5oMzzVWl2In#4WBm4aU~%%*5r#_dqpJ*UUnvJ6`?+>ns_EievrOSHNFkDfqKMR z(btCNL<-+CRlc_pFIvm}PuvTm`U*KsHq;D?w#&1+UwsAfE{@72#*%*^ZrCTU#rXU0 zSiW3rJFRtgA&;|_9|$o?%%;Vf+QrDBeSm5rphd|aWS6OnoDN?IXo8_M!l;oAaMctV zk-#=dAf8m6X}mn)Y+pU2)@A+tsQ5GC+p zL|H+`&Z*0LM-ctKx;aLKzye~<^!K}ySi0E>Hv{8&sdGQs3$eYB^N!TnU)9>>Y*8@3 zROb#^E^>D9%zHG6r?K)Vo}uPAA7(w+ttUp-f_L3w^9f??@M2P;^grd-d z(CbM7V*TpRpsb9s-PftzXayfA^DIw_3GQPRD!xhDV$I*(S0lu-(Pj=IxTxdj?3Qc` zqH~x5#}4u^F<;Hq04s-;%U@N289om1-G|}|M@v|K7V8mq5x5VyzIbLzG_%P`13c*waoU6!DBE zd7Vk{uhI&kX|#}}@ck*A*JJW5{%|rVXF&6Cuk#bBEI}=6z|#+lsp@8=E7*0fels;?A>QPX{#$)5sGL zBL$(wfvwv%nZ9>iI4ourh$cgNVU8)LTXoU0d*r0SZ%%8ewYzhdO8G-2yr4|ys;dG= zW+snxt%kJtaQ*~wmPqb*u9(4T#^>^kS$KO$N$2!n@m*TYJ@ZOy62(pZuq|V01CtpI zf+@U|BnoX76C*5@cH`6Nqf{pdtWr)!w>>saHWuetlF>zd%lfSBh5$k`^IaouD#G=T zoP5IYRHW%?jG5a~v$O^BA*cc`Xy-94@GQkXP~ARtg#J6tUVf2oHwF#nwxT<(%)sP( zIo`OuJrg3+yIGg-`Mw)F9utMA-WPY%&5kRj#oPUH{J+A9eInux&WDKoE9wldoKAF+ zX@NwYh+YK71^2NrR2^~Y-LH~21!^YgCX*jnKolh~;EV)Bqngf7ln5CcV{bZ4>@Z+3R99AFfheqLz zi})H=_f{lva1Ml)ruQ`06hpI(*C!4*3ElWrBY{;dKu!MTL{k>rQFoW^sfoP?EU`;& z7p`L+4h@0m#07op*#a#Mo4Z|%d?NbM`7YPgQN9QoKf)dC@kJ8})pogORbACbp2t62 z3eDD%i>BCxt^-4D>cY$|{ya)TgEbsa3UMAtlYI9Qnc{fsoZ)8Xq0!N_gv|9n()MOa zW+f1jVY7+(QQj`8FZ|S#1%Adtfp%Zq(sWT_3&oUic+{<9Q^0y>Qm?;&`<~jvkbfJV zhVL7(;{aB|w!rTF(%lJ3rq>~*E_YPF1^JlPtZHxVc>0l~y*uvSzi*V{ky&ZvtZ-y- zKPiu0CMbGN{Ne$GmQ9U9V$;fIW?|B58it{4xtnodc`P-ufx_Y9mh3qZO2u-yQ5}7F zO9;w(!KJMieC352ZMt2P`Me@PH+VVC%l|2i1MSSIAbUQnqf)8Q$cdzCNd82S2$2wY z|7VY9Ub@HK%&|E4S|j8-@9HN+$(ZPArYxD>6o)*Y55KxM3w6Nxc?)L3edR9~?iBj@ zJF$ax?cM>+W^z>9df8=Fj}!(S;v+ZYVv{`*F9estzb9C@MS$!4<$8SMA}sV&EBoow z1kYaeQs=W;rdi0QCVK|nY$;;wDFPw}j5APs>_ixhE|JDP>s_BxGI~>PF=e%XV z5(3f`q=ZxrYxiC`o)uYKU$&}D|J~;DNFwAscfpEtJ!jB zNKGi)$%pnM$zAA_DLdU?)pC`%_ir_AE|WIAb&#(0y=FMoGBxby&j>>!p{ArzScIUI zpWPqjz{2EQLheyy9jpajlhKeVKTiBCoD0dfRX@C%5`fIXP>Z^-@dPfSb#ftOObkh; zc+g~DyTu#tr%eyi{fSp_X8ba=wCLsODHwl##o?7$udZ7N z|k%y9bb8j_|cw@xB<#DaI^M z|1cb!=`7lCTbuGJrv)wjt-q6&1LdsPKjq4dY$g2_2 z@v-Vdse>PB4be` z+mklJ@K%?QWQ+;{-pLowad#ffh2BFO58Rh&vtI|iFEKGYHpTtwctZhkE5f$Ps>0Mq zZwH#}{L0v{i`>dp=bmS7q69b+QeQNlDN&opxgLJGb(^N)x@!A-wo85aNVzWiO=`?x zBKEl{ICbwuaauvDt5ZL_vd0lo{YI^Gt$Q|U->J32nI 
zZ0R+*!2GeBd&(^1Wh$4qj0(LDKUI?ul`Kv;NU7mKh3CeDAge{#DYJEWJ z4@8rK2&HaE2-Nx#cvA57RK5=rz$LtcKEgEeJNNpXHtwzX;Fj?!k(BsFcKlh5u7Vkt zp)x?ik1pAMNh2yxau{R(le}5g zxN=!Nw|Xl=bX(0M2G1vd zF(IJn@uev)Z`eFPnla-*Ov-91iS;E`m#^;Woe}3d;lmzD)C!jHGlRj+|KRmw|Fb=l zF*dIbbb(y$GQ%`S?}x;(eWmb0R!Jl44Yx64$foMQ5@g*j>ZM^)p}3-$Kg;g1kgcO- z>V&oFqzIbwcH`n(h%ZBD#OCQ{xPZ@((B)0dh?$>anjId96Uc5~AKpaA=q3i?;6ym* zTont4v(o}i{2y61xLh9|9ij(nIrKSn^3o7QQuVFPf*de<{7@!2cRwO#_teJ4QAu=J zT?9PpSGO7aT{AjbrMzDjk!H=VC=$pZBx4bbN~uPxeIVwr{ItOSO6WL3H9~`cIwbJg zFzd+OSlR4A2qz-c@z$nqnvP3g|~ap%qVE!0o$ ze@aaV{@QzPFoOYOcgL|C7*Imk(gX@nTm6g8q?w1qS)PRe% z=ZEJL7RH$qYR=8Z%=6h#3Vn8G?^`#-2Tt@f0$~Dv@4+Q5MK8q4kF*DOg#DKHX)RaX z&PH?i2(u9%~yuVR|vd~bokRqM(I}yJHyk>8-~<{y+^W~cK(~mXKP7-4fHXy zcF(u6c7TdFL5lP0o;%jr?wW}n^?WBTd5c`=ksqNO^%0C5k6ne_gmyMAjUs3n#aS;C zoBvH+$f2R%siY7FHxP*8PSe6PoE;VxF%C9JQdy@_OTL%5(M+=I#;R2jmiRV{4;{0Y zue7;}uieS3(w&HD^=&aj1W^>XNVY%K-+{EBn@Vj0D8*!Ai04~Z9L0M5?pwWT?Rm8Q zUb#C!=DRC1s~3|X#S$dxzmttgg*G&D6b4dkR>^$no5wUj&$ za|^;Gdzc+|2w31mM{a`MgGRrzgi7JHFUpIv?H*5aetjAi( z2}^2D(9c7&IRt0G&3Ab)z*M|-6bywsAKLR`;7b&^`CtFOU_?J$arEd%=N%b-zRX^eF(8b{3Qr zhSPG_lp;_z6yV;=kH>SI7qpCEe28jY$9^J@@#@rPOx1gWEw^)J_%enadJq~mzcY|H zTkC_RgZUsPw*Gy+;Y@jK0=4v6UL}g_(Sq`pyRAPHE&OvpVU zAGzqZ9I8BQ>9m~)r~-pz&Uxe2G)g;)OFNt-yesW}F1OBo>KiSUKLiwz$eYhl-xyM5 z%`E>$CG7F5A4RSCX^Q-h<{YO%t1&%5c{Sjl0COmEJT6>wa>P=rStge6=u|nDD4#b; zo}4CC`G%&oQ}Pd-?DZNx!}SQ?X|pjW3z|x(3xN#qOF0qG$A#1WVbEJN@HaxC68;7M zaTJVv!J?N#>3rI{-t7_2w}jJXa+YI}mhzZOfvsJ#*a|cZN)mi%-dMo#eBANwRyz-G zJ%Np7pTA%cwe@PjdUPI}Ett{B%WQaaP>*h^sj;3M>`&ecYIy4-=F(uTEfL#J{N3vH za?K=gqzh*b0q=TalI5I!!kHOf{~o{dxSDo*;=m1>a@%lEzpvOe$)91W;-*AR!7cAN zc$r|GUY)pe^69u5JNv!IUHK70N+i>a1?y);9Ox5HhjELRta|R4i&t8X!zp&dQ$wlJ z`XGJo=TKCOGL*au{>HfHpsDev=ig> zX4D`z;rjSksvv|mW9XHLp7nsB_<@yuIlJLK`syXZq3W@6YS6X zhRca@3*xhA^%1o}=ic-g=iVC=An$VTCp%lMRKx8(qQ7b-2^MW&yiSZ^^0Sc%X1?j! 
z)?Ub!lr)vfu5-5rQtwI-9iCrH3f9-=QutHaRAY)LAzP9{Ua4AzesEgZu)=p31Yhij ztSHy&0mQ0jhGOcqWi2cU++FHPPhOk~L=}+jdHYNX`>f12nbZwlO96l`Rzat7NGT8F zkWQabfhPp@HwIqwn8)viHvI_&(Ir@Z*d!(;I#?AUlkg^rsx?CX6JqC#DjJLVu zS#chnOJjNrCb#&9S)$IVL=0Zj1ckUQ)DA$lm*UgRZx0_>2AI*mEgg}+EuBtJx12R% zLC_4}`e)z9E%b=D^xtUyM67i=1cVKz%#dY*rz5dmvsHAArw7rpG>S#J>bLtg&Fc${ z-kQmboG!E~NFR>HBAg$?zSDl`_~UbbKAdkg#2^C4fTp`(cUJhfz1UNSvcfK=EsEN^ z7@Udsb>9`_tFf!}9cOju!4Elf>f0(-q^7MAU2QQEgN`V${B*+Vu2aEeC`yQ}^pw1g}xvpSgQzfP*-3@u$-0RH__!l~C)SlOmi(kk&p z&IstG0E-)dUp5@7R-&AM;o4ax6*}C&8wRUhtQLmirTk?wpE7vUMDDI!*v?L&fg6k| zli*sCjky#PlR@@4Th76`Srf&iWft$|g7zp6|MWaE&0yeVao;+ko8ISyjHll&z|5m0 z@&NFw9?M526EHf>G%5XC7VUy5VS+Th+#{<1=tUoa#aJxe;|cFKa1%LuD8ui}UG?Ur z+zRq3I(sUhfA(8hH|S2jwq4gU??_(VK=%O8)m7|@ zP@7|sV9GX{ewIMeVFTfcb7$Vw6mgKU^ycp z4XPtALO_Sr8m>&sFY0n5bm}&!me)~k-^&>Y-bT}+3Zl97{`uSlAO}(wo#!gkP?b6M zN*yAGd)EG%9#qedk}gpwIfNeRt%(ZR1O{lVZQi31Qt)B3#Q%^@53j29wNb~vYN`@p z>K45i+0?nN@r|92MI;!*o8wikFnIu4b}yZcvH%*zj=7V@)OJCVtxqv zf6D%nxpBY;+H|$xi(4k{Gq6VNn1;G+#=%a)d*$(2+Wx(6IW9QAB5a=Rx2AAV)W(Z_ zMlE!lzrYM$M&glt(l?Oa$DgGobkF=>nVpE$lR0OHgtNU3slMX@RQ2KSm%@stYCKukUtOTBh`K&E>+*~c94Sao+bCpRo80#O{=?-jr-|HQ!GQGD&=c0( z2i2OvN|=5~;yq`2b8IdLnFvCd!zL#nSt&W8G?c7B_e!Apkw9#OT-xj930&y=jh-lqd#=0tMEoC)A`~BsSEOBjbMPBdGLrAE3cYS5#d4lArL11IW-HxT| zZp7iW;TH+#k}SJ<*ff2fobT+qwoE^1KS_ z|B3o4ZZ1hzD`5JIO%o{D|M8@ZKFgiQ?>H0%2IT{UgBvOfZ9O%$CGn!>OYa*hHWq-< z-$1sppD8UHz?6;3D<6|}nsc`vPn*&$p|oQ5f{!s;H88q72IYj=cZs4}SRKQGwSASPh z9CRQA^O)b$kCP63!4WL!^7E6l#~H|M@0NzSOrE5alCBnEQol+Fi;Ve#x75_SUA*CPvJXI;dBEY_IA6B%&0V|MLCG3tDL*cZr@ttR2i z^123j$Ni_NY6+3&*Ui)k%`>y&g%T?`diys?&n{_9lRQ#HfP%ooHg`j=^yy?R+_bMf zr1+yiXsHh2ZYL`* zMBT1&t)nDO=gNss47*mX@ypY|2mL7eFP0iED;34Bw7-%8$+$Kx0~_9K7f~k7h@>WY z$3q{t6hKc1@cP<#lz98+s=xx&Kdv%l4eCewiW2y0tuMhj>O`QML!sfkdb7s3 z&Q7k1AYi1jrfJ%W+> zmuvKxeEMf_wuw7(c{C}(sbQ}4N^TNEGlJO3FD?xOd+ZVd;sXNWO_eBcE(qS%|4%^3 z#ESwG7kYwJbVXX8D62@-P2lN)uIbpMi(cZw*br1ty1f1bfLb3`V8nC5wgjs; z?^ZJjGao5NP2Cm}s+U<4(dCfO^rAxFV&q6@J^4OUQ_<`BMf2FsUI@WfXGs2l_kifS zlB+5t%yrC{`0%Q)ik&xq<;RQukzi}~ti+j#-{q3Gh;#F%OlLsNlG8lquBkmvjzO2z z!7?j)5Yl9fDQC$9&~DG*I3ll&tf}zk*$KUOZ~&w38O6FuopiO=+p$;DhKDnf_+Gm> zTqCXZ$Ov{_8!s422S8!_VSZTx_3EI)miDpWBiM;RmcbWY&9&Fc=D%;Q^o53Kk12y#$T99}3~nZ?N5T(`chD6(4h^ z=dkHxI|{k%PY&$OM#gf1sNTsnhtWjvZ(R)%84Rs&;X3Bj^*Rp2k7spDOiYdA)N!i| zax5w<8T!3m=a*)cd_^ICjE{0Dqh^3n8N}nUT8|$&T5Dy*iyn(~R8OnGy)`X8Qg&ay zdvDjyTQ=(<c`Z04R|G+PaS zuJs1y+FOgkntlIX*&3hwxcV}GrI9n&ftk?-Acq9t>m*8_-1IP(@75SZ-?&r#7?{Oe2cbFXN(*yjl4QCE`uqj-3DcDMOlbX5kwJa0 z%E<}ndb7{lwzvJcpUYyb@L0?eThu_IYXTf#(fp^f9=WvVad%-n_380`=P3cW>Cn?g z0XTHY6b1WWxBrNAPJcGCYdFebg~;csL96XTRRW|r6-(mr`cHP4S3PD8-~3(p36YUd z=}DWVDlVrtC!6f;!n!(l^UFC(cc+GqB}jIxlal(E#W=W;=oJ{g-r&vH z)v;a>A#&49@rJT$>nibvZQW&lM_zk0A*aMSkA3j?eTc`^+uNC_iWlo3lUumes6JM# z%Cvwt<)ng6)wYD`GGPAQ5dc#6F2gfvv+d z-d-h~TSUDSHwb_(m@(q9&4e_jPvQr1LO6XNcR1?5=r?r}ELsT-2K8{c4;E6H?w-jX z%T082mfj!`9CEX=q|gwkNN||9O+0z5x()0)P>CMu#s<@+@Lz68RhxZFTJ^;MLaTji zoOZGp(BNUV!oFB$woN@$|s&dUkseU!C&EAa5c z_#cHNp}54T!-qwUuoF77CDwVC1U}9KQYNJY{qf&rsj*S5;`Az9q-NO(+GP2+8+P6OWqN^tm8i;@f^P*Op1mF+S-k4RFsM6hYoQ1 zWj(G{yJNyN)41gcFx)O>Bi`fG7~sl0qy`O4kv7l0Byq<26JC~OX(o#dl`!nRPvU6} ztRxk3X~jlqc`2vShr=e)s}=Ve9qXC^Nr#Xe(>e$^MHik-e1RMw!c}%q9fPXYkPmLpHhI^Z%taMw(76uKx=H?FB>dU+hNmL--q+BrimM6n zfgh>pzynjAXy#H+bpu!%B2!U^7Z63ib;oc_HXw7jv5H4Uv)5*@1I@Gq*D#$eT+Wf- zJ;&SNARw~GIjNH*V(T~nN9ijkDp;9+${tle<9_+4{rSdXv8ffzLg0FJ+ogO04(W1W|kn0O|DH9e=YDZtMP ze>~ZBv#-8Pu?%%n39YC~$41d1M@P6Lx1sM6gr_)fb?vzw8CpvQC}!K&M{w_7+=@GJ z=IVP5;l9kXGsW?uByQNb?QBz)8?m|K$dvGxT{Mjb4s};2v`E&TX|g7;j?%CShaly= z`cie`pt~4pc5xwa(J$nQxl|e_HYQeC+iKk(kNkA0YmL*$I2Jv(5-ywat%=yg{>a}3 
z$m|Zs#Fk8Z`Dt`Vr}>+XOOoM>%Vwcy-ZL{YZYJax!i=du2Ib=x1S|G}nBI)%ltV#Q z0tT+#6vqPWr+?HNlv3feU)st%=QX>9)_l!CC^&zI$^#XGBiojqqN;kvVxtG z)WgczEgTSMMJDt;`OOcrE(xHoi+K;>1E38Fx$zeD_ahw<3==k2dcB*4*Ls{j60P;fG3V!l52ITg6w)fZ2}d<{oYdy!YT_ME z2LvZxk+wHs`oor(Y0VF9%2~;pi%CZfpl>S0V~eE|7KOq`HKnQk^|m*8M?Y+(wx1(R zmr^9B3K4aXYS`|4(_)uIuK!KO;_PwmW5Rm?;rtCfi^y#~{ZUE2NAiZ1eL0Dc;ZCAw z+)NiM!jcpp6v%t{F+c@iHWk) z@c8hFxMWwI+(F~!`I%z}L3-snpr3P?jlEuYpoW@-jE-#vih$Gq0K_*pbsx<973y+) z7nTy)`-H)pWHD5q5kk78&W0GMjC)$!;$qX=rZb!x@jX&x$~e}Z0w9LFoi4!jXNA-N ztF1E0S`}dm^!g{7uByc9FAJ`Pb%f>Yr46R5x2_mou}^x3aMJV2?LW^*T?NuMHGDw@ zpCeL(qAAUsxhzdPF~0=|!?SD`{6Y6pgudB7I&t^>5gg9F}L?BVX*vl=ji+(r8gtu853EvY>o*ey1;nM@ zFr()+S24b`FPzWeSG>rVN;=9^eW|Q~qZoD(3`uiP?jt;;Ju(Dt#(Fd?MSnwOv?QuJ zSO1hebP)4dNei69{awwyPATxlsx$b_#<|N_g{#WmOjo{$%S`CdJl@bauql(fjh)-B z7_dFpl*lU1PE%(&)9{@~wX3S$M0fG<6Y;Uvg)7F-dInJj1D8tRB5PJ;Jgh;=quztN zCb(-U8NYY)bBNSB{HOZh@10D(PzmFa9S=*P^K~uc30@o_PESrPu=XyvCDp5hStzqy znBd4Z4-n;I+0R+iP3vbmEOp-TP~<{)*F2HrS|o^oHoz=SFfr$ODw7M^R+Zq)?vq6P ztc)WQNGyzUf*ig(OT73^TOxu3RSGSPG&nzv>XZVXBn}*X`A{28)WJ9ad;I;tP-TgiIAkhNK6N<(vMY^>g%GGlt?s3p_DZQ8=$(p&uF5yX$zy(IYL^p$f&iuHMDhi_N+>2n=}!ST@&-L_0C8oEHs^rsB-A84jEu*p`Gl=p zThqb%+0Ut0`M;^e(epAPSO%GDzOIzk1`<-yKrV4gEKXzOTv7B*PQ=8xqjtiyIi_Y( zPrH^oyYq-Q)n-x=+fz77*n*EkwHll?>R>VzzQ0)Y1(|FOF($mkTUyw0np*8lcVHD`?uPF{O0F5Y(+xIhOx{_n5a>NTErEdcZA-3mHNZ5S5_xN zdAL#9ZLlLS?DM}P#`mWnrj=qZH*g5Opo%+aA$jks_@{W} zOnX-vB&5DhMJE9&T#ERN0zBA~u2t}O*_Yc(9rxt6SB1%W9xF-`}X*+VKt zCnYzSjXWIM1gnDNo@|bo9Vz*P76c#2V(2H5YVBpNlPCHlAIdd(b370=r7^>;fyR8~ z#a9U&1kkcQjK{@nQ>pJ}v>`0(eF|85JkmPkvutaC9il-ojQXe(his6|6?1`|Spx|k z3HNtCm8J)9)Zy*F!c1>M;rwjMzsX6|Y$1T|(0pbD6+DLV!1oi$uN+i}L*zzSa718K z>llIoru7p9Dz(vd0r)Bp_x5dHkoDeDao$;4b`vz}n zZ&Bie`m*+Zy%cY1H-`BjWo&|p$eB-bl!C>)N}i-co>^)gN|%mY=t5X(0?iC0nH#Qj z1g_tEz~?(c20S!CFj&xOKM%h0rW*bvPYIj?!&bYTZZP27^?PjUcgXu+cEn@n6q z3_A{0#$a5G1rTz-5Oxnfn0%y0C|cLPEaBU`XM}YsqmG9AfiaUlg7(p(lB#(m98c!! zkBg~TA5YO7H;6G5|7F;u?hX+y)+VrLTy(Ep#-+>7!o0uD7sdXIv`I#Qbb0iec|*4( z#-tr!NWf{abKf-~o6%PFw1$w-xi#LoPR}JiK;Hmc*{*_89fj1|#ad06%X@S3bEQ_Xl5l^f*eu6E8_w>sr5jiGt)7fv&R;{+B?>w(A_?4hfpQqR)Uwn?Fu`O3m{5|cw7MdJ)6|f zvITx?BqXUZ@T`-M3?1M1Cq*t}IE>HMs&^K8FxDEcK>LdCc`qa@K}BJ0?%AD9X-{^8 zeWuFn!a5#T_HgX+J{cb4WEhrkk_}=?I+DVqhTgO8My!P7s`7It@Th%;i>fO5$=Xul zwvZkA>-;i?E4IqHahb>xNKRsOh)E{95%os-!GFIT|AS?g7E082AB#p=V*;q;(s=K0 z>?BW0O4~z_cZjkz8NGc^h!2(LrZ2v~-!w&+phc7gk(ss12;fK=}yi4atamT%(hL|A!H z{CPb1IqeVaMK>Gq!7;^f_V6=*?AaXmiGWAJir<^v9L2e_IIo^A;x?_K5uSyb>9FA8 zDyWp5MhQuJtmO?6XF` zQ^4J0%o7Am*@z7b{gcQ}%4?3q+rPA2!-t7doh7jKPL8?cEFrss|7j;hs zcV_=RLSqDZd*~4jNskJQ{G8aQ#`#k;uCxr=zwutf;4Jcq$o{|@Cx|srx_Sv#=o`>`413 z)ck~XsSw$bxzMsPCp4wVF%ig_1yegspobCwE#6+&Ac~niw_NQcWXia=a%f^1L_a=} n86M@9Y2e>M`*&0bY|lHYF~TK}rd*2y1o$N-CNEkgY#8((aTkC3 literal 0 HcmV?d00001 diff --git a/website/docs/dev_deadline.md b/website/docs/dev_deadline.md new file mode 100644 index 00000000000..310b2e09837 --- /dev/null +++ b/website/docs/dev_deadline.md @@ -0,0 +1,38 @@ +--- +id: dev_deadline +title: Deadline integration +sidebar_label: Deadline integration +toc_max_heading_level: 4 +--- + +Deadline is not host as usual, it is missing most of the host features, but it does have +its own set of publishing plugins. + +## How to test OpenPype on Deadline + +### Versions + +Since 3.14 job submitted from OpenPype is bound to OpenPype version used to submit it. So +if you submit job with 3.14.8, Deadline will try to find that particular version and use it +for rendering. 
This is handled by `OPENPYPE_VERSION` variable on job - you can delete it from +there and then the version set in studio Settings will be used. + +![Deadline Job Version](assets/deadline_job_version.png) + +Deadline needs to bootstrap this version so it will try to look the closest compatible +build. So to use version 3.14.8 on Deadline it is enough to have build 3.14.0 or similar - important +are the first two version numbers - major and minor. If they match, the version +is considered compatible. + +### Testing + +So to test various changes you don't need to build again an again OpenPype and putting +it to directory where Deadline is looking for versions - this needs to be done only on +minor version change. That build will then be used to bootstrap whatever is set on the +job or in the studio Settings. + +So you can either use zip version if it suits you, or better set your sources directory +so it will be find as a version - for example with symlink. + +That way you can only modify `OPENPYPE_VERSION` variable on job to point it to version +you would like to test. \ No newline at end of file diff --git a/website/sidebars.js b/website/sidebars.js index af64282d61a..f2d9ffee061 100644 --- a/website/sidebars.js +++ b/website/sidebars.js @@ -157,6 +157,7 @@ module.exports = { "dev_host_implementation", "dev_publishing" ] - } + }, + "dev_deadline" ] }; From 1fc8528795d971d54aa4cebd4e74032828ffd712 Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Tue, 6 Dec 2022 19:11:45 +0000 Subject: [PATCH 059/130] Remove redundant infected code --- openpype/hosts/maya/api/lib.py | 25 ------------------------- 1 file changed, 25 deletions(-) diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index a415f625c0e..b2bbb823aa8 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -3436,28 +3436,3 @@ def dgcontext(mtime): # If no more nodes to process break the frame iterations.. if not node_dependencies: break - - -@contextlib.contextmanager -def selection(*nodes): - """Execute something with a specific Maya selection. - - Example: - .. 
code-block:: python - - cmds.select('side') - print(cmds.ls(sl=True)) - # ['side'] - - with selection('top', 'lambert1'): - print(cmds.ls) - # ['top', 'lambert1'] - - print(cmds.ls(sl=True)) - # ['side'] - - """ - current = cmds.ls(sl=True) - cmds.select(*nodes, noExpand=True) - yield - cmds.select(current, noExpand=True) \ No newline at end of file From fd6345743c330e7d6f2cd80a7b333851e7c1f75e Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Tue, 6 Dec 2022 19:12:46 +0000 Subject: [PATCH 060/130] Creator settings --- .../hosts/maya/plugins/create/create_ass.py | 48 ++++++--- .../defaults/project_settings/maya.json | 19 +++- .../schemas/schema_maya_create.json | 102 +++++++++++++++++- 3 files changed, 148 insertions(+), 21 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_ass.py b/openpype/hosts/maya/plugins/create/create_ass.py index 39f226900ab..903a8ef0cf3 100644 --- a/openpype/hosts/maya/plugins/create/create_ass.py +++ b/openpype/hosts/maya/plugins/create/create_ass.py @@ -1,5 +1,3 @@ -from collections import OrderedDict - from openpype.hosts.maya.api import ( lib, plugin @@ -9,12 +7,27 @@ class CreateAss(plugin.Creator): - """Arnold Archive""" + """Arnold Scene Source""" name = "ass" - label = "Ass StandIn" + label = "Arnold Scene Source" family = "ass" icon = "cube" + exportSequence = False + expandProcedurals = False + motionBlur = True + motionBlurKeys = 2 + motionBlurLength = 0.5 + maskOptions = False + maskCamera = False + maskLight = False + maskShape = False + maskShader = False + maskOverride = False + maskDriver = False + maskFilter = False + maskColor_manager = False + maskOperator = False def __init__(self, *args, **kwargs): super(CreateAss, self).__init__(*args, **kwargs) @@ -22,16 +35,27 @@ def __init__(self, *args, **kwargs): # Add animation data self.data.update(lib.collect_animation_data()) - # Vertex colors with the geometry - self.data["exportSequence"] = False + self.data["exportSequence"] = self.exportSequence + self.data["expandProcedurals"] = self.expandProcedurals + self.data["motionBlur"] = self.motionBlur + self.data["motionBlurKeys"] = self.motionBlurKeys + self.data["motionBlurLength"] = self.motionBlurLength + + # Masks + self.data["maskOptions"] = self.maskOptions + self.data["maskCamera"] = self.maskCamera + self.data["maskLight"] = self.maskLight + self.data["maskShape"] = self.maskShape + self.data["maskShader"] = self.maskShader + self.data["maskOverride"] = self.maskOverride + self.data["maskDriver"] = self.maskDriver + self.data["maskFilter"] = self.maskFilter + self.data["maskColor_manager"] = self.maskColor_manager + self.data["maskOperator"] = self.maskOperator def process(self): instance = super(CreateAss, self).process() - # data = OrderedDict(**self.data) - - - nodes = list() if (self.options or {}).get("useSelection"): @@ -42,7 +66,3 @@ def process(self): assContent = cmds.sets(name="content_SET") assProxy = cmds.sets(name="proxy_SET", empty=True) cmds.sets([assContent, assProxy], forceElement=instance) - - # self.log.info(data) - # - # self.data = data diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 988c0e777a2..a74f8e5827b 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -171,7 +171,22 @@ "enabled": true, "defaults": [ "Main" - ] + ], + "exportSequence": false, + "expandProcedurals": false, + "motionBlur": true, + "motionBlurKeys": 2, + "motionBlurLength": 
0.5, + "maskOptions": false, + "maskCamera": false, + "maskLight": false, + "maskShape": false, + "maskShader": false, + "maskOverride": false, + "maskDriver": false, + "maskFilter": false, + "maskColor_manager": false, + "maskOperator": false }, "CreateAssembly": { "enabled": true, @@ -1007,4 +1022,4 @@ "ValidateNoAnimation": false } } -} \ No newline at end of file +} diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json index bc6520474d5..6cf11e4cea5 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json @@ -200,7 +200,103 @@ } ] }, - + { + "type": "dict", + "collapsible": true, + "key": "CreateAss", + "label": "Create Ass", + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "type": "list", + "key": "defaults", + "label": "Default Subsets", + "object_type": "text" + }, + { + "type": "boolean", + "key": "exportSequence", + "label": "Export Sequence" + }, + { + "type": "boolean", + "key": "expandProcedurals", + "label": "Expand Procedurals" + }, + { + "type": "boolean", + "key": "motionBlur", + "label": "Motion Blur" + }, + { + "type": "number", + "key": "motionBlurKeys", + "label": "Motion Blur Keys", + "minimum": 0 + }, + { + "type": "number", + "key": "motionBlurLength", + "label": "Motion Blur Length", + "decimal": 3 + }, + { + "type": "boolean", + "key": "maskOptions", + "label": "Mask Options" + }, + { + "type": "boolean", + "key": "maskCamera", + "label": "Mask Camera" + }, + { + "type": "boolean", + "key": "maskLight", + "label": "Mask Light" + }, + { + "type": "boolean", + "key": "maskShape", + "label": "Mask Shape" + }, + { + "type": "boolean", + "key": "maskShader", + "label": "Mask Shader" + }, + { + "type": "boolean", + "key": "maskOverride", + "label": "Mask Override" + }, + { + "type": "boolean", + "key": "maskDriver", + "label": "Mask Driver" + }, + { + "type": "boolean", + "key": "maskFilter", + "label": "Mask Filter" + }, + { + "type": "boolean", + "key": "maskColor_manager", + "label": "Mask Color Manager" + }, + { + "type": "boolean", + "key": "maskOperator", + "label": "Mask Operator" + } + ] + }, { "type": "schema_template", "name": "template_create_plugin", @@ -217,10 +313,6 @@ "key": "CreateMultiverseUsdOver", "label": "Create Multiverse USD Override" }, - { - "key": "CreateAss", - "label": "Create Ass" - }, { "key": "CreateAssembly", "label": "Create Assembly" From 3cf47e25626675ec00d1202fbaf99d1ad4b95030 Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Tue, 6 Dec 2022 19:13:12 +0000 Subject: [PATCH 061/130] Collect camera from objectset if present. --- .../hosts/maya/plugins/publish/collect_ass.py | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/collect_ass.py b/openpype/hosts/maya/plugins/publish/collect_ass.py index 3ce1f2ccf15..69af4c777db 100644 --- a/openpype/hosts/maya/plugins/publish/collect_ass.py +++ b/openpype/hosts/maya/plugins/publish/collect_ass.py @@ -8,6 +8,7 @@ class CollectAssData(pyblish.api.InstancePlugin): """Collect Ass data.""" + # Offset to be after renderable camera collection. 
order = pyblish.api.CollectorOrder + 0.2 label = 'Collect Ass' families = ["ass"] @@ -25,7 +26,8 @@ def process(self, instance): instance.data['setMembers'] = members self.log.debug('content members: {}'.format(members)) elif objset.startswith("proxy_SET"): - assert len(members) == 1, "You have multiple proxy meshes, please only use one" + msg = "You have multiple proxy meshes, please only use one" + assert len(members) == 1, msg instance.data['proxy'] = members self.log.debug('proxy members: {}'.format(members)) @@ -35,4 +37,17 @@ def process(self, instance): group = re.compile(r" \[.*\]") instance.data["label"] = group.sub("", instance.data["label"]) + # Use camera in object set if present else default to render globals + # camera. + cameras = cmds.ls(type="camera", long=True) + renderable = [c for c in cameras if cmds.getAttr("%s.renderable" % c)] + camera = renderable[0] + for node in instance.data["setMembers"]: + camera_shapes = cmds.listRelatives( + node, shapes=True, type="camera" + ) + if camera_shapes: + camera = node + instance.data["camera"] = camera + self.log.debug("data: {}".format(instance.data)) From 76bf9bf4de319182704b57687b9d97480e8b003b Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Tue, 6 Dec 2022 19:13:42 +0000 Subject: [PATCH 062/130] Working extractor --- .../hosts/maya/plugins/publish/extract_ass.py | 155 ++++++++++++------ 1 file changed, 105 insertions(+), 50 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_ass.py b/openpype/hosts/maya/plugins/publish/extract_ass.py index 5c21a4ff086..b6bd4a2e22c 100644 --- a/openpype/hosts/maya/plugins/publish/extract_ass.py +++ b/openpype/hosts/maya/plugins/publish/extract_ass.py @@ -1,20 +1,17 @@ import os +import contextlib from maya import cmds +import arnold from openpype.pipeline import publish from openpype.hosts.maya.api.lib import maintained_selection class ExtractAssStandin(publish.Extractor): - """Extract the content of the instance to a ass file + """Extract the content of the instance to a ass file""" - Things to pay attention to: - - If animation is toggled, are the frames correct - - - """ - - label = "Ass Standin (.ass)" + label = "Arnold Scene Source (.ass)" hosts = ["maya"] families = ["ass"] asciiAss = False @@ -28,50 +25,59 @@ def process(self, instance): filenames = list() file_path = os.path.join(staging_dir, filename) + kwargs = { + "filename": file_path, + "selected": True, + "asciiAss": self.asciiAss, + "shadowLinks": True, + "lightLinks": True, + "boundingBox": True, + "expandProcedurals": instance.data.get("expandProcedurals", False), + "camera": instance.data["camera"], + "mask": self.get_ass_export_mask(instance) + } + + # Motion blur + motion_blur = instance.data.get("motionBlur", True) + motion_blur_keys = instance.data.get("motionBlurKeys", 2) + motion_blur_length = instance.data.get("motionBlurLength", 0.5) + # Write out .ass file self.log.info("Writing: '%s'" % file_path) - with maintained_selection(): - self.log.info("Writing: {}".format(instance.data["setMembers"])) - cmds.select(instance.data["setMembers"], noExpand=True) - - if sequence: - self.log.info("Extracting ass sequence") - - # Collect the start and end including handles - start = instance.data.get("frameStartHandle", 1) - end = instance.data.get("frameEndHandle", 1) - step = instance.data.get("step", 0) - - exported_files = cmds.arnoldExportAss(filename=file_path, - selected=True, - asciiAss=self.asciiAss, - shadowLinks=True, - lightLinks=True, - boundingBox=True, - startFrame=start, - endFrame=end, - 
frameStep=step - ) - for file in exported_files: - filenames.append(os.path.split(file)[1]) - self.log.info("Exported: {}".format(filenames)) - else: - self.log.info("Extracting ass") - cmds.arnoldExportAss(filename=file_path, - selected=True, - asciiAss=False, - shadowLinks=True, - lightLinks=True, - boundingBox=True - ) - self.log.info("Extracted {}".format(filename)) - filenames = filename - optionals = [ - "frameStart", "frameEnd", "step", "handles", - "handleEnd", "handleStart" - ] - for key in optionals: - instance.data.pop(key, None) + with self.motion_blur_ctx(motion_blur, motion_blur_keys, motion_blur_length): + with maintained_selection(): + self.log.info( + "Writing: {}".format(instance.data["setMembers"]) + ) + cmds.select(instance.data["setMembers"], noExpand=True) + + if sequence: + self.log.info("Extracting ass sequence") + + # Collect the start and end including handles + kwargs.update({ + "start": instance.data.get("frameStartHandle", 1), + "end": instance.data.get("frameEndHandle", 1), + "step": instance.data.get("step", 0) + }) + + exported_files = cmds.arnoldExportAss(**kwargs) + + for file in exported_files: + filenames.append(os.path.split(file)[1]) + + self.log.info("Exported: {}".format(filenames)) + else: + self.log.info("Extracting ass") + cmds.arnoldExportAss(**kwargs) + self.log.info("Extracted {}".format(filename)) + filenames = filename + optionals = [ + "frameStart", "frameEnd", "step", "handles", + "handleEnd", "handleStart" + ] + for key in optionals: + instance.data.pop(key, None) if "representations" not in instance.data: instance.data["representations"] = [] @@ -84,9 +90,58 @@ def process(self, instance): } if sequence: - representation['frameStart'] = start + representation['frameStart'] = kwargs["start"] instance.data["representations"].append(representation) self.log.info("Extracted instance '%s' to: %s" % (instance.name, staging_dir)) + + #This should be separated out as library function that takes some + #attributes to modify with values. The function then resets to original + #values. + @contextlib.contextmanager + def motion_blur_ctx(self, force, keys, length): + if not force: + yield + return + + cmb = cmds.getAttr("defaultArnoldRenderOptions.motion_blur_enable") + ckeys = cmds.getAttr("defaultArnoldRenderOptions.motion_steps") + clen = cmds.getAttr("defaultArnoldRenderOptions.motion_frames") + + cmds.setAttr("defaultArnoldRenderOptions.motion_blur_enable", 1) + if keys > 0: + cmds.setAttr("defaultArnoldRenderOptions.motion_steps", keys) + if length >= 0: + cmds.setAttr("defaultArnoldRenderOptions.motion_frames", length) + + try: + yield + finally: + cmds.setAttr("defaultArnoldRenderOptions.motion_blur_enable", cmb) + cmds.setAttr("defaultArnoldRenderOptions.motion_steps", ckeys) + cmds.setAttr("defaultArnoldRenderOptions.motion_frames", clen) + + #This should be refactored to lib. 
probably just need the node_types directionary + def get_ass_export_mask(self, instance): + mask = arnold.AI_NODE_ALL + + node_types = { + "options": arnold.AI_NODE_OPTIONS, + "camera": arnold.AI_NODE_CAMERA, + "light": arnold.AI_NODE_LIGHT, + "shape": arnold.AI_NODE_SHAPE, + "shader": arnold.AI_NODE_SHADER, + "override": arnold.AI_NODE_OVERRIDE, + "driver": arnold.AI_NODE_DRIVER, + "filter": arnold.AI_NODE_FILTER, + "color_manager": arnold.AI_NODE_COLOR_MANAGER, + "operator": arnold.AI_NODE_OPERATOR + } + + for key in node_types.keys(): + if instance.data.get("mask" + key.title()): + mask = mask ^ node_types[key] + + return mask From 31d14cb70fb19e80b96c01b649e8a2c8ff34953c Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Tue, 6 Dec 2022 19:16:11 +0000 Subject: [PATCH 063/130] Remove redundant lib --- openpype/hosts/maya/api/mtoa.py | 187 -------------------------------- 1 file changed, 187 deletions(-) delete mode 100644 openpype/hosts/maya/api/mtoa.py diff --git a/openpype/hosts/maya/api/mtoa.py b/openpype/hosts/maya/api/mtoa.py deleted file mode 100644 index d19fecf6b5b..00000000000 --- a/openpype/hosts/maya/api/mtoa.py +++ /dev/null @@ -1,187 +0,0 @@ -# -*- coding: utf-8 -*- -"""Library of classes and functions deadling with MtoA functionality.""" -import tempfile -import contextlib - -import clique -import pyblish.api - -from maya import cmds - -from openpype.pipeline import publish -from .viewport import vp2_paused_context -from .lib import selection - - -class _AssExtractor(publish.Extractor): - """Base class for ASS type extractors.""" - - order = pyblish.api.ExtractorOrder + 0.01 - hosts = ["maya"] - - def get_ass_export_mask(self, maya_set): - import arnold # noqa - mask = arnold.AI_NODE_ALL - - ai_masks = {"options": {"value": arnold.AI_NODE_OPTIONS, - "default": False}, - "camera": {"value": arnold.AI_NODE_CAMERA, - "default": False}, - "light": {"value": arnold.AI_NODE_LIGHT, - "default": False}, - "shape": {"value": arnold.AI_NODE_SHAPE, - "default": True}, - "shader": {"value": arnold.AI_NODE_SHADER, - "default": True}, - "override": {"value": arnold.AI_NODE_OVERRIDE, - "default": False}, - "driver": {"value": arnold.AI_NODE_DRIVER, - "default": False}, - "filter": {"value": arnold.AI_NODE_FILTER, - "default": False}, - "color_manager": {"value": arnold.AI_NODE_COLOR_MANAGER, - "default": True}, - "operator": {"value": arnold.AI_NODE_OPERATOR, - "default": True}} - - for mask_name, mask_data in ai_masks.items(): - attr = "inf_ass_export_{}".format(mask_name) - - submask = self.get_set_attr("{}.{}".format(maya_set, attr), - default=mask_data["default"]) - - if not submask: - mask = mask ^ mask_data["value"] - - return mask - - def process(self, instance): - #What is a dry run? - #ass.rr seems like an abstract variable. Needs clarification. 
- dry_run = instance.data.get("ass.rr") - - staging_dir = self.staging_dir(instance) - sequence = instance.data.get("exportSequence", False) - - if not cmds.pluginInfo("mtoa", query=True, loaded=True): - cmds.loadPlugin("mtoa") - - # Export to a temporal path - export_dir = instance.context.data["stagingDir"] - export_path = tempfile.NamedTemporaryFile(suffix=".ass", - dir=export_dir, - delete=False) - - set_ = instance.data["set"] - kwargs = {"shadowLinks": 1, - "lightLinks": 1, - "boundingBox": True, - "selected": True, - "f": export_path.name} - - # Animation - - if sequence: - mask = self.get_ass_export_mask(set_) - start = instance.data.get("frameStartHandle", 1) - end = instance.data.get("frameEndHandle", 1) - step = instance.data.get("step", 1.0) - if start is not None: - kwargs["startFrame"] = float(start) - kwargs["endFrame"] = float(end) - kwargs["frameStep"] = float(step) - else: - mask = 44 - - #get/set should be plugin options. - # Generic options - if self.get_set_attr("{}.inf_ass_expand_procedurals".format(set_), - False): - kwargs["expandProcedurals"] = True - - if self.get_set_attr("{}.inf_ass_fullpath".format(set_), - True): - kwargs["fullPath"] = True - - kwargs["mask"] = mask - - # Motion blur - mb = self.get_set_attr("{}.inf_ass_motion_blur".format(set_), False) - keys = self.get_set_attr("{}.inf_ass_mb_keys".format(set_), -1) - length = self.get_set_attr("{}.inf_ass_mb_length".format(set_), -1) - - #Targets should already be collected - targets = self.get_targets(instance) - - _sorted_kwargs = sorted(kwargs.items(), key=lambda x: x[0]) - _sorted_kwargs = ["{}={!r}".format(x, y) for x, y in _sorted_kwargs] - - if not dry_run: - self.log.debug("Running command: cmds.arnoldExportAss({})" - .format(", ".join(_sorted_kwargs))) - #There should be a context for not updating the viewport from - #pointcache extraction. - with vp2_paused_context(): - with selection(targets): - with self.motion_blur_ctx(mb, keys, length): - result = cmds.arnoldExportAss(**kwargs) - else: - instance.data["assExportKwargs"] = kwargs - start = kwargs.get("startFrame") - end = kwargs.get("endFrame") - result = [] - - range_ = [0] - if start is not None: - range_ = range(int(start), int(end) + 1) - - for i in range_: - #padding amount should be configurable. 3 does not seems - #enough as default. - fp = "{}.{:03d}.ass".format(export_path.name, i) - with open(fp, "w"): - pass - result.append(fp) - - #Whether its a sequence or not, should already have been determined. 
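The lines that follow collapse the exported files into a single value for the representation: one path when a single frame was written, otherwise a clique collection formatted as a printf-style sequence. Roughly, with illustrative file names (exact output depends on clique's default format pattern):

    import clique

    exported = ["shot.0001.ass", "shot.0002.ass", "shot.0003.ass"]
    collections, remainder = clique.assemble(exported)
    collection = collections[0]
    # Something like "shot.%04d.ass [1-3]" with the default pattern.
    print(collection.format())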
- if len(result) == 1: - filepath = result[0] - else: - collection = clique.assemble(result)[0][0] - filepath = collection.format() - - # Register the file - if "representations" not in instance.data: - instance.data["representations"] = [] - - representation = { - 'name': 'ass', - 'ext': 'ass', - 'files': filepath, - "stagingDir": staging_dir - } - - instance.data["representations"].append(representation) - - @contextlib.contextmanager - def motion_blur_ctx(self, force, keys, length): - if not force: - yield - return - - cmb = cmds.getAttr("defaultArnoldRenderOptions.motion_blur_enable") - ckeys = cmds.getAttr("defaultArnoldRenderOptions.motion_steps") - clen = cmds.getAttr("defaultArnoldRenderOptions.motion_frames") - - cmds.setAttr("defaultArnoldRenderOptions.motion_blur_enable", 1) - if keys > 0: - cmds.setAttr("defaultArnoldRenderOptions.motion_steps", keys) - if length >= 0: - cmds.setAttr("defaultArnoldRenderOptions.motion_frames", length) - - try: - yield - finally: - cmds.setAttr("defaultArnoldRenderOptions.motion_blur_enable", cmb) - cmds.setAttr("defaultArnoldRenderOptions.motion_steps", ckeys) - cmds.setAttr("defaultArnoldRenderOptions.motion_frames", clen) From b974c675de93610b5dadda038c1e0c59526b3726 Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Tue, 6 Dec 2022 21:26:33 +0000 Subject: [PATCH 064/130] Refactor - use lib for attribute context - remove mask class method --- .../hosts/maya/plugins/publish/extract_ass.py | 96 +++++++------------ 1 file changed, 37 insertions(+), 59 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_ass.py b/openpype/hosts/maya/plugins/publish/extract_ass.py index b6bd4a2e22c..7fc0cc1b2ff 100644 --- a/openpype/hosts/maya/plugins/publish/extract_ass.py +++ b/openpype/hosts/maya/plugins/publish/extract_ass.py @@ -1,11 +1,10 @@ import os -import contextlib from maya import cmds import arnold from openpype.pipeline import publish -from openpype.hosts.maya.api.lib import maintained_selection +from openpype.hosts.maya.api.lib import maintained_selection, attribute_values class ExtractAssStandin(publish.Extractor): @@ -25,6 +24,40 @@ def process(self, instance): filenames = list() file_path = os.path.join(staging_dir, filename) + # Mask + mask = arnold.AI_NODE_ALL + + node_types = { + "options": arnold.AI_NODE_OPTIONS, + "camera": arnold.AI_NODE_CAMERA, + "light": arnold.AI_NODE_LIGHT, + "shape": arnold.AI_NODE_SHAPE, + "shader": arnold.AI_NODE_SHADER, + "override": arnold.AI_NODE_OVERRIDE, + "driver": arnold.AI_NODE_DRIVER, + "filter": arnold.AI_NODE_FILTER, + "color_manager": arnold.AI_NODE_COLOR_MANAGER, + "operator": arnold.AI_NODE_OPERATOR + } + + for key in node_types.keys(): + if instance.data.get("mask" + key.title()): + mask = mask ^ node_types[key] + + # Motion blur + values = { + "defaultArnoldRenderOptions.motion_blur_enable": instance.data.get( + "motionBlur", True + ), + "defaultArnoldRenderOptions.motion_steps": instance.data.get( + "motionBlurKeys", 2 + ), + "defaultArnoldRenderOptions.motion_frames": instance.data.get( + "motionBlurLength", 0.5 + ) + } + + # Write out .ass file kwargs = { "filename": file_path, "selected": True, @@ -34,17 +67,11 @@ def process(self, instance): "boundingBox": True, "expandProcedurals": instance.data.get("expandProcedurals", False), "camera": instance.data["camera"], - "mask": self.get_ass_export_mask(instance) + "mask": mask } - # Motion blur - motion_blur = instance.data.get("motionBlur", True) - motion_blur_keys = instance.data.get("motionBlurKeys", 2) - motion_blur_length = 
instance.data.get("motionBlurLength", 0.5) - - # Write out .ass file self.log.info("Writing: '%s'" % file_path) - with self.motion_blur_ctx(motion_blur, motion_blur_keys, motion_blur_length): + with attribute_values(values): with maintained_selection(): self.log.info( "Writing: {}".format(instance.data["setMembers"]) @@ -96,52 +123,3 @@ def process(self, instance): self.log.info("Extracted instance '%s' to: %s" % (instance.name, staging_dir)) - - #This should be separated out as library function that takes some - #attributes to modify with values. The function then resets to original - #values. - @contextlib.contextmanager - def motion_blur_ctx(self, force, keys, length): - if not force: - yield - return - - cmb = cmds.getAttr("defaultArnoldRenderOptions.motion_blur_enable") - ckeys = cmds.getAttr("defaultArnoldRenderOptions.motion_steps") - clen = cmds.getAttr("defaultArnoldRenderOptions.motion_frames") - - cmds.setAttr("defaultArnoldRenderOptions.motion_blur_enable", 1) - if keys > 0: - cmds.setAttr("defaultArnoldRenderOptions.motion_steps", keys) - if length >= 0: - cmds.setAttr("defaultArnoldRenderOptions.motion_frames", length) - - try: - yield - finally: - cmds.setAttr("defaultArnoldRenderOptions.motion_blur_enable", cmb) - cmds.setAttr("defaultArnoldRenderOptions.motion_steps", ckeys) - cmds.setAttr("defaultArnoldRenderOptions.motion_frames", clen) - - #This should be refactored to lib. probably just need the node_types directionary - def get_ass_export_mask(self, instance): - mask = arnold.AI_NODE_ALL - - node_types = { - "options": arnold.AI_NODE_OPTIONS, - "camera": arnold.AI_NODE_CAMERA, - "light": arnold.AI_NODE_LIGHT, - "shape": arnold.AI_NODE_SHAPE, - "shader": arnold.AI_NODE_SHADER, - "override": arnold.AI_NODE_OVERRIDE, - "driver": arnold.AI_NODE_DRIVER, - "filter": arnold.AI_NODE_FILTER, - "color_manager": arnold.AI_NODE_COLOR_MANAGER, - "operator": arnold.AI_NODE_OPERATOR - } - - for key in node_types.keys(): - if instance.data.get("mask" + key.title()): - mask = mask ^ node_types[key] - - return mask From 5c3d44a0abc430e1670b70fc934e2d3484fd79fe Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 7 Dec 2022 09:21:07 +0800 Subject: [PATCH 065/130] fix parenting issue when extracting proxy abc --- .../maya/plugins/publish/extract_proxy_abc.py | 21 +++++++++++-------- 1 file changed, 12 insertions(+), 9 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py b/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py index f348712d7c9..aa2a4b783c7 100644 --- a/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py +++ b/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py @@ -97,14 +97,17 @@ def create_proxy_geometry(self, instance, name_suffix, start, end): end=end)) inst_selection = cmds.ls(nodes, long=True) - cmds.geomToBBox(inst_selection, - nameSuffix=name_suffix, - keepOriginal=True, - single=False, - bakeAnimation=True, - startTime=start, - endTime=end) - bbox_sel = cmds.ls(sl=True, long=True) - + bbox = cmds.geomToBBox(inst_selection, + nameSuffix=name_suffix, + keepOriginal=True, + single=False, + bakeAnimation=True, + startTime=start, + endTime=end) + #TODO: fix the scale or disparenting for the group + # bbox_sel = cmds.listRelatives(bbox, parent=True) + # cmds.ls(bbox_sel, long=True) + master_group = cmds.group(name="bbox_grp") + bbox_sel = cmds.ls(master_group, long=True) self.log.debug("proxy_root: {}".format(bbox_sel)) return bbox_sel From b6b7ac29226f9c6084cad6ad8dba434d50bc96af Mon Sep 17 00:00:00 2001 From: Kayla Man Date: 
Wed, 7 Dec 2022 16:55:47 +0800 Subject: [PATCH 066/130] adding options for creating renderpasses in redshift and vray --- openpype/hosts/maya/api/lib_rendersettings.py | 27 ++- .../schemas/schema_maya_render_settings.json | 192 +++++++++--------- 2 files changed, 119 insertions(+), 100 deletions(-) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index 1293f1287d9..f9d24c3780f 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -150,13 +150,23 @@ def _set_redshift_settings(self, width, height): redshift_render_presets = render_settings["redshift_renderer"] remove_aovs = render_settings["remove_aovs"] + all_rs_aovs = cmds.ls(type='RedshiftAOV') if remove_aovs: - aovs = cmds.ls(type='RedshiftAOV') - for aov in aovs: + for aov in all_rs_aovs: enabled = cmds.getAttr("{}.enabled".format(aov)) if enabled: cmds.delete(aov) + redshift_aovs = redshift_render_presets["aov_list"] + for rs_aov in redshift_aovs: + rs_renderlayer = rs_aov.replace(" ", "") + rs_layername = "rsAov_{}".format(rs_renderlayer) + if rs_layername in all_rs_aovs: + continue + cmds.rsCreateAov(type=rs_aov) + # update the AOV list + mel.eval("redshiftUpdateActiveAovList;") + additional_options = redshift_render_presets["additional_options"] ext = redshift_render_presets["image_format"] img_exts = ["iff", "exr", "tif", "png", "tga", "jpg"] @@ -177,19 +187,26 @@ def _set_vray_settings(self, aov_separator, width, height): vray_render_presets = render_settings["vray_renderer"] # vrayRenderElement remove_aovs = render_settings["remove_aovs"] + all_vray_aovs = cmds.ls(type='VRayRenderElement') + lightSelect_aovs = cmds.ls(type='VRayRenderElementSet') if remove_aovs: - aovs = cmds.ls(type='VRayRenderElement') - for aov in aovs: + for aov in all_vray_aovs: # remove all aovs except LightSelect enabled = cmds.getAttr("{}.enabled".format(aov)) if enabled: cmds.delete(aov) # remove LightSelect - lightSelect_aovs = cmds.ls(type='VRayRenderElementSet') for light_aovs in lightSelect_aovs: light_enabled = cmds.getAttr("{}.enabled".format(light_aovs)) if light_enabled: cmds.delete(lightSelect_aovs) + + vray_aovs = vray_render_presets["aov_list"] + for renderlayer in vray_aovs: + renderElement = "vrayAddRenderElement {}".format(renderlayer) + RE_name = mel.eval(renderElement) + if RE_name.endswith("1"): # if there is more than one same render element + cmds.delete(RE_name) # Set aov separator # First we need to explicitly set the UI items in Render Settings # because that is also what V-Ray updates to when that Render Settings diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json index c1bafc41082..512e45f6744 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json @@ -209,74 +209,76 @@ "defaults": "empty", "enum_items": [ {"empty": "< empty >"}, - {"atmosphereChannel": "atmosphere"}, - {"backgroundChannel": "background"}, - {"bumpNormalsChannel": "bumpnormals"}, - {"causticsChannel": "caustics"}, - {"coatFilterChannel": "coat_filter"}, - {"coatGlossinessChannel": "coatGloss"}, - {"coatReflectionChannel": "coat_reflection"}, - {"vrayCoatChannel": "coat_specular"}, - {"CoverageChannel": "coverage"}, - {"cryptomatteChannel": "cryptomatte"}, - 
{"customColor": "custom_color"}, - {"drBucketChannel": "DR"}, - {"denoiserChannel": "denoiser"}, - {"diffuseChannel": "diffuse"}, - {"ExtraTexElement": "extraTex"}, - {"giChannel": "GI"}, - {"LightMixElement": "None"}, - {"lightingChannel": "lighting"}, - {"LightingAnalysisChannel": "LightingAnalysis"}, - {"materialIDChannel": "materialID"}, - {"MaterialSelectElement": "materialSelect"}, - {"matteShadowChannel": "matteShadow"}, - {"MultiMatteElement": "multimatte"}, - {"multimatteIDChannel": "multimatteID"}, - {"normalsChannel": "normals"}, - {"nodeIDChannel": "objectId"}, - {"objectSelectChannel": "objectSelect"}, - {"rawCoatFilterChannel": "raw_coat_filter"}, - {"rawCoatReflectionChannel": "raw_coat_reflection"}, - {"rawDiffuseFilterChannel": "rawDiffuseFilter"}, - {"rawGiChannel": "rawGI"}, - {"rawLightChannel": "rawLight"}, - {"rawReflectionChannel": "rawReflection"}, - {"rawReflectionFilterChannel": "rawReflectionFilter"}, - {"rawRefractionChannel": "rawRefraction"}, - {"rawRefractionFilterChannel": "rawRefractionFilter"}, - {"rawShadowChannel": "rawShadow"}, - {"rawSheenFilterChannel": "raw_sheen_filter"}, - {"rawSheenReflectionChannel": "raw_sheen_reflection"}, - {"rawTotalLightChannel": "rawTotalLight"}, - {"reflectIORChannel": "reflIOR"}, - {"reflectChannel": "reflect"}, - {"reflectionFilterChannel": "reflectionFilter"}, - {"reflectGlossinessChannel": "reflGloss"}, - {"refractChannel": "refract"}, - {"refractionFilterChannel": "refractionFilter"}, - {"refractGlossinessChannel": "refrGloss"}, - {"renderIDChannel": "renderId"}, - {"FastSSS2Channel": "SSS"}, - {"sampleRateChannel": "sampleRate"}, + {"atmosphereChannel": "atmosphereChannel"}, + {"backgroundChannel": "backgroundChannel"}, + {"bumpNormalsChannel": "bumpNormalsChannel"}, + {"causticsChannel": "causticsChannel"}, + {"coatFilterChannel": "coatFilterChannel"}, + {"coatGlossinessChannel": "coatGlossinessChannel"}, + {"coatReflectionChannel": "coatReflectionChannel"}, + {"vrayCoatChannel": "vrayCoatChannel"}, + {"CoverageChannel": "CoverageChannel"}, + {"cryptomatteChannel": "cryptomatteChannel"}, + {"customColor": "customColor"}, + {"drBucketChannel": "drBucketChannel"}, + {"denoiserChannel": "denoiserChannel"}, + {"diffuseChannel": "diffuseChannel"}, + {"ExtraTexElement": "ExtraTexElement"}, + {"giChannel": "giChannel"}, + {"LightMixElement": "LightMixElement"}, + {"LightSelectElement": "LightSelectElement"}, + {"lightingChannel": "lightingChannel"}, + {"LightingAnalysisChannel": "LightingAnalysisChannel"}, + {"materialIDChannel": "materialIDChannel"}, + {"MaterialSelectElement": "MaterialSelectElement"}, + {"matteShadowChannel": "matteShadowChannel"}, + {"metalnessChannel": "metalnessChannel"}, + {"MultiMatteElement": "MultiMatteElement"}, + {"multimatteIDChannel": "multimatteIDChannel"}, + {"noiseLevelChannel": "noiseLevelChannel"}, + {"normalsChannel": "normalsChannel"}, + {"nodeIDChannel": "nodeIDChannel"}, + {"objectSelectChannel": "objectSelectChannel"}, + {"rawCoatFilterChannel": "rawCoatFilterChannel"}, + {"rawCoatReflectionChannel": "rawCoatReflectionChannel"}, + {"rawDiffuseFilterChannel": "rawDiffuseFilterChannel"}, + {"rawGiChannel": "rawGiChannel"}, + {"rawLightChannel": "rawLightChannel"}, + {"rawReflectionChannel": "rawReflectionChannel"}, + {"rawReflectionFilterChannel": "rawReflectionFilterChannel"}, + {"rawRefractionChannel": "rawRefractionChannel"}, + {"rawRefractionFilterChannel": "rawRefractionFilterChannel"}, + {"rawShadowChannel": "rawShadowChannel"}, + {"rawSheenFilterChannel": 
"rawSheenFilterChannel"}, + {"rawSheenReflectionChannel": "rawSheenReflectionChannel"}, + {"rawTotalLightChannel": "rawTotalLightChannel"}, + {"reflectIORChannel": "reflectIORChannel"}, + {"reflectChannel": "reflectChannel"}, + {"reflectionFilterChannel": "reflectionFilterChannel"}, + {"reflectGlossinessChannel": "reflectGlossinessChannel"}, + {"refractChannel": "refractChannel"}, + {"refractionFilterChannel": "refractionFilterChannel"}, + {"refractGlossinessChannel": "refractGlossinessChannel"}, + {"renderIDChannel": "renderIDChannel"}, + {"FastSSS2Channel": "FastSSS2Channel"}, + {"sampleRateChannel": "sampleRateChannel"}, {"samplerInfo": "samplerInfo"}, - {"selfIllumChannel": "selfIllum"}, - {"shadowChannel": "shadow"}, - {"sheenFilterChannel": "sheen_filter"}, - {"sheenGlossinessChannel": "sheenGloss"}, - {"sheenReflectionChannel": "sheen_reflection"}, - {"vraySheenChannel": "sheen_specular"}, - {"specularChannel": "specular"}, + {"selfIllumChannel": "selfIllumChannel"}, + {"shadowChannel": "shadowChannel"}, + {"sheenFilterChannel": "sheenFilterChannel"}, + {"sheenGlossinessChannel": "sheenGlossinessChannel"}, + {"sheenReflectionChannel": "sheenReflectionChannel"}, + {"vraySheenChannel": "vraySheenChannel"}, + {"specularChannel": "specularChannel"}, {"Toon": "Toon"}, - {"toonLightingChannel": "toonLighting"}, - {"toonSpecularChannel": "toonSpecular"}, - {"totalLightChannel": "totalLight"}, - {"unclampedColorChannel": "unclampedColor"}, - {"VRScansPaintMaskChannel": "VRScansPaintMask"}, - {"VRScansZoneMaskChannel": "VRScansZoneMask"}, - {"velocityChannel": "velocity"}, - {"zdepthChannel": "zDepth"}, - {"LightSelectElement": "lightselect"} + {"toonLightingChannel": "toonLightingChannel"}, + {"toonSpecularChannel": "toonSpecularChannel"}, + {"totalLightChannel": "totalLightChannel"}, + {"unclampedColorChannel": "unclampedColorChannel"}, + {"VRScansPaintMaskChannel": "VRScansPaintMaskChannel"}, + {"VRScansZoneMaskChannel": "VRScansZoneMaskChannel"}, + {"velocityChannel": "velocityChannel"}, + {"zdepthChannel": "zdepthChannel"} ] }, { @@ -366,46 +368,46 @@ "defaults": "empty", "enum_items": [ {"empty": "< none >"}, - {"AO": "Ambient Occlusion"}, + {"Ambient Occlusion": "Ambient Occlusion"}, {"Background": "Background"}, {"Beauty": "Beauty"}, - {"BumpNormals": "Bump Normals"}, + {"Bump Normals": "Bump Normals"}, {"Caustics": "Caustics"}, - {"CausticsRaw": "Caustics Raw"}, + {"Caustics Raw": "Caustics Raw"}, {"Cryptomatte": "Cryptomatte"}, {"Custom": "Custom"}, - {"Z": "Depth"}, - {"DiffuseFilter": "Diffuse Filter"}, - {"DiffuseLighting": "Diffuse Lighting"}, - {"DiffuseLightingRaw": "Diffuse Lighting Raw"}, + {"Depth": "Depth"}, + {"Diffuse Filter": "Diffuse Filter"}, + {"Diffuse Lighting": "Diffuse Lighting"}, + {"Diffuse Lighting Raw": "Diffuse Lighting Raw"}, {"Emission": "Emission"}, - {"GI": "Global Illumination"}, - {"GIRaw": "Global Illumination Raw"}, + {"Global Illumination": "Global Illumination"}, + {"Global Illumination Raw": "Global Illumination Raw"}, {"Matte": "Matte"}, - {"MotionVectors": "Ambient Occlusion"}, - {"N": "Normals"}, - {"ID": "ObjectID"}, - {"ObjectBumpNormal": "Object-Space Bump Normals"}, - {"ObjectPosition": "Object-Space Positions"}, - {"PuzzleMatte": "Puzzle Matte"}, + {"Motion Vectors": "Motion Vectors"}, + {"Normals": "Normals"}, + {"ObjectID": "ObjectID"}, + {"Object-Space Bump Normals": "Object-Space Bump Normals"}, + {"Object-Space Positions": "Object-Space Positions"}, + {"Puzzle Matte": "Puzzle Matte"}, {"Reflections": "Reflections"}, - 
{"ReflectionsFilter": "Reflections Filter"}, - {"ReflectionsRaw": "Reflections Raw"}, + {"Reflections Filter": "Reflections Filter"}, + {"Reflections Raw": "Reflections Raw"}, {"Refractions": "Refractions"}, - {"RefractionsFilter": "Refractions Filter"}, - {"RefractionsRaw": "Refractions Filter"}, + {"Refractions Filter": "Refractions Filter"}, + {"Refractions Raw": "Refractions Filter"}, {"Shadows": "Shadows"}, {"SpecularLighting": "Specular Lighting"}, - {"SSS": "Sub Surface Scatter"}, - {"SSSRaw": "Sub Surface Scatter Raw"}, - {"TotalDiffuseLightingRaw": "Total Diffuse Lighting Raw"}, - {"TotalTransLightingRaw": "Total Translucency Filter"}, - {"TransTint": "Translucency Filter"}, - {"TransGIRaw": "Translucency Lighting Raw"}, - {"VolumeFogEmission": "Volume Fog Emission"}, - {"VolumeFogTint": "Volume Fog Tint"}, - {"VolumeLighting": "Volume Lighting"}, - {"P": "World Position"} + {"Sub Surface Scatter": "Sub Surface Scatter"}, + {"Sub Surface Scatter Raw": "Sub Surface Scatter Raw"}, + {"Total Diffuse Lighting Raw": "Total Diffuse Lighting Raw"}, + {"Total Translucency Filter": "Total Translucency Filter"}, + {"Translucency Filter": "Translucency Filter"}, + {"Translucency Lighting Raw": "Translucency Lighting Raw"}, + {"Volume Fog Emission": "Volume Fog Emission"}, + {"Volume Fog Tint": "Volume Fog Tint"}, + {"Volume Lighting": "Volume Lighting"}, + {"World Position": "World Position"} ] }, { From 5b7a4c1704bb4114380cc0b99ef00784d22089df Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 7 Dec 2022 15:36:43 +0100 Subject: [PATCH 067/130] settings UI is using 'qtpy' instead of 'Qt' --- openpype/tools/settings/local_settings/apps_widget.py | 2 +- openpype/tools/settings/local_settings/environments_widget.py | 2 +- openpype/tools/settings/local_settings/experimental_widget.py | 2 +- openpype/tools/settings/local_settings/general_widget.py | 2 +- openpype/tools/settings/local_settings/mongo_widget.py | 2 +- openpype/tools/settings/local_settings/projects_widget.py | 2 +- openpype/tools/settings/local_settings/widgets.py | 2 +- openpype/tools/settings/local_settings/window.py | 2 +- openpype/tools/settings/settings/base.py | 2 +- openpype/tools/settings/settings/breadcrumbs_widget.py | 2 +- openpype/tools/settings/settings/categories.py | 2 +- openpype/tools/settings/settings/color_widget.py | 2 +- openpype/tools/settings/settings/constants.py | 2 +- openpype/tools/settings/settings/dialogs.py | 2 +- openpype/tools/settings/settings/dict_conditional.py | 2 +- openpype/tools/settings/settings/dict_mutable_widget.py | 2 +- openpype/tools/settings/settings/images/__init__.py | 2 +- openpype/tools/settings/settings/item_widgets.py | 2 +- openpype/tools/settings/settings/lib.py | 2 +- openpype/tools/settings/settings/list_item_widget.py | 2 +- openpype/tools/settings/settings/list_strict_widget.py | 2 +- openpype/tools/settings/settings/multiselection_combobox.py | 2 +- openpype/tools/settings/settings/search_dialog.py | 2 +- openpype/tools/settings/settings/tests.py | 2 +- openpype/tools/settings/settings/widgets.py | 2 +- openpype/tools/settings/settings/window.py | 2 +- openpype/tools/settings/settings/wrapper_widgets.py | 2 +- 27 files changed, 27 insertions(+), 27 deletions(-) diff --git a/openpype/tools/settings/local_settings/apps_widget.py b/openpype/tools/settings/local_settings/apps_widget.py index c1f350fcbcc..ce1fc86c32a 100644 --- a/openpype/tools/settings/local_settings/apps_widget.py +++ b/openpype/tools/settings/local_settings/apps_widget.py @@ -1,5 +1,5 @@ import platform 
-from Qt import QtWidgets +from qtpy import QtWidgets from .widgets import ( Separator, ExpandingWidget diff --git a/openpype/tools/settings/local_settings/environments_widget.py b/openpype/tools/settings/local_settings/environments_widget.py index 14ca517851f..5008f086d2c 100644 --- a/openpype/tools/settings/local_settings/environments_widget.py +++ b/openpype/tools/settings/local_settings/environments_widget.py @@ -1,4 +1,4 @@ -from Qt import QtWidgets +from qtpy import QtWidgets from openpype.tools.utils import PlaceholderLineEdit diff --git a/openpype/tools/settings/local_settings/experimental_widget.py b/openpype/tools/settings/local_settings/experimental_widget.py index 22ef952356b..b0d93816639 100644 --- a/openpype/tools/settings/local_settings/experimental_widget.py +++ b/openpype/tools/settings/local_settings/experimental_widget.py @@ -1,4 +1,4 @@ -from Qt import QtWidgets +from qtpy import QtWidgets from openpype.tools.experimental_tools import ( ExperimentalTools, LOCAL_EXPERIMENTAL_KEY diff --git a/openpype/tools/settings/local_settings/general_widget.py b/openpype/tools/settings/local_settings/general_widget.py index 35add7573e6..5a75c219dcb 100644 --- a/openpype/tools/settings/local_settings/general_widget.py +++ b/openpype/tools/settings/local_settings/general_widget.py @@ -1,6 +1,6 @@ import getpass -from Qt import QtWidgets, QtCore +from qtpy import QtWidgets, QtCore from openpype.lib import is_admin_password_required from openpype.widgets import PasswordDialog from openpype.tools.utils import PlaceholderLineEdit diff --git a/openpype/tools/settings/local_settings/mongo_widget.py b/openpype/tools/settings/local_settings/mongo_widget.py index 600ab792429..9549d6eb17e 100644 --- a/openpype/tools/settings/local_settings/mongo_widget.py +++ b/openpype/tools/settings/local_settings/mongo_widget.py @@ -2,7 +2,7 @@ import sys import traceback -from Qt import QtWidgets +from qtpy import QtWidgets from pymongo.errors import ServerSelectionTimeoutError from openpype.lib import change_openpype_mongo_url diff --git a/openpype/tools/settings/local_settings/projects_widget.py b/openpype/tools/settings/local_settings/projects_widget.py index 30a0d212f0c..b330d54deca 100644 --- a/openpype/tools/settings/local_settings/projects_widget.py +++ b/openpype/tools/settings/local_settings/projects_widget.py @@ -1,6 +1,6 @@ import platform import copy -from Qt import QtWidgets, QtCore, QtGui +from qtpy import QtWidgets, QtCore, QtGui from openpype.tools.settings.settings import ProjectListWidget from openpype.tools.utils import PlaceholderLineEdit from openpype.settings.constants import ( diff --git a/openpype/tools/settings/local_settings/widgets.py b/openpype/tools/settings/local_settings/widgets.py index 2733aef187a..f40978a66f1 100644 --- a/openpype/tools/settings/local_settings/widgets.py +++ b/openpype/tools/settings/local_settings/widgets.py @@ -1,4 +1,4 @@ -from Qt import QtWidgets, QtCore +from qtpy import QtWidgets, QtCore from openpype.tools.settings.settings.widgets import ( ExpandingWidget ) diff --git a/openpype/tools/settings/local_settings/window.py b/openpype/tools/settings/local_settings/window.py index 76c2d851e93..fdb05e219f3 100644 --- a/openpype/tools/settings/local_settings/window.py +++ b/openpype/tools/settings/local_settings/window.py @@ -1,4 +1,4 @@ -from Qt import QtWidgets, QtGui +from qtpy import QtWidgets, QtGui from openpype import style diff --git a/openpype/tools/settings/settings/base.py b/openpype/tools/settings/settings/base.py index 6def284a831..074ecdae902 
100644 --- a/openpype/tools/settings/settings/base.py +++ b/openpype/tools/settings/settings/base.py @@ -5,7 +5,7 @@ import functools import datetime -from Qt import QtWidgets, QtGui, QtCore +from qtpy import QtWidgets, QtGui, QtCore from openpype.settings.entities import ProjectSettings from openpype.tools.settings import CHILD_OFFSET diff --git a/openpype/tools/settings/settings/breadcrumbs_widget.py b/openpype/tools/settings/settings/breadcrumbs_widget.py index 7524bc61f06..2676d2f52db 100644 --- a/openpype/tools/settings/settings/breadcrumbs_widget.py +++ b/openpype/tools/settings/settings/breadcrumbs_widget.py @@ -1,4 +1,4 @@ -from Qt import QtWidgets, QtGui, QtCore +from qtpy import QtWidgets, QtGui, QtCore PREFIX_ROLE = QtCore.Qt.UserRole + 1 LAST_SEGMENT_ROLE = QtCore.Qt.UserRole + 2 diff --git a/openpype/tools/settings/settings/categories.py b/openpype/tools/settings/settings/categories.py index e1b3943317d..2e5ce496ed6 100644 --- a/openpype/tools/settings/settings/categories.py +++ b/openpype/tools/settings/settings/categories.py @@ -2,7 +2,7 @@ import traceback import contextlib from enum import Enum -from Qt import QtWidgets, QtCore +from qtpy import QtWidgets, QtCore import qtawesome from openpype.lib import get_openpype_version diff --git a/openpype/tools/settings/settings/color_widget.py b/openpype/tools/settings/settings/color_widget.py index b38b46f3cb6..819bfb35817 100644 --- a/openpype/tools/settings/settings/color_widget.py +++ b/openpype/tools/settings/settings/color_widget.py @@ -1,4 +1,4 @@ -from Qt import QtWidgets, QtCore, QtGui +from qtpy import QtWidgets, QtCore, QtGui from .item_widgets import InputWidget diff --git a/openpype/tools/settings/settings/constants.py b/openpype/tools/settings/settings/constants.py index 23526e4de93..b2792d885b5 100644 --- a/openpype/tools/settings/settings/constants.py +++ b/openpype/tools/settings/settings/constants.py @@ -1,4 +1,4 @@ -from Qt import QtCore +from qtpy import QtCore DEFAULT_PROJECT_LABEL = "< Default >" diff --git a/openpype/tools/settings/settings/dialogs.py b/openpype/tools/settings/settings/dialogs.py index b1b4daa1a0e..38da3cf881b 100644 --- a/openpype/tools/settings/settings/dialogs.py +++ b/openpype/tools/settings/settings/dialogs.py @@ -1,4 +1,4 @@ -from Qt import QtWidgets, QtCore +from qtpy import QtWidgets, QtCore from openpype.tools.utils.delegates import pretty_date diff --git a/openpype/tools/settings/settings/dict_conditional.py b/openpype/tools/settings/settings/dict_conditional.py index b2a7bb52a2e..564603b258b 100644 --- a/openpype/tools/settings/settings/dict_conditional.py +++ b/openpype/tools/settings/settings/dict_conditional.py @@ -1,4 +1,4 @@ -from Qt import QtWidgets +from qtpy import QtWidgets from .widgets import ( ExpandingWidget, diff --git a/openpype/tools/settings/settings/dict_mutable_widget.py b/openpype/tools/settings/settings/dict_mutable_widget.py index 1c704b3cd5c..b9932da7899 100644 --- a/openpype/tools/settings/settings/dict_mutable_widget.py +++ b/openpype/tools/settings/settings/dict_mutable_widget.py @@ -1,6 +1,6 @@ from uuid import uuid4 -from Qt import QtWidgets, QtCore, QtGui +from qtpy import QtWidgets, QtCore, QtGui from .base import BaseWidget from .lib import ( diff --git a/openpype/tools/settings/settings/images/__init__.py b/openpype/tools/settings/settings/images/__init__.py index 3ad65e114ae..0b246349a8c 100644 --- a/openpype/tools/settings/settings/images/__init__.py +++ b/openpype/tools/settings/settings/images/__init__.py @@ -1,5 +1,5 @@ import os -from Qt 
import QtGui +from qtpy import QtGui def get_image_path(image_filename): diff --git a/openpype/tools/settings/settings/item_widgets.py b/openpype/tools/settings/settings/item_widgets.py index 1ddee7efbed..d51f9b9684e 100644 --- a/openpype/tools/settings/settings/item_widgets.py +++ b/openpype/tools/settings/settings/item_widgets.py @@ -1,6 +1,6 @@ import json -from Qt import QtWidgets, QtCore, QtGui +from qtpy import QtWidgets, QtCore, QtGui from openpype.widgets.sliders import NiceSlider from openpype.tools.settings import CHILD_OFFSET diff --git a/openpype/tools/settings/settings/lib.py b/openpype/tools/settings/settings/lib.py index eef157812fb..0bcf8a4e949 100644 --- a/openpype/tools/settings/settings/lib.py +++ b/openpype/tools/settings/settings/lib.py @@ -1,4 +1,4 @@ -from Qt import QtCore +from qtpy import QtCore from .widgets import SettingsToolBtn diff --git a/openpype/tools/settings/settings/list_item_widget.py b/openpype/tools/settings/settings/list_item_widget.py index cd1fd912ae6..67ce4b9dc94 100644 --- a/openpype/tools/settings/settings/list_item_widget.py +++ b/openpype/tools/settings/settings/list_item_widget.py @@ -1,4 +1,4 @@ -from Qt import QtWidgets, QtCore +from qtpy import QtWidgets, QtCore from openpype.tools.settings import ( CHILD_OFFSET diff --git a/openpype/tools/settings/settings/list_strict_widget.py b/openpype/tools/settings/settings/list_strict_widget.py index f0a3022a50c..b0b78e57326 100644 --- a/openpype/tools/settings/settings/list_strict_widget.py +++ b/openpype/tools/settings/settings/list_strict_widget.py @@ -1,4 +1,4 @@ -from Qt import QtWidgets, QtCore +from qtpy import QtWidgets, QtCore from .widgets import ( GridLabelWidget, diff --git a/openpype/tools/settings/settings/multiselection_combobox.py b/openpype/tools/settings/settings/multiselection_combobox.py index c2cc2a8fee3..4cc81ff56e7 100644 --- a/openpype/tools/settings/settings/multiselection_combobox.py +++ b/openpype/tools/settings/settings/multiselection_combobox.py @@ -1,4 +1,4 @@ -from Qt import QtCore, QtGui, QtWidgets +from qtpy import QtCore, QtGui, QtWidgets class ComboItemDelegate(QtWidgets.QStyledItemDelegate): diff --git a/openpype/tools/settings/settings/search_dialog.py b/openpype/tools/settings/settings/search_dialog.py index e6538cfe67e..2860e7c943a 100644 --- a/openpype/tools/settings/settings/search_dialog.py +++ b/openpype/tools/settings/settings/search_dialog.py @@ -1,7 +1,7 @@ import re import collections -from Qt import QtCore, QtWidgets, QtGui +from qtpy import QtCore, QtWidgets, QtGui ENTITY_LABEL_ROLE = QtCore.Qt.UserRole + 1 ENTITY_PATH_ROLE = QtCore.Qt.UserRole + 2 diff --git a/openpype/tools/settings/settings/tests.py b/openpype/tools/settings/settings/tests.py index fc53e38ad5a..772d4618f72 100644 --- a/openpype/tools/settings/settings/tests.py +++ b/openpype/tools/settings/settings/tests.py @@ -1,4 +1,4 @@ -from Qt import QtWidgets, QtCore +from qtpy import QtWidgets, QtCore def indented_print(data, indent=0): diff --git a/openpype/tools/settings/settings/widgets.py b/openpype/tools/settings/settings/widgets.py index b8ad21e7e47..fd04cb0a234 100644 --- a/openpype/tools/settings/settings/widgets.py +++ b/openpype/tools/settings/settings/widgets.py @@ -1,6 +1,6 @@ import copy import uuid -from Qt import QtWidgets, QtCore, QtGui +from qtpy import QtWidgets, QtCore, QtGui import qtawesome from openpype.client import get_projects diff --git a/openpype/tools/settings/settings/window.py b/openpype/tools/settings/settings/window.py index 77a2f64dace..f479908f7b4 100644 --- 
a/openpype/tools/settings/settings/window.py +++ b/openpype/tools/settings/settings/window.py @@ -1,4 +1,4 @@ -from Qt import QtWidgets, QtGui, QtCore +from qtpy import QtWidgets, QtGui, QtCore from openpype import style diff --git a/openpype/tools/settings/settings/wrapper_widgets.py b/openpype/tools/settings/settings/wrapper_widgets.py index b14a226912f..0b45a9a01bb 100644 --- a/openpype/tools/settings/settings/wrapper_widgets.py +++ b/openpype/tools/settings/settings/wrapper_widgets.py @@ -1,5 +1,5 @@ from uuid import uuid4 -from Qt import QtWidgets +from qtpy import QtWidgets from .widgets import ( ExpandingWidget, From dbe5872960fc96ee6ad5a17d8a6c289d6ee6f9ef Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Wed, 7 Dec 2022 23:43:23 +0800 Subject: [PATCH 068/130] options for creating renderpasses in redshift and vray --- openpype/hosts/maya/api/lib_rendersettings.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index f9d24c3780f..d9b79e3c2f9 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -159,13 +159,16 @@ def _set_redshift_settings(self, width, height): redshift_aovs = redshift_render_presets["aov_list"] for rs_aov in redshift_aovs: - rs_renderlayer = rs_aov.replace(" ", "") - rs_layername = "rsAov_{}".format(rs_renderlayer) + if " " in rs_aov: + rs_renderlayer = rs_aov.replace(" ", "") + rs_layername = "rsAov_{}".format(rs_renderlayer) + else: + rs_layername = "rsAov_{}".format(rs_aov) if rs_layername in all_rs_aovs: continue cmds.rsCreateAov(type=rs_aov) # update the AOV list - mel.eval("redshiftUpdateActiveAovList;") + mel.eval("redshiftUpdateActiveAovList") additional_options = redshift_render_presets["additional_options"] ext = redshift_render_presets["image_format"] @@ -205,7 +208,8 @@ def _set_vray_settings(self, aov_separator, width, height): for renderlayer in vray_aovs: renderElement = "vrayAddRenderElement {}".format(renderlayer) RE_name = mel.eval(renderElement) - if RE_name.endswith("1"): # if there is more than one same render element + # if there is more than one same render element + if RE_name.endswith("1"): cmds.delete(RE_name) # Set aov separator # First we need to explicitly set the UI items in Render Settings From d7cc795d1fdc2fd635488bcc5282217e4b36b9c5 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Thu, 8 Dec 2022 16:59:43 +0800 Subject: [PATCH 069/130] gltf extractor for Maya --- openpype/hosts/maya/plugins/publish/collect_gltf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/collect_gltf.py b/openpype/hosts/maya/plugins/publish/collect_gltf.py index bbc4e31f92d..bb37fe3a7eb 100644 --- a/openpype/hosts/maya/plugins/publish/collect_gltf.py +++ b/openpype/hosts/maya/plugins/publish/collect_gltf.py @@ -13,5 +13,5 @@ def process(self, instance): if not instance.data.get("families"): instance.data["families"] = [] - if "fbx" not in instance.data["families"]: + if "gltf" not in instance.data["families"]: instance.data["families"].append("gltf") From 444d5cd7bcb1d483d9f373b981e866a74a82dd85 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Thu, 8 Dec 2022 18:24:21 +0800 Subject: [PATCH 070/130] create master group for boudning box as root for publishing abc --- .../maya/plugins/publish/extract_proxy_abc.py | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git 
a/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py b/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py index aa2a4b783c7..feb174559f8 100644 --- a/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py +++ b/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py @@ -97,16 +97,15 @@ def create_proxy_geometry(self, instance, name_suffix, start, end): end=end)) inst_selection = cmds.ls(nodes, long=True) - bbox = cmds.geomToBBox(inst_selection, - nameSuffix=name_suffix, - keepOriginal=True, - single=False, - bakeAnimation=True, - startTime=start, - endTime=end) - #TODO: fix the scale or disparenting for the group - # bbox_sel = cmds.listRelatives(bbox, parent=True) - # cmds.ls(bbox_sel, long=True) + cmds.geomToBBox(inst_selection, + nameSuffix=name_suffix, + keepOriginal=True, + single=False, + bakeAnimation=True, + startTime=start, + endTime=end) + # create master group for bounding + # boxes as the main root master_group = cmds.group(name="bbox_grp") bbox_sel = cmds.ls(master_group, long=True) self.log.debug("proxy_root: {}".format(bbox_sel)) From c0319efd2ace058f682578805bd688b06bcf389d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 8 Dec 2022 12:05:01 +0100 Subject: [PATCH 071/130] change import in settings init --- openpype/tools/settings/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/settings/__init__.py b/openpype/tools/settings/__init__.py index 3e77a8348a8..0bc166b437b 100644 --- a/openpype/tools/settings/__init__.py +++ b/openpype/tools/settings/__init__.py @@ -1,5 +1,5 @@ import sys -from Qt import QtWidgets, QtGui +from qtpy import QtWidgets, QtGui from openpype import style from .lib import ( From 367b7b262ea9131d7e9c184c704b852590dbb887 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?F=C3=A9lix=20David?= Date: Thu, 8 Dec 2022 13:06:10 +0100 Subject: [PATCH 072/130] changes for better support --- openpype/lib/path_templates.py | 2 +- openpype/pipeline/load/utils.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/lib/path_templates.py b/openpype/lib/path_templates.py index b160054e380..0f99efb430a 100644 --- a/openpype/lib/path_templates.py +++ b/openpype/lib/path_templates.py @@ -422,7 +422,7 @@ def normalized(self): cls = self.__class__ return cls( - os.path.normpath(self), + os.path.normpath(self.replace("\\", "/")), self.template, self.solved, self.used_values, diff --git a/openpype/pipeline/load/utils.py b/openpype/pipeline/load/utils.py index bfa9fe07c7b..784d4628f3a 100644 --- a/openpype/pipeline/load/utils.py +++ b/openpype/pipeline/load/utils.py @@ -555,7 +555,7 @@ def get_representation_path_with_anatomy(repre_doc, anatomy): """ try: - template = repre_doc["data"]["template"].replace("\\", "/") + template = repre_doc["data"]["template"] except KeyError: raise InvalidRepresentationContext(( From 7c4c579fbe11ed248e6213925a04930e0467593f Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Thu, 8 Dec 2022 22:49:15 +0800 Subject: [PATCH 073/130] create master group for boudning box as root for publishing abc --- .../maya/plugins/publish/extract_proxy_abc.py | 34 +++++++++++++++++-- 1 file changed, 31 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py b/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py index feb174559f8..07c28a231a7 100644 --- a/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py +++ b/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py @@ -86,9 +86,8 @@ def process(self, instance): remove_bb = 
instance.data.get("removeBoundingBoxAfterPublish") if remove_bb: - for bbox in proxy_root: - bounding_box = cmds.listRelatives(bbox, parent=True) - cmds.delete(bounding_box) + bbox_master = cmds.ls("bbox_grp") + cmds.delete(bbox_master) def create_proxy_geometry(self, instance, name_suffix, start, end): nodes = instance[:] @@ -104,9 +103,38 @@ def create_proxy_geometry(self, instance, name_suffix, start, end): bakeAnimation=True, startTime=start, endTime=end) + # select the top group + self.top_hierarchy_selection() # create master group for bounding # boxes as the main root master_group = cmds.group(name="bbox_grp") bbox_sel = cmds.ls(master_group, long=True) self.log.debug("proxy_root: {}".format(bbox_sel)) return bbox_sel + + # find the top group of the bounding box transform + def top_hierarchy_selection(self): + targets = cmds.ls(sl=True, long=True) + top_grp_list = [] + for target in targets: + top_parent = None + stop = False + + while not stop: + top_grp = cmds.listRelatives(top_parent or target, + parent=True, + path=True) + if top_grp is None: + # the loop would be stopped + # after top group found + stop = True + else: + top_parent = top_grp[0] + + if top_grp: + self.log.debug('{} is the top group'.format(top_parent)) + if top_parent in top_grp_list: + continue + top_grp_list.append(top_parent) + + return cmds.select(top_grp_list) From 20400b51c995db486480636783336d00e0a8c162 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Thu, 8 Dec 2022 16:11:26 +0100 Subject: [PATCH 074/130] :recycle: remove `exportSequence` flag --- .../hosts/maya/plugins/create/create_ass.py | 4 +- .../hosts/maya/plugins/publish/collect_ass.py | 13 +++--- .../hosts/maya/plugins/publish/extract_ass.py | 44 +++++++------------ .../defaults/project_settings/maya.json | 1 - 4 files changed, 23 insertions(+), 39 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_ass.py b/openpype/hosts/maya/plugins/create/create_ass.py index 903a8ef0cf3..935a068ca5a 100644 --- a/openpype/hosts/maya/plugins/create/create_ass.py +++ b/openpype/hosts/maya/plugins/create/create_ass.py @@ -13,7 +13,6 @@ class CreateAss(plugin.Creator): label = "Arnold Scene Source" family = "ass" icon = "cube" - exportSequence = False expandProcedurals = False motionBlur = True motionBlurKeys = 2 @@ -35,7 +34,6 @@ def __init__(self, *args, **kwargs): # Add animation data self.data.update(lib.collect_animation_data()) - self.data["exportSequence"] = self.exportSequence self.data["expandProcedurals"] = self.expandProcedurals self.data["motionBlur"] = self.motionBlur self.data["motionBlurKeys"] = self.motionBlurKeys @@ -56,7 +54,7 @@ def __init__(self, *args, **kwargs): def process(self): instance = super(CreateAss, self).process() - nodes = list() + nodes = [] if (self.options or {}).get("useSelection"): nodes = cmds.ls(selection=True) diff --git a/openpype/hosts/maya/plugins/publish/collect_ass.py b/openpype/hosts/maya/plugins/publish/collect_ass.py index 69af4c777db..45ec5b124ee 100644 --- a/openpype/hosts/maya/plugins/publish/collect_ass.py +++ b/openpype/hosts/maya/plugins/publish/collect_ass.py @@ -1,6 +1,7 @@ import re from maya import cmds +from openpype.pipeline.publish import KnownPublishError import pyblish.api @@ -26,16 +27,14 @@ def process(self, instance): instance.data['setMembers'] = members self.log.debug('content members: {}'.format(members)) elif objset.startswith("proxy_SET"): - msg = "You have multiple proxy meshes, please only use one" - assert len(members) == 1, msg + if len(members) != 1: + msg = "You have 
multiple proxy meshes, please only use one" + raise KnownPublishError(msg) instance.data['proxy'] = members self.log.debug('proxy members: {}'.format(members)) - # Indicate to user that it'll be a single frame. - sequence = instance.data.get("exportSequence", False) - if not sequence: - group = re.compile(r" \[.*\]") - instance.data["label"] = group.sub("", instance.data["label"]) + group = re.compile(r" \[.*\]") + instance.data["label"] = group.sub("", instance.data["label"]) # Use camera in object set if present else default to render globals # camera. diff --git a/openpype/hosts/maya/plugins/publish/extract_ass.py b/openpype/hosts/maya/plugins/publish/extract_ass.py index 7fc0cc1b2ff..3442d47ae9f 100644 --- a/openpype/hosts/maya/plugins/publish/extract_ass.py +++ b/openpype/hosts/maya/plugins/publish/extract_ass.py @@ -21,7 +21,7 @@ def process(self, instance): staging_dir = self.staging_dir(instance) filename = "{}.ass".format(instance.name) - filenames = list() + filenames = [] file_path = os.path.join(staging_dir, filename) # Mask @@ -78,33 +78,21 @@ def process(self, instance): ) cmds.select(instance.data["setMembers"], noExpand=True) - if sequence: - self.log.info("Extracting ass sequence") - - # Collect the start and end including handles - kwargs.update({ - "start": instance.data.get("frameStartHandle", 1), - "end": instance.data.get("frameEndHandle", 1), - "step": instance.data.get("step", 0) - }) - - exported_files = cmds.arnoldExportAss(**kwargs) - - for file in exported_files: - filenames.append(os.path.split(file)[1]) - - self.log.info("Exported: {}".format(filenames)) - else: - self.log.info("Extracting ass") - cmds.arnoldExportAss(**kwargs) - self.log.info("Extracted {}".format(filename)) - filenames = filename - optionals = [ - "frameStart", "frameEnd", "step", "handles", - "handleEnd", "handleStart" - ] - for key in optionals: - instance.data.pop(key, None) + self.log.info("Extracting ass sequence") + + # Collect the start and end including handles + kwargs.update({ + "start": instance.data.get("frameStartHandle", 1), + "end": instance.data.get("frameEndHandle", 1), + "step": instance.data.get("step", 0) + }) + + exported_files = cmds.arnoldExportAss(**kwargs) + + for file in exported_files: + filenames.append(os.path.split(file)[1]) + + self.log.info("Exported: {}".format(filenames)) if "representations" not in instance.data: instance.data["representations"] = [] diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index a74f8e5827b..0b4ee704de3 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -172,7 +172,6 @@ "defaults": [ "Main" ], - "exportSequence": false, "expandProcedurals": false, "motionBlur": true, "motionBlurKeys": 2, From bdc66ae574b606ee3b323cf429f1133a28ecd48a Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Thu, 8 Dec 2022 23:31:40 +0800 Subject: [PATCH 075/130] update removing bbox codes within the extractor --- .../maya/plugins/create/create_proxy_abc.py | 2 - .../maya/plugins/publish/extract_proxy_abc.py | 37 ++----------------- 2 files changed, 3 insertions(+), 36 deletions(-) diff --git a/openpype/hosts/maya/plugins/create/create_proxy_abc.py b/openpype/hosts/maya/plugins/create/create_proxy_abc.py index 2d81cb663b2..2946f7b5302 100644 --- a/openpype/hosts/maya/plugins/create/create_proxy_abc.py +++ b/openpype/hosts/maya/plugins/create/create_proxy_abc.py @@ -27,8 +27,6 @@ def __init__(self, *args, 
**kwargs): # Default to exporting world-space self.data["worldSpace"] = True - # remove the bbBox after publish - self.data["removeBoundingBoxAfterPublish"] = False # name suffix for the bounding box self.data["nameSuffix"] = "_BBox" diff --git a/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py b/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py index 07c28a231a7..cf6351fdca6 100644 --- a/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py +++ b/openpype/hosts/maya/plugins/publish/extract_proxy_abc.py @@ -83,11 +83,9 @@ def process(self, instance): instance.context.data["cleanupFullPaths"].append(path) self.log.info("Extracted {} to {}".format(instance, dirname)) - - remove_bb = instance.data.get("removeBoundingBoxAfterPublish") - if remove_bb: - bbox_master = cmds.ls("bbox_grp") - cmds.delete(bbox_master) + # remove the bounding box + bbox_master = cmds.ls("bbox_grp") + cmds.delete(bbox_master) def create_proxy_geometry(self, instance, name_suffix, start, end): nodes = instance[:] @@ -103,38 +101,9 @@ def create_proxy_geometry(self, instance, name_suffix, start, end): bakeAnimation=True, startTime=start, endTime=end) - # select the top group - self.top_hierarchy_selection() # create master group for bounding # boxes as the main root master_group = cmds.group(name="bbox_grp") bbox_sel = cmds.ls(master_group, long=True) self.log.debug("proxy_root: {}".format(bbox_sel)) return bbox_sel - - # find the top group of the bounding box transform - def top_hierarchy_selection(self): - targets = cmds.ls(sl=True, long=True) - top_grp_list = [] - for target in targets: - top_parent = None - stop = False - - while not stop: - top_grp = cmds.listRelatives(top_parent or target, - parent=True, - path=True) - if top_grp is None: - # the loop would be stopped - # after top group found - stop = True - else: - top_parent = top_grp[0] - - if top_grp: - self.log.debug('{} is the top group'.format(top_parent)) - if top_parent in top_grp_list: - continue - top_grp_list.append(top_parent) - - return cmds.select(top_grp_list) From a209140cd6f44ce62beb097f220a7fa3d21d1fa1 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Thu, 8 Dec 2022 16:39:51 +0100 Subject: [PATCH 076/130] :bug: handle single frames --- openpype/hosts/maya/plugins/publish/extract_ass.py | 8 +++----- .../projects_schema/schemas/schema_maya_create.json | 5 ----- 2 files changed, 3 insertions(+), 10 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_ass.py b/openpype/hosts/maya/plugins/publish/extract_ass.py index 3442d47ae9f..0678da65497 100644 --- a/openpype/hosts/maya/plugins/publish/extract_ass.py +++ b/openpype/hosts/maya/plugins/publish/extract_ass.py @@ -100,13 +100,11 @@ def process(self, instance): representation = { 'name': 'ass', 'ext': 'ass', - 'files': filenames, - "stagingDir": staging_dir + 'files': filenames if len(filenames) > 1 else filenames[0], + "stagingDir": staging_dir, + 'frameStart': kwargs["start"] } - if sequence: - representation['frameStart'] = kwargs["start"] - instance.data["representations"].append(representation) self.log.info("Extracted instance '%s' to: %s" diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json index 6cf11e4cea5..f66b0181de5 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json @@ -218,11 +218,6 @@ 
"label": "Default Subsets", "object_type": "text" }, - { - "type": "boolean", - "key": "exportSequence", - "label": "Export Sequence" - }, { "type": "boolean", "key": "expandProcedurals", From db4139fc3774b4f70999e1d06a8fe2491291dd40 Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Thu, 8 Dec 2022 16:26:05 +0000 Subject: [PATCH 077/130] Remove redundant viewport lib --- openpype/hosts/maya/api/viewport.py | 19 ------------------- 1 file changed, 19 deletions(-) delete mode 100644 openpype/hosts/maya/api/viewport.py diff --git a/openpype/hosts/maya/api/viewport.py b/openpype/hosts/maya/api/viewport.py deleted file mode 100644 index cbf78ab8157..00000000000 --- a/openpype/hosts/maya/api/viewport.py +++ /dev/null @@ -1,19 +0,0 @@ -# -*- coding: utf-8 -*- -"""Tools for working with viewport in Maya.""" -import contextlib -from maya import cmds # noqa - - -@contextlib.contextmanager -def vp2_paused_context(): - """Context manager to stop updating of vp2 viewport.""" - state = cmds.ogs(pause=True, query=True) - - if not state: - cmds.ogs(pause=True) - - try: - yield - finally: - if cmds.ogs(pause=True, query=True) != state: - cmds.ogs(pause=True) From 8d8753b7293969374c26e733efea0452cc1b0048 Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Thu, 8 Dec 2022 16:26:17 +0000 Subject: [PATCH 078/130] Clean up collector --- openpype/hosts/maya/plugins/publish/collect_ass.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/collect_ass.py b/openpype/hosts/maya/plugins/publish/collect_ass.py index 45ec5b124ee..b5e05d66657 100644 --- a/openpype/hosts/maya/plugins/publish/collect_ass.py +++ b/openpype/hosts/maya/plugins/publish/collect_ass.py @@ -1,5 +1,3 @@ -import re - from maya import cmds from openpype.pipeline.publish import KnownPublishError @@ -33,9 +31,6 @@ def process(self, instance): instance.data['proxy'] = members self.log.debug('proxy members: {}'.format(members)) - group = re.compile(r" \[.*\]") - instance.data["label"] = group.sub("", instance.data["label"]) - # Use camera in object set if present else default to render globals # camera. cameras = cmds.ls(type="camera", long=True) From 096cda17623121aaaad582068b200001b61e471b Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Thu, 8 Dec 2022 16:26:35 +0000 Subject: [PATCH 079/130] Fix frame flags. 
--- .../hosts/maya/plugins/publish/extract_ass.py | 19 +++++++------------ 1 file changed, 7 insertions(+), 12 deletions(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_ass.py b/openpype/hosts/maya/plugins/publish/extract_ass.py index 0678da65497..049f256a7af 100644 --- a/openpype/hosts/maya/plugins/publish/extract_ass.py +++ b/openpype/hosts/maya/plugins/publish/extract_ass.py @@ -16,9 +16,6 @@ class ExtractAssStandin(publish.Extractor): asciiAss = False def process(self, instance): - - sequence = instance.data.get("exportSequence", False) - staging_dir = self.staging_dir(instance) filename = "{}.ass".format(instance.name) filenames = [] @@ -60,6 +57,9 @@ def process(self, instance): # Write out .ass file kwargs = { "filename": file_path, + "startFrame": instance.data.get("frameStartHandle", 1), + "endFrame": instance.data.get("frameEndHandle", 1), + "frameStep": instance.data.get("step", 1), "selected": True, "asciiAss": self.asciiAss, "shadowLinks": True, @@ -78,14 +78,9 @@ def process(self, instance): ) cmds.select(instance.data["setMembers"], noExpand=True) - self.log.info("Extracting ass sequence") - - # Collect the start and end including handles - kwargs.update({ - "start": instance.data.get("frameStartHandle", 1), - "end": instance.data.get("frameEndHandle", 1), - "step": instance.data.get("step", 0) - }) + self.log.info( + "Extracting ass sequence with: {}".format(kwargs) + ) exported_files = cmds.arnoldExportAss(**kwargs) @@ -102,7 +97,7 @@ def process(self, instance): 'ext': 'ass', 'files': filenames if len(filenames) > 1 else filenames[0], "stagingDir": staging_dir, - 'frameStart': kwargs["start"] + 'frameStart': kwargs["startFrame"] } instance.data["representations"].append(representation) From 834edafa472d97f0944b40a87ac44015a9d27007 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Thu, 8 Dec 2022 22:20:14 +0100 Subject: [PATCH 080/130] :art: support for unreal engine 5.1 --- openpype/hosts/unreal/api/pipeline.py | 8 +++++++- .../hosts/unreal/hooks/pre_workfile_preparation.py | 1 + .../unreal/plugins/publish/collect_instances.py | 13 ++++++++----- 3 files changed, 16 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/unreal/api/pipeline.py b/openpype/hosts/unreal/api/pipeline.py index d396b64072c..db5b121d14f 100644 --- a/openpype/hosts/unreal/api/pipeline.py +++ b/openpype/hosts/unreal/api/pipeline.py @@ -2,6 +2,7 @@ import os import logging from typing import List +import semver import pyblish.api @@ -21,6 +22,8 @@ logger = logging.getLogger("openpype.hosts.unreal") OPENPYPE_CONTAINERS = "OpenPypeContainers" +UNREAL_VERSION = semver.VersionInfo( + *os.getenv("OPENPYPE_UNREAL_VERSION").split(".")) HOST_DIR = os.path.dirname(os.path.abspath(openpype.hosts.unreal.__file__)) PLUGINS_DIR = os.path.join(HOST_DIR, "plugins") @@ -40,6 +43,7 @@ class UnrealHost(HostBase, ILoadHost): name = "unreal" def install(self): + version = UNREAL_VERSION install() def get_containers(self): @@ -111,7 +115,9 @@ def ls(): """ ar = unreal.AssetRegistryHelpers.get_asset_registry() - openpype_containers = ar.get_assets_by_class("AssetContainer", True) + # UE 5.1 changed how class name is specified + class_name = ["/Script", "AssetContainer"] if UNREAL_VERSION.major == 5 and UNREAL_VERSION.minor > 0 else "AssetContainer" # noqa + openpype_containers = ar.get_assets_by_class(class_name, True) # get_asset_by_class returns AssetData. To get all metadata we need to # load asset. 
get_tag_values() work only on metadata registered in diff --git a/openpype/hosts/unreal/hooks/pre_workfile_preparation.py b/openpype/hosts/unreal/hooks/pre_workfile_preparation.py index 4ae72593e9b..2dc6fb9f428 100644 --- a/openpype/hosts/unreal/hooks/pre_workfile_preparation.py +++ b/openpype/hosts/unreal/hooks/pre_workfile_preparation.py @@ -150,6 +150,7 @@ def execute(self): engine_path=Path(engine_path) ) + self.launch_context.env["OPENPYPE_UNREAL_VERSION"] = engine_version # Append project file to launch arguments self.launch_context.launch_args.append( f"\"{project_file.as_posix()}\"") diff --git a/openpype/hosts/unreal/plugins/publish/collect_instances.py b/openpype/hosts/unreal/plugins/publish/collect_instances.py index 2f604cb322c..db968330c6f 100644 --- a/openpype/hosts/unreal/plugins/publish/collect_instances.py +++ b/openpype/hosts/unreal/plugins/publish/collect_instances.py @@ -3,6 +3,8 @@ import ast import unreal # noqa import pyblish.api +from openpype.hosts.unreal.api.pipeline import UNREAL_VERSION +from openpype.pipeline.publish import KnownPublishError class CollectInstances(pyblish.api.ContextPlugin): @@ -23,8 +25,10 @@ class CollectInstances(pyblish.api.ContextPlugin): def process(self, context): ar = unreal.AssetRegistryHelpers.get_asset_registry() - instance_containers = ar.get_assets_by_class( - "OpenPypePublishInstance", True) + class_name = ["/Script", + "AssetContainer"] if UNREAL_VERSION.major == 5 and \ + UNREAL_VERSION.minor > 0 else "OpenPypePublishInstance" # noqa + instance_containers = ar.get_assets_by_class(class_name, True) for container_data in instance_containers: asset = container_data.get_asset() @@ -32,9 +36,8 @@ def process(self, context): data["objectName"] = container_data.asset_name # convert to strings data = {str(key): str(value) for (key, value) in data.items()} - assert data.get("family"), ( - "instance has no family" - ) + if not data.get("family"): + raise KnownPublishError("instance has no family") # content of container members = ast.literal_eval(data.get("members")) From 151bb60679da425784d74b4e1fd9f5f6c47dee18 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Thu, 8 Dec 2022 22:25:47 +0100 Subject: [PATCH 081/130] :rotating_light: fix Hound --- openpype/hosts/unreal/api/pipeline.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/unreal/api/pipeline.py b/openpype/hosts/unreal/api/pipeline.py index db5b121d14f..839465881dc 100644 --- a/openpype/hosts/unreal/api/pipeline.py +++ b/openpype/hosts/unreal/api/pipeline.py @@ -23,7 +23,8 @@ logger = logging.getLogger("openpype.hosts.unreal") OPENPYPE_CONTAINERS = "OpenPypeContainers" UNREAL_VERSION = semver.VersionInfo( - *os.getenv("OPENPYPE_UNREAL_VERSION").split(".")) + *os.getenv("OPENPYPE_UNREAL_VERSION").split(".") +) HOST_DIR = os.path.dirname(os.path.abspath(openpype.hosts.unreal.__file__)) PLUGINS_DIR = os.path.join(HOST_DIR, "plugins") @@ -43,7 +44,6 @@ class UnrealHost(HostBase, ILoadHost): name = "unreal" def install(self): - version = UNREAL_VERSION install() def get_containers(self): From a8969fb5c2e1560e58a269a75165289d25f3c02e Mon Sep 17 00:00:00 2001 From: Felix David Date: Fri, 9 Dec 2022 09:57:10 +0100 Subject: [PATCH 082/130] Feature: API token refreshed every week --- openpype/modules/kitsu/utils/sync_service.py | 14 +++++++++----- pyproject.toml | 2 +- website/docs/module_kitsu.md | 2 ++ 3 files changed, 12 insertions(+), 6 deletions(-) diff --git a/openpype/modules/kitsu/utils/sync_service.py 
b/openpype/modules/kitsu/utils/sync_service.py index 441b95a7ec6..237746bea03 100644 --- a/openpype/modules/kitsu/utils/sync_service.py +++ b/openpype/modules/kitsu/utils/sync_service.py @@ -1,12 +1,9 @@ import os +import threading import gazu -from openpype.client import ( - get_project, - get_assets, - get_asset_by_name -) +from openpype.client import get_project, get_assets, get_asset_by_name from openpype.pipeline import AvalonMongoDB from .credentials import validate_credentials from .update_op_with_zou import ( @@ -397,6 +394,13 @@ def start_listeners(login: str, password: str): login (str): Kitsu user login password (str): Kitsu user password """ + # Refresh token every week + def refresh_token_every_week(): + print("Refreshing token...") + gazu.refresh_token() + threading.Timer(7 * 3600 * 24, refresh_token_every_week).start() + + refresh_token_every_week() # Connect to server listener = Listener(login, password) diff --git a/pyproject.toml b/pyproject.toml index f74f40c5615..20e676dcde5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -41,7 +41,7 @@ Click = "^7" dnspython = "^2.1.0" ftrack-python-api = "^2.3.3" shotgun_api3 = {git = "https://github.com/shotgunsoftware/python-api.git", rev = "v3.3.3"} -gazu = "^0.8.28" +gazu = "^0.8.32" google-api-python-client = "^1.12.8" # sync server google support (should be separate?) jsonschema = "^2.6.0" keyring = "^22.0.1" diff --git a/website/docs/module_kitsu.md b/website/docs/module_kitsu.md index ec38cce5e13..73e31a280b4 100644 --- a/website/docs/module_kitsu.md +++ b/website/docs/module_kitsu.md @@ -26,6 +26,8 @@ openpype_console module kitsu sync-service -l me@domain.ext -p my_password ### Events listening Listening to Kitsu events is the key to automation of many tasks like _project/episode/sequence/shot/asset/task create/update/delete_ and some more. Events listening should run at all times to perform the required processing as it is not possible to catch some of them retrospectively with strong reliability. If such timeout has been encountered, you must relaunch the `sync-service` command to run the synchronization step again. +Connection token is refreshed every week. + ### Push to Kitsu An utility function is provided to help update Kitsu data (a.k.a Zou database) with OpenPype data if the publishing to the production tracker hasn't been possible for some time. Running `push-to-zou` will create the data on behalf of the user. :::caution From fb3236675b90b023f98a83a7ada7e9aef278ca91 Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Fri, 9 Dec 2022 10:07:14 +0000 Subject: [PATCH 083/130] Fix cmds.refresh not queryable --- openpype/hosts/maya/api/lib.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index b2bbb823aa8..787a8fd8ad8 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -128,13 +128,18 @@ def get_main_window(): @contextlib.contextmanager def suspended_refresh(suspend=True): - """Suspend viewport refreshes""" - original_state = cmds.refresh(query=True, suspend=True) + """Suspend viewport refreshes + + cmds.ogs(pause=True) is a toggle so we cant pass False. 
+ """ + original_state = cmds.ogs(query=True, pause=True) try: - cmds.refresh(suspend=suspend) + if suspend and not original_state: + cmds.ogs(pause=True) yield finally: - cmds.refresh(suspend=original_state) + if suspend and not original_state: + cmds.ogs(pause=True) @contextlib.contextmanager From 67a45524a2c1bc331fce1650f633bc6a86274573 Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Fri, 9 Dec 2022 10:07:45 +0000 Subject: [PATCH 084/130] Fix inverted refresh boolean --- openpype/hosts/maya/plugins/publish/extract_pointcache.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/plugins/publish/extract_pointcache.py b/openpype/hosts/maya/plugins/publish/extract_pointcache.py index 23b76a48c2e..7ed73fd5b08 100644 --- a/openpype/hosts/maya/plugins/publish/extract_pointcache.py +++ b/openpype/hosts/maya/plugins/publish/extract_pointcache.py @@ -86,7 +86,8 @@ def process(self, instance): start=start, end=end)) - with suspended_refresh(suspend=instance.data.get("refresh", False)): + suspend = not instance.data.get("refresh", False) + with suspended_refresh(suspend=suspend): with maintained_selection(): cmds.select(nodes, noExpand=True) extract_alembic( From f51325543a1965f63e5dd70980227311b735c962 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 9 Dec 2022 11:32:54 +0100 Subject: [PATCH 085/130] change default command for headless mode --- openpype/cli.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/openpype/cli.py b/openpype/cli.py index d24cd4a872f..7611915d840 100644 --- a/openpype/cli.py +++ b/openpype/cli.py @@ -29,8 +29,14 @@ def main(ctx): It wraps different commands together. """ + if ctx.invoked_subcommand is None: - ctx.invoke(tray) + # Default command for headless openpype is 'interactive' command + # otherwise 'tray' is used. 
+ if os.environ.get("OPENPYPE_HEADLESS_MODE") == "1": + ctx.invoke(interactive) + else: + ctx.invoke(tray) @main.command() From b713a2e0c6546fb1c017d6d3a6000d433eaf3dde Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 9 Dec 2022 18:54:27 +0800 Subject: [PATCH 086/130] resolve the conflict for the project scheme --- .../schemas/projects_schema/schemas/schema_maya_create.json | 4 ---- 1 file changed, 4 deletions(-) diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json index 198b399e75d..231554d96e2 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json @@ -247,10 +247,6 @@ "key": "CreateMultiverseUsdOver", "label": "Create Multiverse USD Override" }, - { - "key": "CreateAss", - "label": "Create Ass" - }, { "key": "CreateAssembly", "label": "Create Assembly" From 8a533e59b28c306615eb3e5a415f62217780e734 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 9 Dec 2022 19:18:28 +0800 Subject: [PATCH 087/130] resolve conflict --- .../schemas/schema_maya_create.json | 93 +------------------ 1 file changed, 1 insertion(+), 92 deletions(-) diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json index e1a30826168..231554d96e2 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json @@ -230,98 +230,7 @@ } ] }, - { - "type": "dict", - "collapsible": true, - "key": "CreateAss", - "label": "Create Ass", - "checkbox_key": "enabled", - "children": [ - { - "type": "boolean", - "key": "enabled", - "label": "Enabled" - }, - { - "type": "list", - "key": "defaults", - "label": "Default Subsets", - "object_type": "text" - }, - { - "type": "boolean", - "key": "expandProcedurals", - "label": "Expand Procedurals" - }, - { - "type": "boolean", - "key": "motionBlur", - "label": "Motion Blur" - }, - { - "type": "number", - "key": "motionBlurKeys", - "label": "Motion Blur Keys", - "minimum": 0 - }, - { - "type": "number", - "key": "motionBlurLength", - "label": "Motion Blur Length", - "decimal": 3 - }, - { - "type": "boolean", - "key": "maskOptions", - "label": "Mask Options" - }, - { - "type": "boolean", - "key": "maskCamera", - "label": "Mask Camera" - }, - { - "type": "boolean", - "key": "maskLight", - "label": "Mask Light" - }, - { - "type": "boolean", - "key": "maskShape", - "label": "Mask Shape" - }, - { - "type": "boolean", - "key": "maskShader", - "label": "Mask Shader" - }, - { - "type": "boolean", - "key": "maskOverride", - "label": "Mask Override" - }, - { - "type": "boolean", - "key": "maskDriver", - "label": "Mask Driver" - }, - { - "type": "boolean", - "key": "maskFilter", - "label": "Mask Filter" - }, - { - "type": "boolean", - "key": "maskColor_manager", - "label": "Mask Color Manager" - }, - { - "type": "boolean", - "key": "maskOperator", - "label": "Mask Operator" - } - ] - }, + { "type": "schema_template", "name": "template_create_plugin", From b52018fc6231655d1a923e5c2cdd8c3903708be0 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Fri, 9 Dec 2022 19:21:17 +0800 Subject: [PATCH 088/130] resolve conflicts --- CHANGELOG.md | 84 +++- HISTORY.md | 114 +++++ openpype/action.py | 20 +- 
openpype/client/entities.py | 7 +- .../hooks/pre_copy_last_published_workfile.py | 2 +- openpype/host/interfaces.py | 2 +- .../plugins/publish/extract_abc_animation.py | 72 +++ openpype/hosts/celaction/__init__.py | 10 + openpype/hosts/celaction/addon.py | 31 ++ openpype/hosts/celaction/api/__init__.py | 1 - openpype/hosts/celaction/api/cli.py | 87 ---- .../hooks/pre_celaction_registers.py | 122 ------ .../celaction/hooks/pre_celaction_setup.py | 137 ++++++ .../publish/collect_celaction_cli_kwargs.py | 30 +- .../publish/collect_celaction_instances.py | 12 +- .../plugins/publish/collect_render_path.py | 23 +- openpype/hosts/celaction/scripts/__init__.py | 0 .../hosts/celaction/scripts/publish_cli.py | 37 ++ openpype/hosts/flame/api/plugin.py | 72 ++- .../hosts/flame/plugins/load/load_clip.py | 7 +- .../flame/plugins/load/load_clip_batch.py | 6 +- openpype/hosts/hiero/addon.py | 5 + openpype/hosts/hiero/api/__init__.py | 19 +- openpype/hosts/hiero/api/lib.py | 226 ++++++++-- openpype/hosts/hiero/api/pipeline.py | 138 ++++-- openpype/hosts/hiero/api/tags.py | 18 +- .../hosts/hiero/plugins/load/load_effects.py | 308 +++++++++++++ .../plugins/publish/collect_clip_effects.py | 3 + .../plugins/publish/precollect_instances.py | 2 +- openpype/hosts/houdini/api/__init__.py | 30 +- openpype/hosts/houdini/api/lib.py | 207 +++++++-- openpype/hosts/houdini/api/pipeline.py | 213 +++++---- openpype/hosts/houdini/api/plugin.py | 200 ++++++++- openpype/hosts/houdini/api/workio.py | 57 --- .../houdini/plugins/create/convert_legacy.py | 74 ++++ .../plugins/create/create_alembic_camera.py | 49 ++- .../plugins/create/create_arnold_ass.py | 46 +- .../plugins/create/create_composite.py | 52 ++- .../houdini/plugins/create/create_hda.py | 68 ++- .../plugins/create/create_pointcache.py | 61 +-- .../plugins/create/create_redshift_proxy.py | 42 +- .../plugins/create/create_redshift_rop.py | 56 ++- .../houdini/plugins/create/create_usd.py | 40 +- .../plugins/create/create_usdrender.py | 39 +- .../plugins/create/create_vbd_cache.py | 38 +- .../houdini/plugins/create/create_workfile.py | 93 ++++ .../plugins/publish/collect_active_state.py | 3 +- .../plugins/publish/collect_current_file.py | 38 +- .../houdini/plugins/publish/collect_frames.py | 25 +- .../plugins/publish/collect_instances.py | 9 +- .../plugins/publish/collect_output_node.py | 2 +- .../plugins/publish/collect_redshift_rop.py | 2 +- .../publish/collect_render_products.py | 2 +- .../plugins/publish/collect_usd_bootstrap.py | 2 +- .../plugins/publish/collect_usd_layers.py | 10 +- .../plugins/publish/extract_alembic.py | 4 +- .../houdini/plugins/publish/extract_ass.py | 12 +- .../plugins/publish/extract_composite.py | 31 +- .../houdini/plugins/publish/extract_hda.py | 6 +- .../plugins/publish/extract_redshift_proxy.py | 4 +- .../houdini/plugins/publish/extract_usd.py | 3 +- .../plugins/publish/extract_usd_layered.py | 2 +- .../plugins/publish/extract_vdb_cache.py | 4 +- .../publish/help/validate_vdb_input_node.xml | 21 + .../plugins/publish/increment_current_file.py | 6 +- .../houdini/plugins/publish/save_scene.py | 6 +- .../plugins/publish/valiate_vdb_input_node.py | 47 -- .../validate_abc_primitive_to_detail.py | 40 +- .../publish/validate_alembic_face_sets.py | 9 +- .../publish/validate_alembic_input_node.py | 29 +- .../publish/validate_animation_settings.py | 3 +- .../plugins/publish/validate_bypass.py | 15 +- .../plugins/publish/validate_camera_rop.py | 47 +- .../publish/validate_cop_output_node.py | 34 +- .../publish/validate_file_extension.py | 15 +- 
.../plugins/publish/validate_frame_token.py | 3 +- .../validate_houdini_license_category.py | 10 +- .../publish/validate_mkpaths_toggled.py | 13 +- .../plugins/publish/validate_no_errors.py | 11 +- .../validate_primitive_hierarchy_paths.py | 34 +- .../publish/validate_remote_publish.py | 27 +- .../validate_remote_publish_enabled.py | 11 +- .../publish/validate_sop_output_node.py | 21 +- .../validate_usd_layer_path_backslashes.py | 12 +- .../publish/validate_usd_model_and_shade.py | 10 +- .../publish/validate_usd_output_node.py | 11 +- .../validate_usd_render_product_names.py | 7 +- .../plugins/publish/validate_usd_setdress.py | 10 +- .../validate_usd_shade_model_exists.py | 9 +- .../publish/validate_usd_shade_workspace.py | 25 +- .../publish/validate_vdb_input_node.py | 13 +- .../publish/validate_vdb_output_node.py | 12 +- .../publish/validate_workfile_paths.py | 17 +- .../hosts/houdini/startup/MainMenuCommon.xml | 10 +- .../houdini/startup/python2.7libs/pythonrc.py | 6 +- .../houdini/startup/python3.7libs/pythonrc.py | 6 +- .../houdini/startup/python3.9libs/pythonrc.py | 6 +- openpype/hosts/max/__init__.py | 10 + openpype/hosts/max/addon.py | 16 + openpype/hosts/max/api/__init__.py | 20 + openpype/hosts/max/api/lib.py | 122 ++++++ openpype/hosts/max/api/menu.py | 130 ++++++ openpype/hosts/max/api/pipeline.py | 145 +++++++ openpype/hosts/max/api/plugin.py | 111 +++++ openpype/hosts/max/hooks/set_paths.py | 17 + openpype/hosts/max/plugins/__init__.py | 0 .../max/plugins/create/create_pointcache.py | 22 + .../hosts/max/plugins/load/load_pointcache.py | 65 +++ .../max/plugins/publish/collect_workfile.py | 63 +++ .../max/plugins/publish/extract_pointcache.py | 100 +++++ .../plugins/publish/validate_scene_saved.py | 18 + openpype/hosts/max/startup/startup.ms | 9 + openpype/hosts/max/startup/startup.py | 6 + openpype/hosts/maya/api/gltf.py | 88 ++++ openpype/hosts/maya/api/lib.py | 8 +- openpype/hosts/maya/api/lib_renderproducts.py | 19 +- .../hosts/maya/plugins/create/create_ass.py | 48 +- .../maya/plugins/create/create_pointcache.py | 1 + .../hosts/maya/plugins/publish/collect_ass.py | 19 +- .../maya/plugins/publish/collect_gltf.py | 17 + .../maya/plugins/publish/collect_look.py | 4 +- .../hosts/maya/plugins/publish/extract_ass.py | 126 +++--- .../maya/plugins/publish/extract_gltf.py | 65 +++ .../maya/plugins/publish/extract_look.py | 2 +- .../maya/plugins/publish/extract_playblast.py | 6 + .../plugins/publish/extract_pointcache.py | 12 +- .../maya/plugins/publish/extract_thumbnail.py | 5 + openpype/hosts/nuke/addon.py | 5 + openpype/hosts/nuke/api/lib.py | 2 +- openpype/hosts/nuke/api/pipeline.py | 3 + .../nuke/plugins/load/load_camera_abc.py | 3 + openpype/hosts/nuke/plugins/load/load_clip.py | 3 + .../hosts/nuke/plugins/load/load_effects.py | 3 + .../nuke/plugins/load/load_effects_ip.py | 3 + .../hosts/nuke/plugins/load/load_image.py | 8 +- .../hosts/nuke/plugins/load/load_model.py | 4 + .../nuke/plugins/load/load_script_precomp.py | 3 + .../plugins/publish/extract_slate_frame.py | 2 +- .../plugins/create/create_legacy_image.py | 5 +- .../publish/validate_texture_workfiles.py | 27 +- .../plugins/create/create_online.py | 96 ++++ .../plugins/publish/collect_online_file.py | 23 + .../plugins/publish/validate_online_file.py | 32 ++ .../plugins/publish/extract_sequence.py | 5 +- .../Private/OpenPypePublishInstance.cpp | 173 +++++--- .../OpenPypePublishInstanceFactory.cpp | 6 +- .../OpenPype/Public/OpenPypePublishInstance.h | 102 ++++- .../Public/OpenPypePublishInstanceFactory.h | 4 +- 
.../Private/OpenPypePublishInstance.cpp | 174 +++++--- .../OpenPypePublishInstanceFactory.cpp | 6 +- .../OpenPype/Public/OpenPypePublishInstance.h | 96 +++- .../Public/OpenPypePublishInstanceFactory.h | 4 +- .../publish/collect_published_files.py | 16 +- openpype/lib/attribute_definitions.py | 52 ++- openpype/lib/file_transaction.py | 87 ++-- openpype/lib/path_templates.py | 2 +- openpype/lib/transcoding.py | 76 +++- openpype/lib/vendor_bin_utils.py | 79 ++-- .../publish/submit_celaction_deadline.py | 106 ++--- .../plugins/publish/submit_publish_job.py | 9 +- .../custom/plugins/CelAction/CelAction.ico | Bin 0 -> 103192 bytes .../custom/plugins/CelAction/CelAction.param | 38 ++ .../custom/plugins/CelAction/CelAction.py | 122 ++++++ .../custom/plugins/GlobalJobPreLoad.py | 364 +++++++++++----- openpype/modules/ftrack/lib/avalon_sync.py | 22 +- .../plugins/publish/integrate_ftrack_api.py | 71 +-- .../publish/integrate_ftrack_description.py | 2 +- .../plugins/publish/integrate_ftrack_note.py | 2 +- .../publish/integrate_hierarchy_ftrack.py | 409 +++++++++++------- .../ftrack/scripts/sub_event_status.py | 11 + .../plugins/publish/integrate_kitsu_review.py | 1 - .../plugins/publish/collect_slack_family.py | 6 +- .../plugins/publish/integrate_slack_api.py | 35 +- openpype/pipeline/create/context.py | 3 +- openpype/pipeline/create/creator_plugins.py | 5 +- openpype/pipeline/publish/publish_plugins.py | 20 +- .../publish/collect_anatomy_instance_data.py | 2 +- openpype/plugins/publish/collect_audio.py | 185 +++++--- openpype/plugins/publish/collect_comment.py | 126 +++++- .../plugins/publish/collect_resources_path.py | 1 + openpype/plugins/publish/extract_burnin.py | 2 +- .../publish/extract_hierarchy_avalon.py | 367 +++++++++------- openpype/plugins/publish/extract_review.py | 71 +-- .../publish/extract_thumbnail_from_source.py | 1 + openpype/plugins/publish/integrate.py | 6 +- openpype/plugins/publish/integrate_legacy.py | 3 +- .../plugins/publish/integrate_thumbnail.py | 65 ++- openpype/resources/app_icons/3dsmax.png | Bin 0 -> 12804 bytes openpype/resources/app_icons/celaction.png | Bin 0 -> 4012 bytes .../resources/app_icons/celaction_local.png | Bin 40783 -> 0 bytes .../resources/app_icons/celaction_remotel.png | Bin 36400 -> 0 bytes .../defaults/project_anatomy/templates.json | 8 +- .../defaults/project_settings/celaction.json | 12 +- .../defaults/project_settings/deadline.json | 10 + .../defaults/project_settings/global.json | 15 + .../defaults/project_settings/maya.json | 21 +- .../defaults/project_settings/tvpaint.json | 5 + .../system_settings/applications.json | 35 +- openpype/settings/entities/enum_entity.py | 1 + .../schema_project_celaction.json | 37 +- .../schema_project_deadline.json | 50 +++ .../schema_project_tvpaint.json | 12 + .../schemas/schema_anatomy_attributes.json | 12 +- .../schemas/schema_global_publish.json | 21 + .../schemas/schema_maya_create.json | 93 +++- .../schemas/schema_maya_publish.json | 16 +- .../host_settings/schema_3dsmax.json | 39 ++ .../host_settings/schema_celaction.json | 4 +- .../system_schema/schema_applications.json | 4 + openpype/style/style.css | 4 + openpype/tools/attribute_defs/widgets.py | 42 +- openpype/tools/creator/model.py | 2 +- .../project_manager/project_manager/view.py | 2 +- openpype/tools/publisher/widgets/__init__.py | 2 + .../publisher/widgets/card_view_widgets.py | 11 +- .../publisher/widgets/list_view_widgets.py | 7 + .../publisher/widgets/overview_widget.py | 14 + .../tools/publisher/widgets/tabs_widget.py | 12 + 
.../publisher/widgets/validations_widget.py | 2 +- openpype/tools/publisher/widgets/widgets.py | 204 ++++++++- openpype/tools/publisher/window.py | 198 ++++++++- openpype/tools/settings/settings/constants.py | 1 - .../widgets/widget_drop_frame.py | 2 +- openpype/tools/utils/host_tools.py | 16 +- .../vendor/python/python_2/secrets/LICENSE | 21 + .../python/python_2/secrets/__init__.py | 16 + .../vendor/python/python_2/secrets/secrets.py | 132 ++++++ openpype/version.py | 2 +- setup.cfg | 3 +- tests/README.md | 10 + tests/conftest.py | 12 + tests/integration/hosts/aftereffects/lib.py | 22 +- ...=> test_publish_in_aftereffects_legacy.py} | 40 +- ...test_publish_in_aftereffects_multiframe.py | 64 --- tests/integration/hosts/maya/lib.py | 19 +- .../hosts/maya/test_publish_in_maya.py | 57 ++- tests/integration/hosts/nuke/lib.py | 32 +- .../hosts/nuke/test_publish_in_nuke.py | 25 +- tests/integration/hosts/photoshop/lib.py | 11 +- .../photoshop/test_publish_in_photoshop.py | 6 +- tests/lib/db_handler.py | 23 +- tests/lib/testing_classes.py | 68 ++- tests/resources/test_data.zip | Bin 7350 -> 5098 bytes tests/unit/igniter/test_bootstrap_repos.py | 30 +- tools/run_mongo.ps1 | 2 + website/docs/dev_build.md | 4 +- website/docs/dev_requirements.md | 2 +- website/yarn.lock | 6 +- 248 files changed, 7290 insertions(+), 2339 deletions(-) create mode 100644 openpype/hosts/blender/plugins/publish/extract_abc_animation.py create mode 100644 openpype/hosts/celaction/addon.py delete mode 100644 openpype/hosts/celaction/api/__init__.py delete mode 100644 openpype/hosts/celaction/api/cli.py delete mode 100644 openpype/hosts/celaction/hooks/pre_celaction_registers.py create mode 100644 openpype/hosts/celaction/hooks/pre_celaction_setup.py create mode 100644 openpype/hosts/celaction/scripts/__init__.py create mode 100644 openpype/hosts/celaction/scripts/publish_cli.py create mode 100644 openpype/hosts/hiero/plugins/load/load_effects.py delete mode 100644 openpype/hosts/houdini/api/workio.py create mode 100644 openpype/hosts/houdini/plugins/create/convert_legacy.py create mode 100644 openpype/hosts/houdini/plugins/create/create_workfile.py create mode 100644 openpype/hosts/houdini/plugins/publish/help/validate_vdb_input_node.xml delete mode 100644 openpype/hosts/houdini/plugins/publish/valiate_vdb_input_node.py create mode 100644 openpype/hosts/max/__init__.py create mode 100644 openpype/hosts/max/addon.py create mode 100644 openpype/hosts/max/api/__init__.py create mode 100644 openpype/hosts/max/api/lib.py create mode 100644 openpype/hosts/max/api/menu.py create mode 100644 openpype/hosts/max/api/pipeline.py create mode 100644 openpype/hosts/max/api/plugin.py create mode 100644 openpype/hosts/max/hooks/set_paths.py create mode 100644 openpype/hosts/max/plugins/__init__.py create mode 100644 openpype/hosts/max/plugins/create/create_pointcache.py create mode 100644 openpype/hosts/max/plugins/load/load_pointcache.py create mode 100644 openpype/hosts/max/plugins/publish/collect_workfile.py create mode 100644 openpype/hosts/max/plugins/publish/extract_pointcache.py create mode 100644 openpype/hosts/max/plugins/publish/validate_scene_saved.py create mode 100644 openpype/hosts/max/startup/startup.ms create mode 100644 openpype/hosts/max/startup/startup.py create mode 100644 openpype/hosts/maya/api/gltf.py create mode 100644 openpype/hosts/maya/plugins/publish/collect_gltf.py create mode 100644 openpype/hosts/maya/plugins/publish/extract_gltf.py create mode 100644 
openpype/hosts/traypublisher/plugins/create/create_online.py create mode 100644 openpype/hosts/traypublisher/plugins/publish/collect_online_file.py create mode 100644 openpype/hosts/traypublisher/plugins/publish/validate_online_file.py rename openpype/{hosts/celaction => modules/deadline}/plugins/publish/submit_celaction_deadline.py (73%) create mode 100644 openpype/modules/deadline/repository/custom/plugins/CelAction/CelAction.ico create mode 100644 openpype/modules/deadline/repository/custom/plugins/CelAction/CelAction.param create mode 100644 openpype/modules/deadline/repository/custom/plugins/CelAction/CelAction.py create mode 100644 openpype/resources/app_icons/3dsmax.png create mode 100644 openpype/resources/app_icons/celaction.png delete mode 100644 openpype/resources/app_icons/celaction_local.png delete mode 100644 openpype/resources/app_icons/celaction_remotel.png create mode 100644 openpype/settings/entities/schemas/system_schema/host_settings/schema_3dsmax.json create mode 100644 openpype/vendor/python/python_2/secrets/LICENSE create mode 100644 openpype/vendor/python/python_2/secrets/__init__.py create mode 100644 openpype/vendor/python/python_2/secrets/secrets.py rename tests/integration/hosts/aftereffects/{test_publish_in_aftereffects.py => test_publish_in_aftereffects_legacy.py} (58%) delete mode 100644 tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py diff --git a/CHANGELOG.md b/CHANGELOG.md index 707b61676f9..3cca692b684 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,8 +1,88 @@ # Changelog -## [3.14.6](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.14.8](https://github.com/pypeclub/OpenPype/tree/3.14.8) -[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.5...HEAD) +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.7...3.14.8) + +**🚀 Enhancements** + +- General: Refactored extract hierarchy plugin [\#4139](https://github.com/pypeclub/OpenPype/pull/4139) +- General: Find executable enhancement [\#4137](https://github.com/pypeclub/OpenPype/pull/4137) +- Ftrack: Reset session before instance processing [\#4129](https://github.com/pypeclub/OpenPype/pull/4129) +- Ftrack: Editorial asset sync issue [\#4126](https://github.com/pypeclub/OpenPype/pull/4126) +- Deadline: Build version resolving [\#4115](https://github.com/pypeclub/OpenPype/pull/4115) +- Houdini: New Publisher [\#3046](https://github.com/pypeclub/OpenPype/pull/3046) +- Fix: Standalone Publish Directories [\#4148](https://github.com/pypeclub/OpenPype/pull/4148) + +**🐛 Bug fixes** + +- Ftrack: Fix occational double parents issue [\#4153](https://github.com/pypeclub/OpenPype/pull/4153) +- General: Maketx executable issue [\#4136](https://github.com/pypeclub/OpenPype/pull/4136) +- Maya: Looks - add all connections [\#4135](https://github.com/pypeclub/OpenPype/pull/4135) +- General: Fix variable check in collect anatomy instance data [\#4117](https://github.com/pypeclub/OpenPype/pull/4117) + + +## [3.14.7](https://github.com/pypeclub/OpenPype/tree/3.14.7) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.6...3.14.7) + +**🆕 New features** + +- Hiero: loading effect family to timeline [\#4055](https://github.com/pypeclub/OpenPype/pull/4055) + +**🚀 Enhancements** + +- Photoshop: bug with pop-up window on Instance Creator [\#4121](https://github.com/pypeclub/OpenPype/pull/4121) +- Publisher: Open on specific tab [\#4120](https://github.com/pypeclub/OpenPype/pull/4120) +- Publisher: Hide unknown publish values 
[\#4116](https://github.com/pypeclub/OpenPype/pull/4116) +- Ftrack: Event server status give more information about version locations [\#4112](https://github.com/pypeclub/OpenPype/pull/4112) +- General: Allow higher numbers in frames and clips [\#4101](https://github.com/pypeclub/OpenPype/pull/4101) +- Publisher: Settings for validate frame range [\#4097](https://github.com/pypeclub/OpenPype/pull/4097) +- Publisher: Ignore escape button [\#4090](https://github.com/pypeclub/OpenPype/pull/4090) +- Flame: Loading clip with native colorspace resolved from mapping [\#4079](https://github.com/pypeclub/OpenPype/pull/4079) +- General: Extract review single frame output [\#4064](https://github.com/pypeclub/OpenPype/pull/4064) +- Publisher: Prepared common function for instance data cache [\#4063](https://github.com/pypeclub/OpenPype/pull/4063) +- Publisher: Easy access to publish page from create page [\#4058](https://github.com/pypeclub/OpenPype/pull/4058) +- General/TVPaint: Attribute defs dialog [\#4052](https://github.com/pypeclub/OpenPype/pull/4052) +- Publisher: Better reset defer [\#4048](https://github.com/pypeclub/OpenPype/pull/4048) +- Publisher: Add thumbnail sources [\#4042](https://github.com/pypeclub/OpenPype/pull/4042) + +**🐛 Bug fixes** + +- General: Move default settings for template name [\#4119](https://github.com/pypeclub/OpenPype/pull/4119) +- Slack: notification fail in new tray publisher [\#4118](https://github.com/pypeclub/OpenPype/pull/4118) +- Nuke: loaded nodes set to first tab [\#4114](https://github.com/pypeclub/OpenPype/pull/4114) +- Nuke: load image first frame [\#4113](https://github.com/pypeclub/OpenPype/pull/4113) +- Files Widget: Ignore case sensitivity of extensions [\#4096](https://github.com/pypeclub/OpenPype/pull/4096) +- Webpublisher: extension is lowercased in Setting and in uploaded files [\#4095](https://github.com/pypeclub/OpenPype/pull/4095) +- Publish Report Viewer: Fix small bugs [\#4086](https://github.com/pypeclub/OpenPype/pull/4086) +- Igniter: fix regex to match semver better [\#4085](https://github.com/pypeclub/OpenPype/pull/4085) +- Maya: aov filtering [\#4083](https://github.com/pypeclub/OpenPype/pull/4083) +- Flame/Flare: Loading to multiple batches [\#4080](https://github.com/pypeclub/OpenPype/pull/4080) +- hiero: creator from settings with set maximum [\#4077](https://github.com/pypeclub/OpenPype/pull/4077) +- Nuke: resolve hashes in file name only for frame token [\#4074](https://github.com/pypeclub/OpenPype/pull/4074) +- Publisher: Fix cache of asset docs [\#4070](https://github.com/pypeclub/OpenPype/pull/4070) +- Webpublisher: cleanup wp extract thumbnail [\#4067](https://github.com/pypeclub/OpenPype/pull/4067) +- Settings UI: Locked setting can't bypass lock [\#4066](https://github.com/pypeclub/OpenPype/pull/4066) +- Loader: Fix comparison of repre name [\#4053](https://github.com/pypeclub/OpenPype/pull/4053) +- Deadline: Extract environment subprocess failure [\#4050](https://github.com/pypeclub/OpenPype/pull/4050) + +**🔀 Refactored code** + +- General: Collect entities plugin minor changes [\#4089](https://github.com/pypeclub/OpenPype/pull/4089) +- General: Direct interfaces import [\#4065](https://github.com/pypeclub/OpenPype/pull/4065) + +**Merged pull requests:** + +- Bump loader-utils from 1.4.1 to 1.4.2 in /website [\#4100](https://github.com/pypeclub/OpenPype/pull/4100) +- Online family for Tray Publisher [\#4093](https://github.com/pypeclub/OpenPype/pull/4093) +- Bump loader-utils from 1.4.0 to 1.4.1 in /website 
[\#4081](https://github.com/pypeclub/OpenPype/pull/4081) +- remove underscore from subset name [\#4059](https://github.com/pypeclub/OpenPype/pull/4059) +- Alembic Loader as Arnold Standin [\#4047](https://github.com/pypeclub/OpenPype/pull/4047) + +## [3.14.6](https://github.com/pypeclub/OpenPype/tree/3.14.6) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.5...3.14.6) ### 📖 Documentation diff --git a/HISTORY.md b/HISTORY.md index f6cc74e114e..f4e132488b5 100644 --- a/HISTORY.md +++ b/HISTORY.md @@ -1,5 +1,119 @@ # Changelog +## [3.14.8](https://github.com/pypeclub/OpenPype/tree/3.14.8) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.7...3.14.8) + +**🚀 Enhancements** + +- General: Refactored extract hierarchy plugin [\#4139](https://github.com/pypeclub/OpenPype/pull/4139) +- General: Find executable enhancement [\#4137](https://github.com/pypeclub/OpenPype/pull/4137) +- Ftrack: Reset session before instance processing [\#4129](https://github.com/pypeclub/OpenPype/pull/4129) +- Ftrack: Editorial asset sync issue [\#4126](https://github.com/pypeclub/OpenPype/pull/4126) +- Deadline: Build version resolving [\#4115](https://github.com/pypeclub/OpenPype/pull/4115) +- Houdini: New Publisher [\#3046](https://github.com/pypeclub/OpenPype/pull/3046) +- Fix: Standalone Publish Directories [\#4148](https://github.com/pypeclub/OpenPype/pull/4148) + +**🐛 Bug fixes** + +- Ftrack: Fix occational double parents issue [\#4153](https://github.com/pypeclub/OpenPype/pull/4153) +- General: Maketx executable issue [\#4136](https://github.com/pypeclub/OpenPype/pull/4136) +- Maya: Looks - add all connections [\#4135](https://github.com/pypeclub/OpenPype/pull/4135) +- General: Fix variable check in collect anatomy instance data [\#4117](https://github.com/pypeclub/OpenPype/pull/4117) + +## [3.14.7](https://github.com/pypeclub/OpenPype/tree/3.14.7) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.6...3.14.7) + +**🆕 New features** + +- Hiero: loading effect family to timeline [\#4055](https://github.com/pypeclub/OpenPype/pull/4055) + +**🚀 Enhancements** + +- Photoshop: bug with pop-up window on Instance Creator [\#4121](https://github.com/pypeclub/OpenPype/pull/4121) +- Publisher: Open on specific tab [\#4120](https://github.com/pypeclub/OpenPype/pull/4120) +- Publisher: Hide unknown publish values [\#4116](https://github.com/pypeclub/OpenPype/pull/4116) +- Ftrack: Event server status give more information about version locations [\#4112](https://github.com/pypeclub/OpenPype/pull/4112) +- General: Allow higher numbers in frames and clips [\#4101](https://github.com/pypeclub/OpenPype/pull/4101) +- Publisher: Settings for validate frame range [\#4097](https://github.com/pypeclub/OpenPype/pull/4097) +- Publisher: Ignore escape button [\#4090](https://github.com/pypeclub/OpenPype/pull/4090) +- Flame: Loading clip with native colorspace resolved from mapping [\#4079](https://github.com/pypeclub/OpenPype/pull/4079) +- General: Extract review single frame output [\#4064](https://github.com/pypeclub/OpenPype/pull/4064) +- Publisher: Prepared common function for instance data cache [\#4063](https://github.com/pypeclub/OpenPype/pull/4063) +- Publisher: Easy access to publish page from create page [\#4058](https://github.com/pypeclub/OpenPype/pull/4058) +- General/TVPaint: Attribute defs dialog [\#4052](https://github.com/pypeclub/OpenPype/pull/4052) +- Publisher: Better reset defer [\#4048](https://github.com/pypeclub/OpenPype/pull/4048) +- Publisher: Add 
thumbnail sources [\#4042](https://github.com/pypeclub/OpenPype/pull/4042) + +**🐛 Bug fixes** + +- General: Move default settings for template name [\#4119](https://github.com/pypeclub/OpenPype/pull/4119) +- Slack: notification fail in new tray publisher [\#4118](https://github.com/pypeclub/OpenPype/pull/4118) +- Nuke: loaded nodes set to first tab [\#4114](https://github.com/pypeclub/OpenPype/pull/4114) +- Nuke: load image first frame [\#4113](https://github.com/pypeclub/OpenPype/pull/4113) +- Files Widget: Ignore case sensitivity of extensions [\#4096](https://github.com/pypeclub/OpenPype/pull/4096) +- Webpublisher: extension is lowercased in Setting and in uploaded files [\#4095](https://github.com/pypeclub/OpenPype/pull/4095) +- Publish Report Viewer: Fix small bugs [\#4086](https://github.com/pypeclub/OpenPype/pull/4086) +- Igniter: fix regex to match semver better [\#4085](https://github.com/pypeclub/OpenPype/pull/4085) +- Maya: aov filtering [\#4083](https://github.com/pypeclub/OpenPype/pull/4083) +- Flame/Flare: Loading to multiple batches [\#4080](https://github.com/pypeclub/OpenPype/pull/4080) +- hiero: creator from settings with set maximum [\#4077](https://github.com/pypeclub/OpenPype/pull/4077) +- Nuke: resolve hashes in file name only for frame token [\#4074](https://github.com/pypeclub/OpenPype/pull/4074) +- Publisher: Fix cache of asset docs [\#4070](https://github.com/pypeclub/OpenPype/pull/4070) +- Webpublisher: cleanup wp extract thumbnail [\#4067](https://github.com/pypeclub/OpenPype/pull/4067) +- Settings UI: Locked setting can't bypass lock [\#4066](https://github.com/pypeclub/OpenPype/pull/4066) +- Loader: Fix comparison of repre name [\#4053](https://github.com/pypeclub/OpenPype/pull/4053) +- Deadline: Extract environment subprocess failure [\#4050](https://github.com/pypeclub/OpenPype/pull/4050) + +**🔀 Refactored code** + +- General: Collect entities plugin minor changes [\#4089](https://github.com/pypeclub/OpenPype/pull/4089) +- General: Direct interfaces import [\#4065](https://github.com/pypeclub/OpenPype/pull/4065) + +**Merged pull requests:** + +- Bump loader-utils from 1.4.1 to 1.4.2 in /website [\#4100](https://github.com/pypeclub/OpenPype/pull/4100) +- Online family for Tray Publisher [\#4093](https://github.com/pypeclub/OpenPype/pull/4093) +- Bump loader-utils from 1.4.0 to 1.4.1 in /website [\#4081](https://github.com/pypeclub/OpenPype/pull/4081) +- remove underscore from subset name [\#4059](https://github.com/pypeclub/OpenPype/pull/4059) +- Alembic Loader as Arnold Standin [\#4047](https://github.com/pypeclub/OpenPype/pull/4047) + +## [3.14.6](https://github.com/pypeclub/OpenPype/tree/3.14.6) + +[Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.5...3.14.6) + +### 📖 Documentation + +- Documentation: Minor updates to dev\_requirements.md [\#4025](https://github.com/pypeclub/OpenPype/pull/4025) + +**🆕 New features** + +- Nuke: add 13.2 variant [\#4041](https://github.com/pypeclub/OpenPype/pull/4041) + +**🚀 Enhancements** + +- Publish Report Viewer: Store reports locally on machine [\#4040](https://github.com/pypeclub/OpenPype/pull/4040) +- General: More specific error in burnins script [\#4026](https://github.com/pypeclub/OpenPype/pull/4026) +- General: Extract review does not crash with old settings overrides [\#4023](https://github.com/pypeclub/OpenPype/pull/4023) +- Publisher: Convertors for legacy instances [\#4020](https://github.com/pypeclub/OpenPype/pull/4020) +- workflows: adding milestone creator and assigner 
[\#4018](https://github.com/pypeclub/OpenPype/pull/4018) +- Publisher: Catch creator errors [\#4015](https://github.com/pypeclub/OpenPype/pull/4015) + +**🐛 Bug fixes** + +- Hiero - effect collection fixes [\#4038](https://github.com/pypeclub/OpenPype/pull/4038) +- Nuke - loader clip correct hash conversion in path [\#4037](https://github.com/pypeclub/OpenPype/pull/4037) +- Maya: Soft fail when applying capture preset [\#4034](https://github.com/pypeclub/OpenPype/pull/4034) +- Igniter: handle missing directory [\#4032](https://github.com/pypeclub/OpenPype/pull/4032) +- StandalonePublisher: Fix thumbnail publishing [\#4029](https://github.com/pypeclub/OpenPype/pull/4029) +- Experimental Tools: Fix publisher import [\#4027](https://github.com/pypeclub/OpenPype/pull/4027) +- Houdini: fix wrong path in ASS loader [\#4016](https://github.com/pypeclub/OpenPype/pull/4016) + +**🔀 Refactored code** + +- General: Import lib functions from lib [\#4017](https://github.com/pypeclub/OpenPype/pull/4017) + ## [3.14.5](https://github.com/pypeclub/OpenPype/tree/3.14.5) (2022-10-24) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.14.4...3.14.5) diff --git a/openpype/action.py b/openpype/action.py index de9cdee010e..15c96404b63 100644 --- a/openpype/action.py +++ b/openpype/action.py @@ -72,17 +72,19 @@ def get_errored_plugins_from_data(context): return get_errored_plugins_from_context(context) -# 'RepairAction' and 'RepairContextAction' were moved to -# 'openpype.pipeline.publish' please change you imports. -# There is no "reasonable" way hot mark these classes as deprecated to show -# warning of wrong import. -# Deprecated since 3.14.* will be removed in 3.16.* class RepairAction(pyblish.api.Action): """Repairs the action To process the repairing this requires a static `repair(instance)` method is available on the plugin. + Deprecated: + 'RepairAction' and 'RepairContextAction' were moved to + 'openpype.pipeline.publish' please change you imports. + There is no "reasonable" way hot mark these classes as deprecated + to show warning of wrong import. Deprecated since 3.14.* will be + removed in 3.16.* + """ label = "Repair" on = "failed" # This action is only available on a failed plug-in @@ -103,13 +105,19 @@ def process(self, context, plugin): plugin.repair(instance) -# Deprecated since 3.14.* will be removed in 3.16.* class RepairContextAction(pyblish.api.Action): """Repairs the action To process the repairing this requires a static `repair(instance)` method is available on the plugin. + Deprecated: + 'RepairAction' and 'RepairContextAction' were moved to + 'openpype.pipeline.publish' please change you imports. + There is no "reasonable" way hot mark these classes as deprecated + to show warning of wrong import. Deprecated since 3.14.* will be + removed in 3.16.* + """ label = "Repair" on = "failed" # This action is only available on a failed plug-in diff --git a/openpype/client/entities.py b/openpype/client/entities.py index 43afccf2f14..c415be8816f 100644 --- a/openpype/client/entities.py +++ b/openpype/client/entities.py @@ -389,10 +389,11 @@ def get_subset_by_name(project_name, subset_name, asset_id, fields=None): returned if 'None' is passed. Returns: - None: If subset with specified filters was not found. - Dict: Subset document which can be reduced to specified 'fields'. - """ + Union[None, Dict[str, Any]]: None if subset with specified filters was + not found or dict subset document which can be reduced to + specified 'fields'. 
+ """ if not subset_name: return None diff --git a/openpype/hooks/pre_copy_last_published_workfile.py b/openpype/hooks/pre_copy_last_published_workfile.py index 44144e5fffa..26b43c39cb1 100644 --- a/openpype/hooks/pre_copy_last_published_workfile.py +++ b/openpype/hooks/pre_copy_last_published_workfile.py @@ -38,7 +38,7 @@ def execute(self): sync_server = self.modules_manager.get("sync_server") if not sync_server or not sync_server.enabled: - self.log.deubg("Sync server module is not enabled or available") + self.log.debug("Sync server module is not enabled or available") return # Check there is no workfile available diff --git a/openpype/host/interfaces.py b/openpype/host/interfaces.py index 3b2df745d19..999aefd2547 100644 --- a/openpype/host/interfaces.py +++ b/openpype/host/interfaces.py @@ -252,7 +252,7 @@ def save_file(self, dst_path=None): Remove when all usages are replaced. """ - self.save_workfile() + self.save_workfile(dst_path) def open_file(self, filepath): """Deprecated variant of 'open_workfile'. diff --git a/openpype/hosts/blender/plugins/publish/extract_abc_animation.py b/openpype/hosts/blender/plugins/publish/extract_abc_animation.py new file mode 100644 index 00000000000..e141ccaa44f --- /dev/null +++ b/openpype/hosts/blender/plugins/publish/extract_abc_animation.py @@ -0,0 +1,72 @@ +import os + +import bpy + +from openpype.pipeline import publish +from openpype.hosts.blender.api import plugin + + +class ExtractAnimationABC(publish.Extractor): + """Extract as ABC.""" + + label = "Extract Animation ABC" + hosts = ["blender"] + families = ["animation"] + optional = True + + def process(self, instance): + # Define extract output file path + stagingdir = self.staging_dir(instance) + filename = f"{instance.name}.abc" + filepath = os.path.join(stagingdir, filename) + + context = bpy.context + + # Perform extraction + self.log.info("Performing extraction..") + + plugin.deselect_all() + + selected = [] + asset_group = None + + objects = [] + for obj in instance: + if isinstance(obj, bpy.types.Collection): + for child in obj.all_objects: + objects.append(child) + for obj in objects: + children = [o for o in bpy.data.objects if o.parent == obj] + for child in children: + objects.append(child) + + for obj in objects: + obj.select_set(True) + selected.append(obj) + + context = plugin.create_blender_context( + active=asset_group, selected=selected) + + # We export the abc + bpy.ops.wm.alembic_export( + context, + filepath=filepath, + selected=True, + flatten=False + ) + + plugin.deselect_all() + + if "representations" not in instance.data: + instance.data["representations"] = [] + + representation = { + 'name': 'abc', + 'ext': 'abc', + 'files': filename, + "stagingDir": stagingdir, + } + instance.data["representations"].append(representation) + + self.log.info("Extracted instance '%s' to: %s", + instance.name, representation) diff --git a/openpype/hosts/celaction/__init__.py b/openpype/hosts/celaction/__init__.py index e69de29bb2d..8983d48d7d0 100644 --- a/openpype/hosts/celaction/__init__.py +++ b/openpype/hosts/celaction/__init__.py @@ -0,0 +1,10 @@ +from .addon import ( + CELACTION_ROOT_DIR, + CelactionAddon, +) + + +__all__ = ( + "CELACTION_ROOT_DIR", + "CelactionAddon", +) diff --git a/openpype/hosts/celaction/addon.py b/openpype/hosts/celaction/addon.py new file mode 100644 index 00000000000..9158010011a --- /dev/null +++ b/openpype/hosts/celaction/addon.py @@ -0,0 +1,31 @@ +import os +from openpype.modules import OpenPypeModule, IHostAddon + +CELACTION_ROOT_DIR = 
os.path.dirname(os.path.abspath(__file__)) + + +class CelactionAddon(OpenPypeModule, IHostAddon): + name = "celaction" + host_name = "celaction" + + def initialize(self, module_settings): + self.enabled = True + + def get_launch_hook_paths(self, app): + if app.host_name != self.host_name: + return [] + return [ + os.path.join(CELACTION_ROOT_DIR, "hooks") + ] + + def add_implementation_envs(self, env, _app): + # Set default values if are not already set via settings + defaults = { + "LOGLEVEL": "DEBUG" + } + for key, value in defaults.items(): + if not env.get(key): + env[key] = value + + def get_workfile_extensions(self): + return [".scn"] diff --git a/openpype/hosts/celaction/api/__init__.py b/openpype/hosts/celaction/api/__init__.py deleted file mode 100644 index 8c93d937383..00000000000 --- a/openpype/hosts/celaction/api/__init__.py +++ /dev/null @@ -1 +0,0 @@ -kwargs = None diff --git a/openpype/hosts/celaction/api/cli.py b/openpype/hosts/celaction/api/cli.py deleted file mode 100644 index 88fc11cafb1..00000000000 --- a/openpype/hosts/celaction/api/cli.py +++ /dev/null @@ -1,87 +0,0 @@ -import os -import sys -import copy -import argparse - -import pyblish.api -import pyblish.util - -import openpype.hosts.celaction -from openpype.lib import Logger -from openpype.hosts.celaction import api as celaction -from openpype.tools.utils import host_tools -from openpype.pipeline import install_openpype_plugins - - -log = Logger.get_logger("Celaction_cli_publisher") - -publish_host = "celaction" - -HOST_DIR = os.path.dirname(os.path.abspath(openpype.hosts.celaction.__file__)) -PLUGINS_DIR = os.path.join(HOST_DIR, "plugins") -PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish") - - -def cli(): - parser = argparse.ArgumentParser(prog="celaction_publish") - - parser.add_argument("--currentFile", - help="Pass file to Context as `currentFile`") - - parser.add_argument("--chunk", - help=("Render chanks on farm")) - - parser.add_argument("--frameStart", - help=("Start of frame range")) - - parser.add_argument("--frameEnd", - help=("End of frame range")) - - parser.add_argument("--resolutionWidth", - help=("Width of resolution")) - - parser.add_argument("--resolutionHeight", - help=("Height of resolution")) - - celaction.kwargs = parser.parse_args(sys.argv[1:]).__dict__ - - -def _prepare_publish_environments(): - """Prepares environments based on request data.""" - env = copy.deepcopy(os.environ) - - project_name = os.getenv("AVALON_PROJECT") - asset_name = os.getenv("AVALON_ASSET") - - env["AVALON_PROJECT"] = project_name - env["AVALON_ASSET"] = asset_name - env["AVALON_TASK"] = os.getenv("AVALON_TASK") - env["AVALON_WORKDIR"] = os.getenv("AVALON_WORKDIR") - env["AVALON_APP"] = f"hosts.{publish_host}" - env["AVALON_APP_NAME"] = "celaction/local" - - env["PYBLISH_HOSTS"] = publish_host - - os.environ.update(env) - - -def main(): - # prepare all environments - _prepare_publish_environments() - - # Registers pype's Global pyblish plugins - install_openpype_plugins() - - if os.path.exists(PUBLISH_PATH): - log.info(f"Registering path: {PUBLISH_PATH}") - pyblish.api.register_plugin_path(PUBLISH_PATH) - - pyblish.api.register_host(publish_host) - - return host_tools.show_publish() - - -if __name__ == "__main__": - cli() - result = main() - sys.exit(not bool(result)) diff --git a/openpype/hosts/celaction/hooks/pre_celaction_registers.py b/openpype/hosts/celaction/hooks/pre_celaction_registers.py deleted file mode 100644 index e49e66f163f..00000000000 --- a/openpype/hosts/celaction/hooks/pre_celaction_registers.py 
+++ /dev/null @@ -1,122 +0,0 @@ -import os -import shutil -import winreg -from openpype.lib import PreLaunchHook -from openpype.hosts.celaction import api as celaction - - -class CelactionPrelaunchHook(PreLaunchHook): - """ - Bootstrap celacion with pype - """ - workfile_ext = "scn" - app_groups = ["celaction"] - platforms = ["windows"] - - def execute(self): - # Add workfile path to launch arguments - workfile_path = self.workfile_path() - if workfile_path: - self.launch_context.launch_args.append(workfile_path) - - project_name = self.data["project_name"] - asset_name = self.data["asset_name"] - task_name = self.data["task_name"] - - # get publish version of celaction - app = "celaction_publish" - - # setting output parameters - path = r"Software\CelAction\CelAction2D\User Settings" - winreg.CreateKey(winreg.HKEY_CURRENT_USER, path) - hKey = winreg.OpenKey( - winreg.HKEY_CURRENT_USER, - "Software\\CelAction\\CelAction2D\\User Settings", 0, - winreg.KEY_ALL_ACCESS) - - # TODO: this will need to be checked more thoroughly - pype_exe = os.getenv("OPENPYPE_EXECUTABLE") - - winreg.SetValueEx(hKey, "SubmitAppTitle", 0, winreg.REG_SZ, pype_exe) - - parameters = [ - "launch", - f"--app {app}", - f"--project {project_name}", - f"--asset {asset_name}", - f"--task {task_name}", - "--currentFile \\\"\"*SCENE*\"\\\"", - "--chunk 10", - "--frameStart *START*", - "--frameEnd *END*", - "--resolutionWidth *X*", - "--resolutionHeight *Y*", - # "--programDir \"'*PROGPATH*'\"" - ] - winreg.SetValueEx(hKey, "SubmitParametersTitle", 0, winreg.REG_SZ, - " ".join(parameters)) - - # setting resolution parameters - path = r"Software\CelAction\CelAction2D\User Settings\Dialogs" - path += r"\SubmitOutput" - winreg.CreateKey(winreg.HKEY_CURRENT_USER, path) - hKey = winreg.OpenKey(winreg.HKEY_CURRENT_USER, path, 0, - winreg.KEY_ALL_ACCESS) - winreg.SetValueEx(hKey, "SaveScene", 0, winreg.REG_DWORD, 1) - winreg.SetValueEx(hKey, "CustomX", 0, winreg.REG_DWORD, 1920) - winreg.SetValueEx(hKey, "CustomY", 0, winreg.REG_DWORD, 1080) - - # making sure message dialogs don't appear when overwriting - path = r"Software\CelAction\CelAction2D\User Settings\Messages" - path += r"\OverwriteScene" - winreg.CreateKey(winreg.HKEY_CURRENT_USER, path) - hKey = winreg.OpenKey(winreg.HKEY_CURRENT_USER, path, 0, - winreg.KEY_ALL_ACCESS) - winreg.SetValueEx(hKey, "Result", 0, winreg.REG_DWORD, 6) - winreg.SetValueEx(hKey, "Valid", 0, winreg.REG_DWORD, 1) - - path = r"Software\CelAction\CelAction2D\User Settings\Messages" - path += r"\SceneSaved" - winreg.CreateKey(winreg.HKEY_CURRENT_USER, path) - hKey = winreg.OpenKey(winreg.HKEY_CURRENT_USER, path, 0, - winreg.KEY_ALL_ACCESS) - winreg.SetValueEx(hKey, "Result", 0, winreg.REG_DWORD, 1) - winreg.SetValueEx(hKey, "Valid", 0, winreg.REG_DWORD, 1) - - def workfile_path(self): - workfile_path = self.data["last_workfile_path"] - - # copy workfile from template if doesnt exist any on path - if not os.path.exists(workfile_path): - # TODO add ability to set different template workfile path via - # settings - pype_celaction_dir = os.path.dirname(os.path.dirname( - os.path.abspath(celaction.__file__) - )) - template_path = os.path.join( - pype_celaction_dir, - "resources", - "celaction_template_scene.scn" - ) - - if not os.path.exists(template_path): - self.log.warning( - "Couldn't find workfile template file in {}".format( - template_path - ) - ) - return - - self.log.info( - f"Creating workfile from template: \"{template_path}\"" - ) - - # Copy template workfile to new destinantion - shutil.copy2( 
-            os.path.normpath(template_path),
-            os.path.normpath(workfile_path)
-        )
-
-        self.log.info(f"Workfile to open: \"{workfile_path}\"")
-
-        return workfile_path
diff --git a/openpype/hosts/celaction/hooks/pre_celaction_setup.py b/openpype/hosts/celaction/hooks/pre_celaction_setup.py
new file mode 100644
index 00000000000..62cebf99ed2
--- /dev/null
+++ b/openpype/hosts/celaction/hooks/pre_celaction_setup.py
@@ -0,0 +1,137 @@
+import os
+import shutil
+import winreg
+import subprocess
+from openpype.lib import PreLaunchHook, get_openpype_execute_args
+from openpype.hosts.celaction import scripts
+
+CELACTION_SCRIPTS_DIR = os.path.dirname(
+    os.path.abspath(scripts.__file__)
+)
+
+
+class CelactionPrelaunchHook(PreLaunchHook):
+    """
+    Bootstrap CelAction with OpenPype
+    """
+    app_groups = ["celaction"]
+    platforms = ["windows"]
+
+    def execute(self):
+        asset_doc = self.data["asset_doc"]
+        width = asset_doc["data"]["resolutionWidth"]
+        height = asset_doc["data"]["resolutionHeight"]
+
+        # Add workfile path to launch arguments
+        workfile_path = self.workfile_path()
+        if workfile_path:
+            self.launch_context.launch_args.append(workfile_path)
+
+        # setting output parameters
+        path_user_settings = "\\".join([
+            "Software", "CelAction", "CelAction2D", "User Settings"
+        ])
+        winreg.CreateKey(winreg.HKEY_CURRENT_USER, path_user_settings)
+        hKey = winreg.OpenKey(
+            winreg.HKEY_CURRENT_USER, path_user_settings, 0,
+            winreg.KEY_ALL_ACCESS
+        )
+
+        path_to_cli = os.path.join(CELACTION_SCRIPTS_DIR, "publish_cli.py")
+        subproces_args = get_openpype_execute_args("run", path_to_cli)
+        openpype_executable = subproces_args.pop(0)
+
+        winreg.SetValueEx(
+            hKey,
+            "SubmitAppTitle",
+            0,
+            winreg.REG_SZ,
+            openpype_executable
+        )
+
+        parameters = subproces_args + [
+            "--currentFile", "*SCENE*",
+            "--chunk", "*CHUNK*",
+            "--frameStart", "*START*",
+            "--frameEnd", "*END*",
+            "--resolutionWidth", "*X*",
+            "--resolutionHeight", "*Y*"
+        ]
+
+        winreg.SetValueEx(
+            hKey, "SubmitParametersTitle", 0, winreg.REG_SZ,
+            subprocess.list2cmdline(parameters)
+        )
+
+        # setting resolution parameters
+        path_submit = "\\".join([
+            path_user_settings, "Dialogs", "SubmitOutput"
+        ])
+        winreg.CreateKey(winreg.HKEY_CURRENT_USER, path_submit)
+        hKey = winreg.OpenKey(
+            winreg.HKEY_CURRENT_USER, path_submit, 0,
+            winreg.KEY_ALL_ACCESS
+        )
+        winreg.SetValueEx(hKey, "SaveScene", 0, winreg.REG_DWORD, 1)
+        winreg.SetValueEx(hKey, "CustomX", 0, winreg.REG_DWORD, width)
+        winreg.SetValueEx(hKey, "CustomY", 0, winreg.REG_DWORD, height)
+
+        # making sure message dialogs don't appear when overwriting
+        path_overwrite_scene = "\\".join([
+            path_user_settings, "Messages", "OverwriteScene"
+        ])
+        winreg.CreateKey(winreg.HKEY_CURRENT_USER, path_overwrite_scene)
+        hKey = winreg.OpenKey(
+            winreg.HKEY_CURRENT_USER, path_overwrite_scene, 0,
+            winreg.KEY_ALL_ACCESS
+        )
+        winreg.SetValueEx(hKey, "Result", 0, winreg.REG_DWORD, 6)
+        winreg.SetValueEx(hKey, "Valid", 0, winreg.REG_DWORD, 1)
+
+        # set scene as not saved
+        path_scene_saved = "\\".join([
+            path_user_settings, "Messages", "SceneSaved"
+        ])
+        winreg.CreateKey(winreg.HKEY_CURRENT_USER, path_scene_saved)
+        hKey = winreg.OpenKey(
+            winreg.HKEY_CURRENT_USER, path_scene_saved, 0,
+            winreg.KEY_ALL_ACCESS
+        )
+        winreg.SetValueEx(hKey, "Result", 0, winreg.REG_DWORD, 1)
+        winreg.SetValueEx(hKey, "Valid", 0, winreg.REG_DWORD, 1)
+
+    def workfile_path(self):
+        workfile_path = self.data["last_workfile_path"]
+
+        # copy workfile from template if none exists on the path yet
+        if not os.path.exists(workfile_path):
+            # TODO add ability to set different template workfile path via
+            # settings
+            openpype_celaction_dir = os.path.dirname(CELACTION_SCRIPTS_DIR)
+            template_path = os.path.join(
+                openpype_celaction_dir,
+                "resources",
+                "celaction_template_scene.scn"
+            )
+
+            if not os.path.exists(template_path):
+                self.log.warning(
+                    "Couldn't find workfile template file in {}".format(
                        template_path
+                    )
+                )
+                return
+
+            self.log.info(
+                f"Creating workfile from template: \"{template_path}\""
+            )
+
+            # Copy template workfile to the new destination
+            shutil.copy2(
+                os.path.normpath(template_path),
+                os.path.normpath(workfile_path)
+            )
+
+        self.log.info(f"Workfile to open: \"{workfile_path}\"")
+
+        return workfile_path
diff --git a/openpype/hosts/celaction/plugins/publish/collect_celaction_cli_kwargs.py b/openpype/hosts/celaction/plugins/publish/collect_celaction_cli_kwargs.py
index 15c5ddaf1c1..bf97dd744b6 100644
--- a/openpype/hosts/celaction/plugins/publish/collect_celaction_cli_kwargs.py
+++ b/openpype/hosts/celaction/plugins/publish/collect_celaction_cli_kwargs.py
@@ -1,5 +1,7 @@
 import pyblish.api
-from openpype.hosts.celaction import api as celaction
+import argparse
+import sys
+from pprint import pformat
 
 
 class CollectCelactionCliKwargs(pyblish.api.Collector):
@@ -9,15 +11,31 @@ class CollectCelactionCliKwargs(pyblish.api.Collector):
     order = pyblish.api.Collector.order - 0.1
 
     def process(self, context):
-        kwargs = celaction.kwargs.copy()
+        parser = argparse.ArgumentParser(prog="celaction")
+        parser.add_argument("--currentFile",
+                            help="Pass file to Context as `currentFile`")
+        parser.add_argument("--chunk",
+                            help=("Render chunks on farm"))
+        parser.add_argument("--frameStart",
+                            help=("Start of frame range"))
+        parser.add_argument("--frameEnd",
+                            help=("End of frame range"))
+        parser.add_argument("--resolutionWidth",
+                            help=("Width of resolution"))
+        parser.add_argument("--resolutionHeight",
+                            help=("Height of resolution"))
+        passing_kwargs = parser.parse_args(sys.argv[1:]).__dict__
 
-        self.log.info("Storing kwargs: %s" % kwargs)
-        context.set_data("kwargs", kwargs)
+        self.log.info("Storing kwargs ...")
+        self.log.debug("_ passing_kwargs: {}".format(pformat(passing_kwargs)))
+
+        # set kwargs to context data
+        context.set_data("passingKwargs", passing_kwargs)
 
         # get kwargs onto context data as keys with values
-        for k, v in kwargs.items():
+        for k, v in passing_kwargs.items():
             self.log.info(f"Setting `{k}` to instance.data with value: `{v}`")
             if k in ["frameStart", "frameEnd"]:
-                context.data[k] = kwargs[k] = int(v)
+                context.data[k] = passing_kwargs[k] = int(v)
             else:
                 context.data[k] = v
diff --git a/openpype/hosts/celaction/plugins/publish/collect_celaction_instances.py b/openpype/hosts/celaction/plugins/publish/collect_celaction_instances.py
index 1d2d9da1af2..35ac7fc2641 100644
--- a/openpype/hosts/celaction/plugins/publish/collect_celaction_instances.py
+++ b/openpype/hosts/celaction/plugins/publish/collect_celaction_instances.py
@@ -36,7 +36,8 @@ def process(self, context):
             "version": version
         }
 
-        celaction_kwargs = context.data.get("kwargs", {})
+        celaction_kwargs = context.data.get(
+            "passingKwargs", {})
         if celaction_kwargs:
             shared_instance_data.update(celaction_kwargs)
 
@@ -52,8 +53,8 @@ def process(self, context):
             "subset": subset,
             "label": scene_file,
             "family": family,
-            "families": [family, "ftrack"],
-            "representations": list()
+            "families": [],
+            "representations": []
         })
 
         # adding basic script data
@@ -72,7 +73,6 @@ def process(self, context):
         self.log.info('Publishing Celaction workfile')
 
         # render
instance - family = "render.farm" subset = f"render{task}Main" instance = context.create_instance(name=subset) # getting instance state @@ -81,8 +81,8 @@ def process(self, context): # add assetEntity data into instance instance.data.update({ "label": "{} - farm".format(subset), - "family": family, - "families": [family], + "family": "render.farm", + "families": [], "subset": subset }) diff --git a/openpype/hosts/celaction/plugins/publish/collect_render_path.py b/openpype/hosts/celaction/plugins/publish/collect_render_path.py index 9cbb0e48805..f6db6c000d5 100644 --- a/openpype/hosts/celaction/plugins/publish/collect_render_path.py +++ b/openpype/hosts/celaction/plugins/publish/collect_render_path.py @@ -11,28 +11,31 @@ class CollectRenderPath(pyblish.api.InstancePlugin): families = ["render.farm"] # Presets - anatomy_render_key = None - publish_render_metadata = None + output_extension = "png" + anatomy_template_key_render_files = None + anatomy_template_key_metadata = None def process(self, instance): anatomy = instance.context.data["anatomy"] anatomy_data = copy.deepcopy(instance.data["anatomyData"]) - anatomy_data["family"] = "render" padding = anatomy.templates.get("frame_padding", 4) anatomy_data.update({ "frame": f"%0{padding}d", - "representation": "png" + "family": "render", + "representation": self.output_extension, + "ext": self.output_extension }) anatomy_filled = anatomy.format(anatomy_data) # get anatomy rendering keys - anatomy_render_key = self.anatomy_render_key or "render" - publish_render_metadata = self.publish_render_metadata or "render" + r_anatomy_key = self.anatomy_template_key_render_files + m_anatomy_key = self.anatomy_template_key_metadata # get folder and path for rendering images from celaction - render_dir = anatomy_filled[anatomy_render_key]["folder"] - render_path = anatomy_filled[anatomy_render_key]["path"] + render_dir = anatomy_filled[r_anatomy_key]["folder"] + render_path = anatomy_filled[r_anatomy_key]["path"] + self.log.debug("__ render_path: `{}`".format(render_path)) # create dir if it doesnt exists try: @@ -46,9 +49,9 @@ def process(self, instance): instance.data["path"] = render_path # get anatomy for published renders folder path - if anatomy_filled.get(publish_render_metadata): + if anatomy_filled.get(m_anatomy_key): instance.data["publishRenderMetadataFolder"] = anatomy_filled[ - publish_render_metadata]["folder"] + m_anatomy_key]["folder"] self.log.info("Metadata render path: `{}`".format( instance.data["publishRenderMetadataFolder"] )) diff --git a/openpype/hosts/celaction/scripts/__init__.py b/openpype/hosts/celaction/scripts/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/openpype/hosts/celaction/scripts/publish_cli.py b/openpype/hosts/celaction/scripts/publish_cli.py new file mode 100644 index 00000000000..39d3f1a94db --- /dev/null +++ b/openpype/hosts/celaction/scripts/publish_cli.py @@ -0,0 +1,37 @@ +import os +import sys + +import pyblish.api +import pyblish.util + +import openpype.hosts.celaction +from openpype.lib import Logger +from openpype.tools.utils import host_tools +from openpype.pipeline import install_openpype_plugins + + +log = Logger.get_logger("celaction") + +PUBLISH_HOST = "celaction" +HOST_DIR = os.path.dirname(os.path.abspath(openpype.hosts.celaction.__file__)) +PLUGINS_DIR = os.path.join(HOST_DIR, "plugins") +PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish") + + +def main(): + # Registers pype's Global pyblish plugins + install_openpype_plugins() + + if os.path.exists(PUBLISH_PATH): + 
        log.info(f"Registering path: {PUBLISH_PATH}")
+        pyblish.api.register_plugin_path(PUBLISH_PATH)
+
+    pyblish.api.register_host(PUBLISH_HOST)
+    pyblish.api.register_target("local")
+
+    return host_tools.show_publish()
+
+
+if __name__ == "__main__":
+    result = main()
+    sys.exit(not bool(result))
diff --git a/openpype/hosts/flame/api/plugin.py b/openpype/hosts/flame/api/plugin.py
index 092ce9d106f..ca113fd98ae 100644
--- a/openpype/hosts/flame/api/plugin.py
+++ b/openpype/hosts/flame/api/plugin.py
@@ -4,13 +4,13 @@
 from copy import deepcopy
 from xml.etree import ElementTree as ET
 
+import qargparse
 from Qt import QtCore, QtWidgets
-import qargparse
 
 from openpype import style
-from openpype.settings import get_current_project_settings
 from openpype.lib import Logger
 from openpype.pipeline import LegacyCreator, LoaderPlugin
+from openpype.settings import get_current_project_settings
 
 from . import constants
 from . import lib as flib
@@ -596,18 +596,28 @@ def _convert_to_marker_data(self):
         if not hero_track and self.vertical_sync:
             # driving layer is set as negative match
             for (_in, _out), hero_data in self.vertical_clip_match.items():
-                hero_data.update({"heroTrack": False})
-                if _in == self.clip_in and _out == self.clip_out:
+                """
+                Since only one hero clip instance is expected in
+                `self.vertical_clip_match`, this loops only until
+                a non-hero clip is matched with the hero clip.
+
+                `tag_hierarchy_data` is set only once for every
+                clip which is not the hero clip.
+                """
+                _hero_data = deepcopy(hero_data)
+                _hero_data.update({"heroTrack": False})
+                if _in <= self.clip_in and _out >= self.clip_out:
                     data_subset = hero_data["subset"]
                     # add track index in case duplicity of names in hero data
                     if self.subset in data_subset:
-                        hero_data["subset"] = self.subset + str(
+                        _hero_data["subset"] = self.subset + str(
                             self.track_index)
                     # in case track name and subset name is the same then add
                     if self.subset_name == self.track_name:
-                        hero_data["subset"] = self.subset
+                        _hero_data["subset"] = self.subset
                     # assing data to return hierarchy data to tag
-                    tag_hierarchy_data = hero_data
+                    tag_hierarchy_data = _hero_data
+                    break
 
         # add data to return data dict
         self.marker_data.update(tag_hierarchy_data)
@@ -690,6 +700,54 @@ class ClipLoader(LoaderPlugin):
         )
     ]
 
+    _mapping = None
+
+    def get_colorspace(self, context):
+        """Get colorspace name
+
+        Look either to version data or representation data.
+
+        Args:
+            context (dict): version context data
+
+        Returns:
+            str: colorspace name or None
+        """
+        version = context['version']
+        version_data = version.get("data", {})
+        colorspace = version_data.get(
+            "colorspace", None
+        )
+
+        if (
+            not colorspace
+            or colorspace == "Unknown"
+        ):
+            colorspace = context["representation"]["data"].get(
+                "colorspace", None)
+
+        return colorspace
+
+    @classmethod
+    def get_native_colorspace(cls, input_colorspace):
+        """Return native colorspace name.
+ + Args: + input_colorspace (str | None): colorspace name + + Returns: + str: native colorspace name defined in mapping or None + """ + if not cls._mapping: + settings = get_current_project_settings()["flame"] + mapping = settings["imageio"]["profilesMapping"]["inputs"] + cls._mapping = { + input["ocioName"]: input["flameName"] + for input in mapping + } + + return cls._mapping.get(input_colorspace) + class OpenClipSolver(flib.MediaInfoFile): create_new_clip = False diff --git a/openpype/hosts/flame/plugins/load/load_clip.py b/openpype/hosts/flame/plugins/load/load_clip.py index 0843dde76ad..f8cb7b3e117 100644 --- a/openpype/hosts/flame/plugins/load/load_clip.py +++ b/openpype/hosts/flame/plugins/load/load_clip.py @@ -36,14 +36,15 @@ def load(self, context, name, namespace, options): version = context['version'] version_data = version.get("data", {}) version_name = version.get("name", None) - colorspace = version_data.get("colorspace", None) + colorspace = self.get_colorspace(context) + clip_name = StringTemplate(self.clip_name_template).format( context["representation"]["context"]) - # TODO: settings in imageio # convert colorspace with ocio to flame mapping # in imageio flame section - colorspace = colorspace + colorspace = self.get_native_colorspace(colorspace) + self.log.info("Loading with colorspace: `{}`".format(colorspace)) # create workfile path workfile_dir = os.environ["AVALON_WORKDIR"] diff --git a/openpype/hosts/flame/plugins/load/load_clip_batch.py b/openpype/hosts/flame/plugins/load/load_clip_batch.py index 17ad8075e44..048ac194313 100644 --- a/openpype/hosts/flame/plugins/load/load_clip_batch.py +++ b/openpype/hosts/flame/plugins/load/load_clip_batch.py @@ -35,7 +35,7 @@ def load(self, context, name, namespace, options): version = context['version'] version_data = version.get("data", {}) version_name = version.get("name", None) - colorspace = version_data.get("colorspace", None) + colorspace = self.get_colorspace(context) # in case output is not in context replace key to representation if not context["representation"]["context"].get("output"): @@ -47,10 +47,10 @@ def load(self, context, name, namespace, options): clip_name = StringTemplate(self.clip_name_template).format( formating_data) - # TODO: settings in imageio # convert colorspace with ocio to flame mapping # in imageio flame section - colorspace = colorspace + colorspace = self.get_native_colorspace(colorspace) + self.log.info("Loading with colorspace: `{}`".format(colorspace)) # create workfile path workfile_dir = options.get("workdir") or os.environ["AVALON_WORKDIR"] diff --git a/openpype/hosts/hiero/addon.py b/openpype/hosts/hiero/addon.py index f5bb94dbaaa..1cc7a8637ed 100644 --- a/openpype/hosts/hiero/addon.py +++ b/openpype/hosts/hiero/addon.py @@ -27,7 +27,12 @@ def add_implementation_envs(self, env, _app): new_hiero_paths.append(norm_path) env["HIERO_PLUGIN_PATH"] = os.pathsep.join(new_hiero_paths) + # Remove auto screen scale factor for Qt + # - let Hiero decide it's value env.pop("QT_AUTO_SCREEN_SCALE_FACTOR", None) + # Remove tkinter library paths if are set + env.pop("TK_LIBRARY", None) + env.pop("TCL_LIBRARY", None) # Add vendor to PYTHONPATH python_path = env["PYTHONPATH"] diff --git a/openpype/hosts/hiero/api/__init__.py b/openpype/hosts/hiero/api/__init__.py index 781f846bbe8..1fa40c9f745 100644 --- a/openpype/hosts/hiero/api/__init__.py +++ b/openpype/hosts/hiero/api/__init__.py @@ -30,9 +30,15 @@ get_timeline_selection, get_current_track, get_track_item_tags, + get_track_openpype_tag, + 
set_track_openpype_tag, + get_track_openpype_data, get_track_item_pype_tag, set_track_item_pype_tag, get_track_item_pype_data, + get_trackitem_openpype_tag, + set_trackitem_openpype_tag, + get_trackitem_openpype_data, set_publish_attribute, get_publish_attribute, imprint, @@ -85,9 +91,12 @@ "get_timeline_selection", "get_current_track", "get_track_item_tags", - "get_track_item_pype_tag", - "set_track_item_pype_tag", - "get_track_item_pype_data", + "get_track_openpype_tag", + "set_track_openpype_tag", + "get_track_openpype_data", + "get_trackitem_openpype_tag", + "set_trackitem_openpype_tag", + "get_trackitem_openpype_data", "set_publish_attribute", "get_publish_attribute", "imprint", @@ -99,6 +108,10 @@ "apply_colorspace_project", "apply_colorspace_clips", "get_sequence_pattern_and_padding", + # depricated + "get_track_item_pype_tag", + "set_track_item_pype_tag", + "get_track_item_pype_data", # plugins "CreatorWidget", diff --git a/openpype/hosts/hiero/api/lib.py b/openpype/hosts/hiero/api/lib.py index e5d35945afc..7f0cf8149a2 100644 --- a/openpype/hosts/hiero/api/lib.py +++ b/openpype/hosts/hiero/api/lib.py @@ -7,11 +7,15 @@ import re import sys import platform +import functools +import warnings +import json import ast +import secrets import shutil import hiero -from Qt import QtWidgets +from Qt import QtWidgets, QtCore, QtXml from openpype.client import get_project from openpype.settings import get_project_settings @@ -20,15 +24,51 @@ from openpype.lib import Logger from . import tags -try: - from PySide.QtCore import QFile, QTextStream - from PySide.QtXml import QDomDocument -except ImportError: - from PySide2.QtCore import QFile, QTextStream - from PySide2.QtXml import QDomDocument -# from opentimelineio import opentime -# from pprint import pformat +class DeprecatedWarning(DeprecationWarning): + pass + + +def deprecated(new_destination): + """Mark functions as deprecated. + + It will result in a warning being emitted when the function is used. + """ + + func = None + if callable(new_destination): + func = new_destination + new_destination = None + + def _decorator(decorated_func): + if new_destination is None: + warning_message = ( + " Please check content of deprecated function to figure out" + " possible replacement." + ) + else: + warning_message = " Please replace your usage with '{}'.".format( + new_destination + ) + + @functools.wraps(decorated_func) + def wrapper(*args, **kwargs): + warnings.simplefilter("always", DeprecatedWarning) + warnings.warn( + ( + "Call to deprecated function '{}'" + "\nFunction was moved or removed.{}" + ).format(decorated_func.__name__, warning_message), + category=DeprecatedWarning, + stacklevel=4 + ) + return decorated_func(*args, **kwargs) + return wrapper + + if func is None: + return _decorator + return _decorator(func) + log = Logger.get_logger(__name__) @@ -301,7 +341,52 @@ def get_track_item_tags(track_item): return returning_tag_data -def get_track_item_pype_tag(track_item): +def _get_tag_unique_hash(): + # sourcery skip: avoid-builtin-shadow + return secrets.token_hex(nbytes=4) + + +def set_track_openpype_tag(track, data=None): + """ + Set openpype track tag to input track object. 
+ + Attributes: + track (hiero.core.VideoTrack): hiero object + + Returns: + hiero.core.Tag + """ + data = data or {} + + # basic Tag's attribute + tag_data = { + "editable": "0", + "note": "OpenPype data container", + "icon": "openpype_icon.png", + "metadata": dict(data.items()) + } + # get available pype tag if any + _tag = get_track_openpype_tag(track) + + if _tag: + # it not tag then create one + tag = tags.update_tag(_tag, tag_data) + else: + # if pype tag available then update with input data + tag = tags.create_tag( + "{}_{}".format( + self.pype_tag_name, + _get_tag_unique_hash() + ), + tag_data + ) + # add it to the input track item + track.addTag(tag) + + return tag + + +def get_track_openpype_tag(track): """ Get pype track item tag created by creator or loader plugin. @@ -312,22 +397,94 @@ def get_track_item_pype_tag(track_item): hiero.core.Tag: hierarchy, orig clip attributes """ # get all tags from track item - _tags = track_item.tags() + _tags = track.tags() if not _tags: return None for tag in _tags: # return only correct tag defined by global name - if tag.name() == self.pype_tag_name: + if self.pype_tag_name in tag.name(): return tag +def get_track_openpype_data(track, container_name=None): + """ + Get track's openpype tag data. + + Attributes: + trackItem (hiero.core.VideoTrack): hiero object + + Returns: + dict: data found on pype tag + """ + return_data = {} + # get pype data tag from track item + tag = get_track_openpype_tag(track) + + if not tag: + return None + + # get tag metadata attribute + tag_data = deepcopy(dict(tag.metadata())) + + for obj_name, obj_data in tag_data.items(): + obj_name = obj_name.replace("tag.", "") + + if obj_name in ["applieswhole", "note", "label"]: + continue + return_data[obj_name] = json.loads(obj_data) + + return ( + return_data[container_name] + if container_name + else return_data + ) + + +@deprecated("openpype.hosts.hiero.api.lib.get_trackitem_openpype_tag") +def get_track_item_pype_tag(track_item): + # backward compatibility alias + return get_trackitem_openpype_tag(track_item) + + +@deprecated("openpype.hosts.hiero.api.lib.set_trackitem_openpype_tag") def set_track_item_pype_tag(track_item, data=None): + # backward compatibility alias + return set_trackitem_openpype_tag(track_item, data) + + +@deprecated("openpype.hosts.hiero.api.lib.get_trackitem_openpype_data") +def get_track_item_pype_data(track_item): + # backward compatibility alias + return get_trackitem_openpype_data(track_item) + + +def get_trackitem_openpype_tag(track_item): """ - Set pype track item tag to input track_item. + Get pype track item tag created by creator or loader plugin. Attributes: trackItem (hiero.core.TrackItem): hiero object + Returns: + hiero.core.Tag: hierarchy, orig clip attributes + """ + # get all tags from track item + _tags = track_item.tags() + if not _tags: + return None + for tag in _tags: + # return only correct tag defined by global name + if self.pype_tag_name in tag.name(): + return tag + + +def set_trackitem_openpype_tag(track_item, data=None): + """ + Set openpype track tag to input track object. 
+ + Attributes: + track (hiero.core.VideoTrack): hiero object + Returns: hiero.core.Tag """ @@ -341,21 +498,26 @@ def set_track_item_pype_tag(track_item, data=None): "metadata": dict(data.items()) } # get available pype tag if any - _tag = get_track_item_pype_tag(track_item) - + _tag = get_trackitem_openpype_tag(track_item) if _tag: # it not tag then create one tag = tags.update_tag(_tag, tag_data) else: # if pype tag available then update with input data - tag = tags.create_tag(self.pype_tag_name, tag_data) + tag = tags.create_tag( + "{}_{}".format( + self.pype_tag_name, + _get_tag_unique_hash() + ), + tag_data + ) # add it to the input track item track_item.addTag(tag) return tag -def get_track_item_pype_data(track_item): +def get_trackitem_openpype_data(track_item): """ Get track item's pype tag data. @@ -367,7 +529,7 @@ def get_track_item_pype_data(track_item): """ data = {} # get pype data tag from track item - tag = get_track_item_pype_tag(track_item) + tag = get_trackitem_openpype_tag(track_item) if not tag: return None @@ -420,7 +582,7 @@ def imprint(track_item, data=None): """ data = data or {} - tag = set_track_item_pype_tag(track_item, data) + tag = set_trackitem_openpype_tag(track_item, data) # add publish attribute set_publish_attribute(tag, True) @@ -832,22 +994,22 @@ def set_selected_track_items(track_items_list, sequence=None): def _read_doc_from_path(path): - # reading QDomDocument from HROX path - hrox_file = QFile(path) - if not hrox_file.open(QFile.ReadOnly): + # reading QtXml.QDomDocument from HROX path + hrox_file = QtCore.QFile(path) + if not hrox_file.open(QtCore.QFile.ReadOnly): raise RuntimeError("Failed to open file for reading") - doc = QDomDocument() + doc = QtXml.QDomDocument() doc.setContent(hrox_file) hrox_file.close() return doc def _write_doc_to_path(doc, path): - # write QDomDocument to path as HROX - hrox_file = QFile(path) - if not hrox_file.open(QFile.WriteOnly): + # write QtXml.QDomDocument to path as HROX + hrox_file = QtCore.QFile(path) + if not hrox_file.open(QtCore.QFile.WriteOnly): raise RuntimeError("Failed to open file for writing") - stream = QTextStream(hrox_file) + stream = QtCore.QTextStream(hrox_file) doc.save(stream, 1) hrox_file.close() @@ -1030,7 +1192,7 @@ def sync_clip_name_to_data_asset(track_items_list): # get name and data ti_name = track_item.name() - data = get_track_item_pype_data(track_item) + data = get_trackitem_openpype_data(track_item) # ignore if no data on the clip or not publish instance if not data: @@ -1042,10 +1204,10 @@ def sync_clip_name_to_data_asset(track_items_list): if data["asset"] != ti_name: data["asset"] = ti_name # remove the original tag - tag = get_track_item_pype_tag(track_item) + tag = get_trackitem_openpype_tag(track_item) track_item.removeTag(tag) # create new tag with updated data - set_track_item_pype_tag(track_item, data) + set_trackitem_openpype_tag(track_item, data) print("asset was changed in clip: {}".format(ti_name)) @@ -1083,10 +1245,10 @@ def check_inventory_versions(track_items=None): project_name = legacy_io.active_project() filter_result = filter_containers(containers, project_name) for container in filter_result.latest: - set_track_color(container["_track_item"], clip_color) + set_track_color(container["_item"], clip_color) for container in filter_result.outdated: - set_track_color(container["_track_item"], clip_color_last) + set_track_color(container["_item"], clip_color_last) def selection_changed_timeline(event): diff --git a/openpype/hosts/hiero/api/pipeline.py 
b/openpype/hosts/hiero/api/pipeline.py index ea61dc4785a..4ab73e7d194 100644 --- a/openpype/hosts/hiero/api/pipeline.py +++ b/openpype/hosts/hiero/api/pipeline.py @@ -1,6 +1,7 @@ """ Basic avalon integration """ +from copy import deepcopy import os import contextlib from collections import OrderedDict @@ -17,6 +18,7 @@ ) from openpype.tools.utils import host_tools from . import lib, menu, events +import hiero log = Logger.get_logger(__name__) @@ -106,7 +108,7 @@ def containerise(track_item, data_imprint.update({k: v}) log.debug("_ data_imprint: {}".format(data_imprint)) - lib.set_track_item_pype_tag(track_item, data_imprint) + lib.set_trackitem_openpype_tag(track_item, data_imprint) return track_item @@ -123,79 +125,131 @@ def ls(): """ # get all track items from current timeline - all_track_items = lib.get_track_items() + all_items = lib.get_track_items() - for track_item in all_track_items: - container = parse_container(track_item) - if container: - yield container + # append all video tracks + for track in lib.get_current_sequence(): + if type(track) != hiero.core.VideoTrack: + continue + all_items.append(track) + for item in all_items: + container_data = parse_container(item) -def parse_container(track_item, validate=True): + if isinstance(container_data, list): + for _c in container_data: + yield _c + elif container_data: + yield container_data + + +def parse_container(item, validate=True): """Return container data from track_item's pype tag. Args: - track_item (hiero.core.TrackItem): A containerised track item. + item (hiero.core.TrackItem or hiero.core.VideoTrack): + A containerised track item. validate (bool)[optional]: validating with avalon scheme Returns: dict: The container schema data for input containerized track item. """ - # convert tag metadata to normal keys names - data = lib.get_track_item_pype_data(track_item) - if ( - not data - or data.get("id") != "pyblish.avalon.container" - ): - return + def data_to_container(item, data): + if ( + not data + or data.get("id") != "pyblish.avalon.container" + ): + return - if validate and data and data.get("schema"): - schema.validate(data) + if validate and data and data.get("schema"): + schema.validate(data) - if not isinstance(data, dict): - return + if not isinstance(data, dict): + return - # If not all required data return the empty container - required = ['schema', 'id', 'name', - 'namespace', 'loader', 'representation'] + # If not all required data return the empty container + required = ['schema', 'id', 'name', + 'namespace', 'loader', 'representation'] - if not all(key in data for key in required): - return + if any(key not in data for key in required): + return - container = {key: data[key] for key in required} + container = {key: data[key] for key in required} - container["objectName"] = track_item.name() + container["objectName"] = item.name() - # Store reference to the node object - container["_track_item"] = track_item + # Store reference to the node object + container["_item"] = item + return container + + # convert tag metadata to normal keys names + if type(item) == hiero.core.VideoTrack: + return_list = [] + _data = lib.get_track_openpype_data(item) + + if not _data: + return + # convert the data to list and validate them + for _, obj_data in _data.items(): + cotnainer = data_to_container(item, obj_data) + return_list.append(cotnainer) + return return_list + else: + _data = lib.get_trackitem_openpype_data(item) + return data_to_container(item, _data) + + +def _update_container_data(container, data): + for key in 
container: + try: + container[key] = data[key] + except KeyError: + pass return container -def update_container(track_item, data=None): - """Update container data to input track_item's pype tag. +def update_container(item, data=None): + """Update container data to input track_item or track's + openpype tag. Args: - track_item (hiero.core.TrackItem): A containerised track item. + item (hiero.core.TrackItem or hiero.core.VideoTrack): + A containerised track item. data (dict)[optional]: dictionery with data to be updated Returns: bool: True if container was updated correctly """ - data = data or dict() - container = lib.get_track_item_pype_data(track_item) + data = data or {} + data = deepcopy(data) - for _key, _value in container.items(): - try: - container[_key] = data[_key] - except KeyError: - pass + if type(item) == hiero.core.VideoTrack: + # form object data for test + object_name = data["objectName"] + + # get all available containers + containers = lib.get_track_openpype_data(item) + container = lib.get_track_openpype_data(item, object_name) + + containers = deepcopy(containers) + container = deepcopy(container) + + # update data in container + updated_container = _update_container_data(container, data) + # merge updated container back to containers + containers.update({object_name: updated_container}) + + return bool(lib.set_track_openpype_tag(item, containers)) + else: + container = lib.get_trackitem_openpype_data(item) + updated_container = _update_container_data(container, data) - log.info("Updating container: `{}`".format(track_item.name())) - return bool(lib.set_track_item_pype_tag(track_item, container)) + log.info("Updating container: `{}`".format(item.name())) + return bool(lib.set_trackitem_openpype_tag(item, updated_container)) def launch_workfiles_app(*args): @@ -272,11 +326,11 @@ def on_pyblish_instance_toggled(instance, old_value, new_value): instance, old_value, new_value)) from openpype.hosts.hiero.api import ( - get_track_item_pype_tag, + get_trackitem_openpype_tag, set_publish_attribute ) # Whether instances should be passthrough based on new value track_item = instance.data["item"] - tag = get_track_item_pype_tag(track_item) + tag = get_trackitem_openpype_tag(track_item) set_publish_attribute(tag, new_value) diff --git a/openpype/hosts/hiero/api/tags.py b/openpype/hosts/hiero/api/tags.py index fac26da03a9..cb7bc14edb1 100644 --- a/openpype/hosts/hiero/api/tags.py +++ b/openpype/hosts/hiero/api/tags.py @@ -1,3 +1,4 @@ +import json import re import os import hiero @@ -85,17 +86,16 @@ def update_tag(tag, data): # get metadata key from data data_mtd = data.get("metadata", {}) - # due to hiero bug we have to make sure keys which are not existent in - # data are cleared of value by `None` - for _mk in mtd.dict().keys(): - if _mk.replace("tag.", "") not in data_mtd.keys(): - mtd.setValue(_mk, str(None)) - # set all data metadata to tag metadata - for k, v in data_mtd.items(): + for _k, _v in data_mtd.items(): + value = str(_v) + if type(_v) == dict: + value = json.dumps(_v) + + # set the value mtd.setValue( - "tag.{}".format(str(k)), - str(v) + "tag.{}".format(str(_k)), + value ) # set note description of tag diff --git a/openpype/hosts/hiero/plugins/load/load_effects.py b/openpype/hosts/hiero/plugins/load/load_effects.py new file mode 100644 index 00000000000..a3fcd63b5be --- /dev/null +++ b/openpype/hosts/hiero/plugins/load/load_effects.py @@ -0,0 +1,308 @@ +import json +from collections import OrderedDict +import six + +from openpype.client import ( + get_version_by_id 
+) + +from openpype.pipeline import ( + AVALON_CONTAINER_ID, + load, + legacy_io, + get_representation_path +) +from openpype.hosts.hiero import api as phiero +from openpype.lib import Logger + + +class LoadEffects(load.LoaderPlugin): + """Loading colorspace soft effect exported from nukestudio""" + + representations = ["effectJson"] + families = ["effect"] + + label = "Load Effects" + order = 0 + icon = "cc" + color = "white" + + log = Logger.get_logger(__name__) + + def load(self, context, name, namespace, data): + """ + Loading function to get the soft effects to particular read node + + Arguments: + context (dict): context of version + name (str): name of the version + namespace (str): asset name + data (dict): compulsory attribute > not used + + Returns: + nuke node: containerised nuke node object + """ + active_sequence = phiero.get_current_sequence() + active_track = phiero.get_current_track( + active_sequence, "Loaded_{}".format(name)) + + # get main variables + namespace = namespace or context["asset"]["name"] + object_name = "{}_{}".format(name, namespace) + clip_in = context["asset"]["data"]["clipIn"] + clip_out = context["asset"]["data"]["clipOut"] + + data_imprint = { + "objectName": object_name, + "children_names": [] + } + + # getting file path + file = self.fname.replace("\\", "/") + + if self._shared_loading( + file, + active_track, + clip_in, + clip_out, + data_imprint + ): + self.containerise( + active_track, + name=name, + namespace=namespace, + object_name=object_name, + context=context, + loader=self.__class__.__name__, + data=data_imprint) + + def _shared_loading( + self, + file, + active_track, + clip_in, + clip_out, + data_imprint, + update=False + ): + # getting data from json file with unicode conversion + with open(file, "r") as f: + json_f = {self.byteify(key): self.byteify(value) + for key, value in json.load(f).items()} + + # get correct order of nodes by positions on track and subtrack + nodes_order = self.reorder_nodes(json_f) + + used_subtracks = { + stitem.name(): stitem + for stitem in phiero.flatten(active_track.subTrackItems()) + } + + loaded = False + for index_order, (ef_name, ef_val) in enumerate(nodes_order.items()): + new_name = "{}_loaded".format(ef_name) + if new_name not in used_subtracks: + effect_track_item = active_track.createEffect( + effectType=ef_val["class"], + timelineIn=clip_in, + timelineOut=clip_out, + subTrackIndex=index_order + + ) + effect_track_item.setName(new_name) + else: + effect_track_item = used_subtracks[new_name] + + node = effect_track_item.node() + for knob_name, knob_value in ef_val["node"].items(): + if ( + not knob_value + or knob_name == "name" + ): + continue + + try: + # assume list means animation + # except 4 values could be RGBA or vector + if isinstance(knob_value, list) and len(knob_value) > 4: + node[knob_name].setAnimated() + for i, value in enumerate(knob_value): + if isinstance(value, list): + # list can have vector animation + for ci, cv in enumerate(value): + node[knob_name].setValueAt( + cv, + (clip_in + i), + ci + ) + else: + # list is single values + node[knob_name].setValueAt( + value, + (clip_in + i) + ) + else: + node[knob_name].setValue(knob_value) + except NameError: + self.log.warning("Knob: {} cannot be set".format( + knob_name)) + + # register all loaded children + data_imprint["children_names"].append(new_name) + + # make sure containerisation will happen + loaded = True + + return loaded + + def update(self, container, representation): + """ Updating previously loaded effects + """ + 
active_track = container["_item"] + file = get_representation_path(representation).replace("\\", "/") + + # get main variables + name = container['name'] + namespace = container['namespace'] + + # get timeline in out data + project_name = legacy_io.active_project() + version_doc = get_version_by_id(project_name, representation["parent"]) + version_data = version_doc["data"] + clip_in = version_data["clipIn"] + clip_out = version_data["clipOut"] + + object_name = "{}_{}".format(name, namespace) + + # Disable previously created nodes + used_subtracks = { + stitem.name(): stitem + for stitem in phiero.flatten(active_track.subTrackItems()) + } + container = phiero.get_track_openpype_data( + active_track, object_name + ) + + loaded_subtrack_items = container["children_names"] + for loaded_stitem in loaded_subtrack_items: + if loaded_stitem not in used_subtracks: + continue + item_to_remove = used_subtracks.pop(loaded_stitem) + # TODO: find a way to erase nodes + self.log.debug( + "This node needs to be removed: {}".format(item_to_remove)) + + data_imprint = { + "objectName": object_name, + "name": name, + "representation": str(representation["_id"]), + "children_names": [] + } + + if self._shared_loading( + file, + active_track, + clip_in, + clip_out, + data_imprint, + update=True + ): + return phiero.update_container(active_track, data_imprint) + + def reorder_nodes(self, data): + new_order = OrderedDict() + trackNums = [v["trackIndex"] for k, v in data.items() + if isinstance(v, dict)] + subTrackNums = [v["subTrackIndex"] for k, v in data.items() + if isinstance(v, dict)] + + for trackIndex in range( + min(trackNums), max(trackNums) + 1): + for subTrackIndex in range( + min(subTrackNums), max(subTrackNums) + 1): + item = self.get_item(data, trackIndex, subTrackIndex) + if item is not {}: + new_order.update(item) + return new_order + + def get_item(self, data, trackIndex, subTrackIndex): + return {key: val for key, val in data.items() + if isinstance(val, dict) + if subTrackIndex == val["subTrackIndex"] + if trackIndex == val["trackIndex"]} + + def byteify(self, input): + """ + Converts unicode strings to strings + It goes through all dictionary + + Arguments: + input (dict/str): input + + Returns: + dict: with fixed values and keys + + """ + + if isinstance(input, dict): + return {self.byteify(key): self.byteify(value) + for key, value in input.items()} + elif isinstance(input, list): + return [self.byteify(element) for element in input] + elif isinstance(input, six.text_type): + return str(input) + else: + return input + + def switch(self, container, representation): + self.update(container, representation) + + def remove(self, container): + pass + + def containerise( + self, + track, + name, + namespace, + object_name, + context, + loader=None, + data=None + ): + """Bundle Hiero's object into an assembly and imprint it with metadata + + Containerisation enables a tracking of version, author and origin + for loaded assets. + + Arguments: + track (hiero.core.VideoTrack): object to imprint as container + name (str): Name of resulting assembly + namespace (str): Namespace under which to host container + object_name (str): name of container + context (dict): Asset information + loader (str, optional): Name of node used to produce this + container. 
+ + Returns: + track_item (hiero.core.TrackItem): containerised object + + """ + + data_imprint = { + object_name: { + "schema": "openpype:container-2.0", + "id": AVALON_CONTAINER_ID, + "name": str(name), + "namespace": str(namespace), + "loader": str(loader), + "representation": str(context["representation"]["_id"]), + } + } + + if data: + for k, v in data.items(): + data_imprint[object_name].update({k: v}) + + self.log.debug("_ data_imprint: {}".format(data_imprint)) + phiero.set_track_openpype_tag(track, data_imprint) diff --git a/openpype/hosts/hiero/plugins/publish/collect_clip_effects.py b/openpype/hosts/hiero/plugins/publish/collect_clip_effects.py index 8d2ed9a9c26..9489b1c4fb8 100644 --- a/openpype/hosts/hiero/plugins/publish/collect_clip_effects.py +++ b/openpype/hosts/hiero/plugins/publish/collect_clip_effects.py @@ -16,6 +16,9 @@ def process(self, instance): review_track_index = instance.context.data.get("reviewTrackIndex") item = instance.data["item"] + if "audio" in instance.data["family"]: + return + # frame range self.handle_start = instance.data["handleStart"] self.handle_end = instance.data["handleEnd"] diff --git a/openpype/hosts/hiero/plugins/publish/precollect_instances.py b/openpype/hosts/hiero/plugins/publish/precollect_instances.py index 1fc4b1f696f..bb02919b35c 100644 --- a/openpype/hosts/hiero/plugins/publish/precollect_instances.py +++ b/openpype/hosts/hiero/plugins/publish/precollect_instances.py @@ -48,7 +48,7 @@ def process(self, context): self.log.debug("clip_name: {}".format(clip_name)) # get openpype tag data - tag_data = phiero.get_track_item_pype_data(track_item) + tag_data = phiero.get_trackitem_openpype_data(track_item) self.log.debug("__ tag_data: {}".format(pformat(tag_data))) if not tag_data: diff --git a/openpype/hosts/houdini/api/__init__.py b/openpype/hosts/houdini/api/__init__.py index fddf7ab98de..2663a55f6ff 100644 --- a/openpype/hosts/houdini/api/__init__.py +++ b/openpype/hosts/houdini/api/__init__.py @@ -1,24 +1,13 @@ from .pipeline import ( - install, - uninstall, - + HoudiniHost, ls, - containerise, + containerise ) from .plugin import ( Creator, ) -from .workio import ( - open_file, - save_file, - current_file, - has_unsaved_changes, - file_extensions, - work_root -) - from .lib import ( lsattr, lsattrs, @@ -29,22 +18,13 @@ __all__ = [ - "install", - "uninstall", + "HoudiniHost", "ls", "containerise", "Creator", - # Workfiles API - "open_file", - "save_file", - "current_file", - "has_unsaved_changes", - "file_extensions", - "work_root", - # Utility functions "lsattr", "lsattrs", @@ -52,7 +32,3 @@ "maintained_selection" ] - -# Backwards API compatibility -open = open_file -save = save_file diff --git a/openpype/hosts/houdini/api/lib.py b/openpype/hosts/houdini/api/lib.py index c8a7f92bb99..13f5a62ec33 100644 --- a/openpype/hosts/houdini/api/lib.py +++ b/openpype/hosts/houdini/api/lib.py @@ -1,6 +1,10 @@ +# -*- coding: utf-8 -*- +import sys +import os import uuid import logging from contextlib import contextmanager +import json import six @@ -8,10 +12,13 @@ from openpype.pipeline import legacy_io from openpype.pipeline.context_tools import get_current_project_asset - import hou + +self = sys.modules[__name__] +self._parent = None log = logging.getLogger(__name__) +JSON_PREFIX = "JSON:::" def get_asset_fps(): @@ -29,23 +36,18 @@ def set_id(node, unique_id, overwrite=False): def get_id(node): - """ - Get the `cbId` attribute of the given node + """Get the `cbId` attribute of the given node. 
+ Args: node (hou.Node): the name of the node to retrieve the attribute from Returns: - str + str: cbId attribute of the node. """ - if node is None: - return - - id = node.parm("id") - if node is None: - return - return id + if node is not None: + return node.parm("id") def generate_ids(nodes, asset_id=None): @@ -281,7 +283,7 @@ def render_rop(ropnode): raise RuntimeError("Render failed: {0}".format(exc)) -def imprint(node, data): +def imprint(node, data, update=False): """Store attributes with value on a node Depending on the type of attribute it creates the correct parameter @@ -290,49 +292,76 @@ def imprint(node, data): http://www.sidefx.com/docs/houdini/hom/hou/ParmTemplate.html + Because of some update glitch where you cannot overwrite existing + ParmTemplates on node using: + `setParmTemplates()` and `parmTuplesInFolder()` + update is done in another pass. + Args: node(hou.Node): node object from Houdini data(dict): collection of attributes and their value + update (bool, optional): flag if imprint should update + already existing data or leave them untouched and only + add new. Returns: None """ + if not data: + return + if not node: + self.log.error("Node is not set, calling imprint on invalid data.") + return - parm_group = node.parmTemplateGroup() + current_parms = {p.name(): p for p in node.spareParms()} + update_parms = [] + templates = [] - parm_folder = hou.FolderParmTemplate("folder", "Extra") for key, value in data.items(): if value is None: continue - if isinstance(value, float): - parm = hou.FloatParmTemplate(name=key, - label=key, - num_components=1, - default_value=(value,)) - elif isinstance(value, bool): - parm = hou.ToggleParmTemplate(name=key, - label=key, - default_value=value) - elif isinstance(value, int): - parm = hou.IntParmTemplate(name=key, - label=key, - num_components=1, - default_value=(value,)) - elif isinstance(value, six.string_types): - parm = hou.StringParmTemplate(name=key, - label=key, - num_components=1, - default_value=(value,)) - else: - raise TypeError("Unsupported type: %r" % type(value)) + parm = get_template_from_value(key, value) + + if key in current_parms: + if node.evalParm(key) == data[key]: + continue + if not update: + log.debug(f"{key} already exists on {node}") + else: + log.debug(f"replacing {key}") + update_parms.append(parm) + continue - parm_folder.addParmTemplate(parm) + templates.append(parm) + + parm_group = node.parmTemplateGroup() + parm_folder = parm_group.findFolder("Extra") + + # if folder doesn't exist yet, create one and append to it, + # else append to existing one + if not parm_folder: + parm_folder = hou.FolderParmTemplate("folder", "Extra") + parm_folder.setParmTemplates(templates) + parm_group.append(parm_folder) + else: + for template in templates: + parm_group.appendToFolder(parm_folder, template) + # this is needed because the pointer to folder + # is for some reason lost every call to `appendToFolder()` + parm_folder = parm_group.findFolder("Extra") - parm_group.append(parm_folder) node.setParmTemplateGroup(parm_group) + # TODO: Updating is done here, by calling probably deprecated functions. + # This needs to be addressed in the future. 
+ if not update_parms: + return + + for parm in update_parms: + node.replaceSpareParmTuple(parm.name(), parm) + def lsattr(attr, value=None, root="/"): """Return nodes that have `attr` @@ -397,8 +426,22 @@ def read(node): """ # `spareParms` returns a tuple of hou.Parm objects - return {parameter.name(): parameter.eval() for - parameter in node.spareParms()} + data = {} + if not node: + return data + for parameter in node.spareParms(): + value = parameter.eval() + # test if value is json encoded dict + if isinstance(value, six.string_types) and \ + value.startswith(JSON_PREFIX): + try: + value = json.loads(value[len(JSON_PREFIX):]) + except json.JSONDecodeError: + # not a json + pass + data[parameter.name()] = value + + return data @contextmanager @@ -460,3 +503,89 @@ def reset_framerange(): hou.playbar.setFrameRange(frame_start, frame_end) hou.playbar.setPlaybackRange(frame_start, frame_end) hou.setFrame(frame_start) + + +def get_main_window(): + """Acquire Houdini's main window""" + if self._parent is None: + self._parent = hou.ui.mainQtWindow() + return self._parent + + +def get_template_from_value(key, value): + if isinstance(value, float): + parm = hou.FloatParmTemplate(name=key, + label=key, + num_components=1, + default_value=(value,)) + elif isinstance(value, bool): + parm = hou.ToggleParmTemplate(name=key, + label=key, + default_value=value) + elif isinstance(value, int): + parm = hou.IntParmTemplate(name=key, + label=key, + num_components=1, + default_value=(value,)) + elif isinstance(value, six.string_types): + parm = hou.StringParmTemplate(name=key, + label=key, + num_components=1, + default_value=(value,)) + elif isinstance(value, (dict, list, tuple)): + parm = hou.StringParmTemplate(name=key, + label=key, + num_components=1, + default_value=( + JSON_PREFIX + json.dumps(value),)) + else: + raise TypeError("Unsupported type: %r" % type(value)) + + return parm + + +def get_frame_data(node): + """Get the frame data: start frame, end frame and steps. + + Args: + node(hou.Node) + + Returns: + dict: frame data for star, end and steps. + + """ + data = {} + + if node.parm("trange") is None: + + return data + + if node.evalParm("trange") == 0: + self.log.debug("trange is 0") + return data + + data["frameStart"] = node.evalParm("f1") + data["frameEnd"] = node.evalParm("f2") + data["steps"] = node.evalParm("f3") + + return data + + +def splitext(name, allowed_multidot_extensions): + # type: (str, list) -> tuple + """Split file name to name and extension. + + Args: + name (str): File name to split. + allowed_multidot_extensions (list of str): List of allowed multidot + extensions. + + Returns: + tuple: Name and extension. 
+ """ + + for ext in allowed_multidot_extensions: + if name.endswith(ext): + return name[:-len(ext)], ext + + return os.path.splitext(name) diff --git a/openpype/hosts/houdini/api/pipeline.py b/openpype/hosts/houdini/api/pipeline.py index e4af1913ef0..b0791fcb6c7 100644 --- a/openpype/hosts/houdini/api/pipeline.py +++ b/openpype/hosts/houdini/api/pipeline.py @@ -1,9 +1,13 @@ +# -*- coding: utf-8 -*- +"""Pipeline tools for OpenPype Houdini integration.""" import os import sys import logging import contextlib -import hou +import hou # noqa + +from openpype.host import HostBase, IWorkfileHost, ILoadHost, INewPublisher import pyblish.api @@ -26,6 +30,7 @@ log = logging.getLogger("openpype.hosts.houdini") AVALON_CONTAINERS = "/obj/AVALON_CONTAINERS" +CONTEXT_CONTAINER = "/obj/OpenPypeContext" IS_HEADLESS = not hasattr(hou, "ui") PLUGINS_DIR = os.path.join(HOUDINI_HOST_DIR, "plugins") @@ -35,71 +40,139 @@ INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") -self = sys.modules[__name__] -self._has_been_setup = False -self._parent = None -self._events = dict() - - -def install(): - _register_callbacks() - - pyblish.api.register_host("houdini") - pyblish.api.register_host("hython") - pyblish.api.register_host("hpython") - - pyblish.api.register_plugin_path(PUBLISH_PATH) - register_loader_plugin_path(LOAD_PATH) - register_creator_plugin_path(CREATE_PATH) - - log.info("Installing callbacks ... ") - # register_event_callback("init", on_init) - register_event_callback("before.save", before_save) - register_event_callback("save", on_save) - register_event_callback("open", on_open) - register_event_callback("new", on_new) - - pyblish.api.register_callback( - "instanceToggled", on_pyblish_instance_toggled - ) - - self._has_been_setup = True - # add houdini vendor packages - hou_pythonpath = os.path.join(HOUDINI_HOST_DIR, "vendor") - - sys.path.append(hou_pythonpath) - - # Set asset settings for the empty scene directly after launch of Houdini - # so it initializes into the correct scene FPS, Frame Range, etc. - # todo: make sure this doesn't trigger when opening with last workfile - _set_context_settings() - shelves.generate_shelves() - - -def uninstall(): - """Uninstall Houdini-specific functionality of avalon-core. - - This function is called automatically on calling `api.uninstall()`. - """ +class HoudiniHost(HostBase, IWorkfileHost, ILoadHost, INewPublisher): + name = "houdini" + + def __init__(self): + super(HoudiniHost, self).__init__() + self._op_events = {} + self._has_been_setup = False + + def install(self): + pyblish.api.register_host("houdini") + pyblish.api.register_host("hython") + pyblish.api.register_host("hpython") + + pyblish.api.register_plugin_path(PUBLISH_PATH) + register_loader_plugin_path(LOAD_PATH) + register_creator_plugin_path(CREATE_PATH) + + log.info("Installing callbacks ... ") + # register_event_callback("init", on_init) + self._register_callbacks() + register_event_callback("before.save", before_save) + register_event_callback("save", on_save) + register_event_callback("open", on_open) + register_event_callback("new", on_new) + + pyblish.api.register_callback( + "instanceToggled", on_pyblish_instance_toggled + ) + + self._has_been_setup = True + # add houdini vendor packages + hou_pythonpath = os.path.join(HOUDINI_HOST_DIR, "vendor") + + sys.path.append(hou_pythonpath) + + # Set asset settings for the empty scene directly after launch of + # Houdini so it initializes into the correct scene FPS, + # Frame Range, etc. 
+ # TODO: make sure this doesn't trigger when + # opening with last workfile. + _set_context_settings() + shelves.generate_shelves() + + def has_unsaved_changes(self): + return hou.hipFile.hasUnsavedChanges() + + def get_workfile_extensions(self): + return [".hip", ".hiplc", ".hipnc"] + + def save_workfile(self, dst_path=None): + # Force forwards slashes to avoid segfault + if dst_path: + dst_path = dst_path.replace("\\", "/") + hou.hipFile.save(file_name=dst_path, + save_to_recent_files=True) + return dst_path + + def open_workfile(self, filepath): + # Force forwards slashes to avoid segfault + filepath = filepath.replace("\\", "/") + + hou.hipFile.load(filepath, + suppress_save_prompt=True, + ignore_load_warnings=False) + + return filepath + + def get_current_workfile(self): + current_filepath = hou.hipFile.path() + if (os.path.basename(current_filepath) == "untitled.hip" and + not os.path.exists(current_filepath)): + # By default a new scene in houdini is saved in the current + # working directory as "untitled.hip" so we need to capture + # that and consider it 'not saved' when it's in that state. + return None + + return current_filepath + + def get_containers(self): + return ls() + + def _register_callbacks(self): + for event in self._op_events.copy().values(): + if event is None: + continue + + try: + hou.hipFile.removeEventCallback(event) + except RuntimeError as e: + log.info(e) + + self._op_events[on_file_event_callback] = hou.hipFile.addEventCallback( + on_file_event_callback + ) + + @staticmethod + def create_context_node(): + """Helper for creating context holding node. + + Returns: + hou.Node: context node + + """ + obj_network = hou.node("/obj") + op_ctx = obj_network.createNode( + "null", node_name="OpenPypeContext") + op_ctx.moveToGoodPosition() + op_ctx.setBuiltExplicitly(False) + op_ctx.setCreatorState("OpenPype") + op_ctx.setComment("OpenPype node to hold context metadata") + op_ctx.setColor(hou.Color((0.081, 0.798, 0.810))) + op_ctx.hide(True) + return op_ctx - pyblish.api.deregister_host("hython") - pyblish.api.deregister_host("hpython") - pyblish.api.deregister_host("houdini") + def update_context_data(self, data, changes): + op_ctx = hou.node(CONTEXT_CONTAINER) + if not op_ctx: + op_ctx = self.create_context_node() + lib.imprint(op_ctx, data) -def _register_callbacks(): - for event in self._events.copy().values(): - if event is None: - continue + def get_context_data(self): + op_ctx = hou.node(CONTEXT_CONTAINER) + if not op_ctx: + op_ctx = self.create_context_node() + return lib.read(op_ctx) - try: - hou.hipFile.removeEventCallback(event) - except RuntimeError as e: - log.info(e) + def save_file(self, dst_path=None): + # Force forwards slashes to avoid segfault + dst_path = dst_path.replace("\\", "/") - self._events[on_file_event_callback] = hou.hipFile.addEventCallback( - on_file_event_callback - ) + hou.hipFile.save(file_name=dst_path, + save_to_recent_files=True) def on_file_event_callback(event): @@ -113,22 +186,6 @@ def on_file_event_callback(event): emit_event("new") -def get_main_window(): - """Acquire Houdini's main window""" - if self._parent is None: - self._parent = hou.ui.mainQtWindow() - return self._parent - - -def teardown(): - """Remove integration""" - if not self._has_been_setup: - return - - self._has_been_setup = False - print("pyblish: Integration torn down successfully") - - def containerise(name, namespace, nodes, @@ -251,7 +308,7 @@ def on_open(): log.warning("Scene has outdated content.") # Get main window - parent = get_main_window() + 
parent = lib.get_main_window() if parent is None: log.info("Skipping outdated content pop-up " "because Houdini window can't be found.") diff --git a/openpype/hosts/houdini/api/plugin.py b/openpype/hosts/houdini/api/plugin.py index 2bbb65aa05f..e15e27c83fd 100644 --- a/openpype/hosts/houdini/api/plugin.py +++ b/openpype/hosts/houdini/api/plugin.py @@ -1,14 +1,19 @@ # -*- coding: utf-8 -*- """Houdini specific Avalon/Pyblish plugin definitions.""" import sys +from abc import ( + ABCMeta +) import six - import hou from openpype.pipeline import ( CreatorError, - LegacyCreator + LegacyCreator, + Creator as NewCreator, + CreatedInstance ) -from .lib import imprint +from openpype.lib import BoolDef +from .lib import imprint, read, lsattr class OpenPypeCreatorError(CreatorError): @@ -30,12 +35,15 @@ class Creator(LegacyCreator): when hovering over a node. The information is visible under the name of the node. + Deprecated: + This creator is deprecated and will be removed in future version. + """ defaults = ['Main'] def __init__(self, *args, **kwargs): super(Creator, self).__init__(*args, **kwargs) - self.nodes = list() + self.nodes = [] def process(self): """This is the base functionality to create instances in Houdini @@ -84,3 +92,187 @@ def process(self): OpenPypeCreatorError, OpenPypeCreatorError("Creator error: {}".format(er)), sys.exc_info()[2]) + + +class HoudiniCreatorBase(object): + @staticmethod + def cache_subsets(shared_data): + """Cache instances for Creators to shared data. + + Create `houdini_cached_subsets` key when needed in shared data and + fill it with all collected instances from the scene under its + respective creator identifiers. + + If legacy instances are detected in the scene, create + `houdini_cached_legacy_subsets` there and fill it with + all legacy subsets under family as a key. + + Args: + Dict[str, Any]: Shared data. + + Return: + Dict[str, Any]: Shared data dictionary. + + """ + if shared_data.get("houdini_cached_subsets") is None: + shared_data["houdini_cached_subsets"] = {} + if shared_data.get("houdini_cached_legacy_subsets") is None: + shared_data["houdini_cached_legacy_subsets"] = {} + cached_instances = lsattr("id", "pyblish.avalon.instance") + for i in cached_instances: + if not i.parm("creator_identifier"): + # we have legacy instance + family = i.parm("family").eval() + if family not in shared_data[ + "houdini_cached_legacy_subsets"]: + shared_data["houdini_cached_legacy_subsets"][ + family] = [i] + else: + shared_data[ + "houdini_cached_legacy_subsets"][family].append(i) + continue + + creator_id = i.parm("creator_identifier").eval() + if creator_id not in shared_data["houdini_cached_subsets"]: + shared_data["houdini_cached_subsets"][creator_id] = [i] + else: + shared_data[ + "houdini_cached_subsets"][creator_id].append(i) # noqa + return shared_data + + @staticmethod + def create_instance_node( + node_name, parent, + node_type="geometry"): + # type: (str, str, str) -> hou.Node + """Create node representing instance. + + Arguments: + node_name (str): Name of the new node. + parent (str): Name of the parent node. + node_type (str, optional): Type of the node. + + Returns: + hou.Node: Newly created instance node. 
+ + """ + parent_node = hou.node(parent) + instance_node = parent_node.createNode( + node_type, node_name=node_name) + instance_node.moveToGoodPosition() + return instance_node + + +@six.add_metaclass(ABCMeta) +class HoudiniCreator(NewCreator, HoudiniCreatorBase): + """Base class for most of the Houdini creator plugins.""" + selected_nodes = [] + + def create(self, subset_name, instance_data, pre_create_data): + try: + if pre_create_data.get("use_selection"): + self.selected_nodes = hou.selectedNodes() + + # Get the node type and remove it from the data, not needed + node_type = instance_data.pop("node_type", None) + if node_type is None: + node_type = "geometry" + + instance_node = self.create_instance_node( + subset_name, "/out", node_type) + + self.customize_node_look(instance_node) + + instance_data["instance_node"] = instance_node.path() + instance = CreatedInstance( + self.family, + subset_name, + instance_data, + self) + self._add_instance_to_context(instance) + imprint(instance_node, instance.data_to_store()) + return instance + + except hou.Error as er: + six.reraise( + OpenPypeCreatorError, + OpenPypeCreatorError("Creator error: {}".format(er)), + sys.exc_info()[2]) + + def lock_parameters(self, node, parameters): + """Lock list of specified parameters on the node. + + Args: + node (hou.Node): Houdini node to lock parameters on. + parameters (list of str): List of parameter names. + + """ + for name in parameters: + try: + parm = node.parm(name) + parm.lock(True) + except AttributeError: + self.log.debug("missing lock pattern {}".format(name)) + + def collect_instances(self): + # cache instances if missing + self.cache_subsets(self.collection_shared_data) + for instance in self.collection_shared_data[ + "houdini_cached_subsets"].get(self.identifier, []): + created_instance = CreatedInstance.from_existing( + read(instance), self + ) + self._add_instance_to_context(created_instance) + + def update_instances(self, update_list): + for created_inst, _changes in update_list: + instance_node = hou.node(created_inst.get("instance_node")) + + new_values = { + key: new_value + for key, (_old_value, new_value) in _changes.items() + } + imprint( + instance_node, + new_values, + update=True + ) + + def remove_instances(self, instances): + """Remove specified instance from the scene. + + This is only removing `id` parameter so instance is no longer + instance, because it might contain valuable data for artist. + + """ + for instance in instances: + instance_node = hou.node(instance.data.get("instance_node")) + if instance_node: + instance_node.destroy() + + self._remove_instance_from_context(instance) + + def get_pre_create_attr_defs(self): + return [ + BoolDef("use_selection", label="Use selection") + ] + + @staticmethod + def customize_node_look( + node, color=None, + shape="chevron_down"): + """Set custom look for instance nodes. + + Args: + node (hou.Node): Node to set look. + color (hou.Color, Optional): Color of the node. + shape (str, Optional): Shape name of the node. 
+ + Returns: + None + + """ + if not color: + color = hou.Color((0.616, 0.871, 0.769)) + node.setUserData('nodeshape', shape) + node.setColor(color) diff --git a/openpype/hosts/houdini/api/workio.py b/openpype/hosts/houdini/api/workio.py deleted file mode 100644 index 5f7efff333a..00000000000 --- a/openpype/hosts/houdini/api/workio.py +++ /dev/null @@ -1,57 +0,0 @@ -"""Host API required Work Files tool""" -import os - -import hou - - -def file_extensions(): - return [".hip", ".hiplc", ".hipnc"] - - -def has_unsaved_changes(): - return hou.hipFile.hasUnsavedChanges() - - -def save_file(filepath): - - # Force forwards slashes to avoid segfault - filepath = filepath.replace("\\", "/") - - hou.hipFile.save(file_name=filepath, - save_to_recent_files=True) - - return filepath - - -def open_file(filepath): - - # Force forwards slashes to avoid segfault - filepath = filepath.replace("\\", "/") - - hou.hipFile.load(filepath, - suppress_save_prompt=True, - ignore_load_warnings=False) - - return filepath - - -def current_file(): - - current_filepath = hou.hipFile.path() - if (os.path.basename(current_filepath) == "untitled.hip" and - not os.path.exists(current_filepath)): - # By default a new scene in houdini is saved in the current - # working directory as "untitled.hip" so we need to capture - # that and consider it 'not saved' when it's in that state. - return None - - return current_filepath - - -def work_root(session): - work_dir = session["AVALON_WORKDIR"] - scene_dir = session.get("AVALON_SCENEDIR") - if scene_dir: - return os.path.join(work_dir, scene_dir) - else: - return work_dir diff --git a/openpype/hosts/houdini/plugins/create/convert_legacy.py b/openpype/hosts/houdini/plugins/create/convert_legacy.py new file mode 100644 index 00000000000..4b8041b4f55 --- /dev/null +++ b/openpype/hosts/houdini/plugins/create/convert_legacy.py @@ -0,0 +1,74 @@ +# -*- coding: utf-8 -*- +"""Convertor for legacy Houdini subsets.""" +from openpype.pipeline.create.creator_plugins import SubsetConvertorPlugin +from openpype.hosts.houdini.api.lib import imprint + + +class HoudiniLegacyConvertor(SubsetConvertorPlugin): + """Find and convert any legacy subsets in the scene. + + This Convertor will find all legacy subsets in the scene and will + transform them to the current system. Since the old subsets doesn't + retain any information about their original creators, the only mapping + we can do is based on their families. + + Its limitation is that you can have multiple creators creating subset + of the same family and there is no way to handle it. This code should + nevertheless cover all creators that came with OpenPype. + + """ + identifier = "io.openpype.creators.houdini.legacy" + family_to_id = { + "camera": "io.openpype.creators.houdini.camera", + "ass": "io.openpype.creators.houdini.ass", + "imagesequence": "io.openpype.creators.houdini.imagesequence", + "hda": "io.openpype.creators.houdini.hda", + "pointcache": "io.openpype.creators.houdini.pointcache", + "redshiftproxy": "io.openpype.creators.houdini.redshiftproxy", + "redshift_rop": "io.openpype.creators.houdini.redshift_rop", + "usd": "io.openpype.creators.houdini.usd", + "usdrender": "io.openpype.creators.houdini.usdrender", + "vdbcache": "io.openpype.creators.houdini.vdbcache" + } + + def __init__(self, *args, **kwargs): + super(HoudiniLegacyConvertor, self).__init__(*args, **kwargs) + self.legacy_subsets = {} + + def find_instances(self): + """Find legacy subsets in the scene. 
+ + Legacy subsets are the ones that doesn't have `creator_identifier` + parameter on them. + + This is using cached entries done in + :py:meth:`~HoudiniCreatorBase.cache_subsets()` + + """ + self.legacy_subsets = self.collection_shared_data.get( + "houdini_cached_legacy_subsets") + if not self.legacy_subsets: + return + self.add_convertor_item("Found {} incompatible subset{}.".format( + len(self.legacy_subsets), "s" if len(self.legacy_subsets) > 1 else "") + ) + + def convert(self): + """Convert all legacy subsets to current. + + It is enough to add `creator_identifier` and `instance_node`. + + """ + if not self.legacy_subsets: + return + + for family, subsets in self.legacy_subsets.items(): + if family in self.family_to_id: + for subset in subsets: + data = { + "creator_identifier": self.family_to_id[family], + "instance_node": subset.path() + } + self.log.info("Converting {} to {}".format( + subset.path(), self.family_to_id[family])) + imprint(subset, data) diff --git a/openpype/hosts/houdini/plugins/create/create_alembic_camera.py b/openpype/hosts/houdini/plugins/create/create_alembic_camera.py index eef86005f5b..fec64eb4a10 100644 --- a/openpype/hosts/houdini/plugins/create/create_alembic_camera.py +++ b/openpype/hosts/houdini/plugins/create/create_alembic_camera.py @@ -1,46 +1,49 @@ +# -*- coding: utf-8 -*- +"""Creator plugin for creating alembic camera subsets.""" from openpype.hosts.houdini.api import plugin +from openpype.pipeline import CreatedInstance, CreatorError -class CreateAlembicCamera(plugin.Creator): - """Single baked camera from Alembic ROP""" +class CreateAlembicCamera(plugin.HoudiniCreator): + """Single baked camera from Alembic ROP.""" - name = "camera" + identifier = "io.openpype.creators.houdini.camera" label = "Camera (Abc)" family = "camera" icon = "camera" - def __init__(self, *args, **kwargs): - super(CreateAlembicCamera, self).__init__(*args, **kwargs) + def create(self, subset_name, instance_data, pre_create_data): + import hou - # Remove the active, we are checking the bypass flag of the nodes - self.data.pop("active", None) + instance_data.pop("active", None) + instance_data.update({"node_type": "alembic"}) - # Set node type to create for output - self.data.update({"node_type": "alembic"}) + instance = super(CreateAlembicCamera, self).create( + subset_name, + instance_data, + pre_create_data) # type: CreatedInstance - def _process(self, instance): - """Creator main entry point. - - Args: - instance (hou.Node): Created Houdini instance. - - """ + instance_node = hou.node(instance.get("instance_node")) parms = { - "filename": "$HIP/pyblish/%s.abc" % self.name, + "filename": hou.text.expandString( + "$HIP/pyblish/{}.abc".format(subset_name)), "use_sop_path": False, } - if self.nodes: - node = self.nodes[0] - path = node.path() + if self.selected_nodes: + if len(self.selected_nodes) > 1: + raise CreatorError("More than one item selected.") + path = self.selected_nodes[0].path() # Split the node path into the first root and the remainder # So we can set the root and objects parameters correctly _, root, remainder = path.split("/", 2) parms.update({"root": "/" + root, "objects": remainder}) - instance.setParms(parms) + instance_node.setParms(parms) # Lock the Use Sop Path setting so the # user doesn't accidentally enable it. 
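# Editor's note: a minimal sketch, using a hypothetical selected node path,
# of how the split above fills the Alembic ROP "root"/"objects" parameters.
# The path "/obj/camera1/cam" is an assumption for illustration only.
path = "/obj/camera1/cam"                       # hypothetical selection
_, root, remainder = path.split("/", 2)         # -> "", "obj", "camera1/cam"
example_parms = {"root": "/" + root,            # "/obj"
                 "objects": remainder}          # "camera1/cam"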
- instance.parm("use_sop_path").lock(True) - instance.parm("trange").set(1) + to_lock = ["use_sop_path"] + self.lock_parameters(instance_node, to_lock) + + instance_node.parm("trange").set(1) diff --git a/openpype/hosts/houdini/plugins/create/create_arnold_ass.py b/openpype/hosts/houdini/plugins/create/create_arnold_ass.py index 72088e43b01..8b310753d02 100644 --- a/openpype/hosts/houdini/plugins/create/create_arnold_ass.py +++ b/openpype/hosts/houdini/plugins/create/create_arnold_ass.py @@ -1,9 +1,12 @@ +# -*- coding: utf-8 -*- +"""Creator plugin for creating Arnold ASS files.""" from openpype.hosts.houdini.api import plugin -class CreateArnoldAss(plugin.Creator): +class CreateArnoldAss(plugin.HoudiniCreator): """Arnold .ass Archive""" + identifier = "io.openpype.creators.houdini.ass" label = "Arnold ASS" family = "ass" icon = "magic" @@ -12,42 +15,39 @@ class CreateArnoldAss(plugin.Creator): # Default extension: `.ass` or `.ass.gz` ext = ".ass" - def __init__(self, *args, **kwargs): - super(CreateArnoldAss, self).__init__(*args, **kwargs) + def create(self, subset_name, instance_data, pre_create_data): + import hou - # Remove the active, we are checking the bypass flag of the nodes - self.data.pop("active", None) + instance_data.pop("active", None) + instance_data.update({"node_type": "arnold"}) - self.data.update({"node_type": "arnold"}) + instance = super(CreateArnoldAss, self).create( + subset_name, + instance_data, + pre_create_data) # type: plugin.CreatedInstance - def process(self): - node = super(CreateArnoldAss, self).process() - - basename = node.name() - node.setName(basename + "_ASS", unique_name=True) + instance_node = hou.node(instance.get("instance_node")) # Hide Properties Tab on Arnold ROP since that's used # for rendering instead of .ass Archive Export - parm_template_group = node.parmTemplateGroup() + parm_template_group = instance_node.parmTemplateGroup() parm_template_group.hideFolder("Properties", True) - node.setParmTemplateGroup(parm_template_group) + instance_node.setParmTemplateGroup(parm_template_group) - filepath = '$HIP/pyblish/`chs("subset")`.$F4{}'.format(self.ext) + filepath = "{}{}".format( + hou.text.expandString("$HIP/pyblish/"), + "{}.$F4{}".format(subset_name, self.ext) + ) parms = { # Render frame range "trange": 1, - # Arnold ROP settings "ar_ass_file": filepath, "ar_ass_export_enable": 1 } - node.setParms(parms) - # Lock the ASS export attribute - node.parm("ar_ass_export_enable").lock(True) + instance_node.setParms(parms) - # Lock some Avalon attributes - to_lock = ["family", "id"] - for name in to_lock: - parm = node.parm(name) - parm.lock(True) + # Lock any parameters in this list + to_lock = ["ar_ass_export_enable", "family", "id"] + self.lock_parameters(instance_node, to_lock) diff --git a/openpype/hosts/houdini/plugins/create/create_composite.py b/openpype/hosts/houdini/plugins/create/create_composite.py index e2787080761..45af2b06302 100644 --- a/openpype/hosts/houdini/plugins/create/create_composite.py +++ b/openpype/hosts/houdini/plugins/create/create_composite.py @@ -1,44 +1,42 @@ +# -*- coding: utf-8 -*- +"""Creator plugin for creating composite sequences.""" from openpype.hosts.houdini.api import plugin +from openpype.pipeline import CreatedInstance -class CreateCompositeSequence(plugin.Creator): +class CreateCompositeSequence(plugin.HoudiniCreator): """Composite ROP to Image Sequence""" + identifier = "io.openpype.creators.houdini.imagesequence" label = "Composite (Image Sequence)" family = "imagesequence" icon = "gears" - def 
__init__(self, *args, **kwargs): - super(CreateCompositeSequence, self).__init__(*args, **kwargs) + ext = ".exr" - # Remove the active, we are checking the bypass flag of the nodes - self.data.pop("active", None) + def create(self, subset_name, instance_data, pre_create_data): + import hou # noqa - # Type of ROP node to create - self.data.update({"node_type": "comp"}) + instance_data.pop("active", None) + instance_data.update({"node_type": "comp"}) - def _process(self, instance): - """Creator main entry point. + instance = super(CreateCompositeSequence, self).create( + subset_name, + instance_data, + pre_create_data) # type: CreatedInstance - Args: - instance (hou.Node): Created Houdini instance. + instance_node = hou.node(instance.get("instance_node")) + filepath = "{}{}".format( + hou.text.expandString("$HIP/pyblish/"), + "{}.$F4{}".format(subset_name, self.ext) + ) + parms = { + "trange": 1, + "copoutput": filepath + } - """ - parms = {"copoutput": "$HIP/pyblish/%s.$F4.exr" % self.name} - - if self.nodes: - node = self.nodes[0] - parms.update({"coppath": node.path()}) - - instance.setParms(parms) + instance_node.setParms(parms) # Lock any parameters in this list to_lock = ["prim_to_detail_pattern"] - for name in to_lock: - try: - parm = instance.parm(name) - parm.lock(True) - except AttributeError: - # missing lock pattern - self.log.debug( - "missing lock pattern {}".format(name)) + self.lock_parameters(instance_node, to_lock) diff --git a/openpype/hosts/houdini/plugins/create/create_hda.py b/openpype/hosts/houdini/plugins/create/create_hda.py index b98da8b8bb1..4bed83c2e9d 100644 --- a/openpype/hosts/houdini/plugins/create/create_hda.py +++ b/openpype/hosts/houdini/plugins/create/create_hda.py @@ -1,28 +1,22 @@ # -*- coding: utf-8 -*- -import hou - +"""Creator plugin for creating publishable Houdini Digital Assets.""" from openpype.client import ( get_asset_by_name, get_subsets, ) from openpype.pipeline import legacy_io -from openpype.hosts.houdini.api import lib from openpype.hosts.houdini.api import plugin -class CreateHDA(plugin.Creator): +class CreateHDA(plugin.HoudiniCreator): """Publish Houdini Digital Asset file.""" - name = "hda" + identifier = "io.openpype.creators.houdini.hda" label = "Houdini Digital Asset (Hda)" family = "hda" icon = "gears" maintain_selection = False - def __init__(self, *args, **kwargs): - super(CreateHDA, self).__init__(*args, **kwargs) - self.data.pop("active", None) - def _check_existing(self, subset_name): # type: (str) -> bool """Check if existing subset name versions already exists.""" @@ -40,55 +34,51 @@ def _check_existing(self, subset_name): } return subset_name.lower() in existing_subset_names_low - def _process(self, instance): - subset_name = self.data["subset"] - # get selected nodes - out = hou.node("/obj") - self.nodes = hou.selectedNodes() + def _create_instance_node( + self, node_name, parent, node_type="geometry"): + import hou - if (self.options or {}).get("useSelection") and self.nodes: - # if we have `use selection` enabled and we have some + parent_node = hou.node("/obj") + if self.selected_nodes: + # if we have `use selection` enabled, and we have some # selected nodes ... 
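# Editor's note: an illustrative sketch (hypothetical node names) of what the
# collapse below produces when two nodes are selected under /obj:
#
#   /obj/geo1 + /obj/geo2  --collapseIntoSubnet-->  /obj/<node_name>_subnet
#                                                       |-- geo1
#                                                       `-- geo2
#
# That subnet is what gets promoted to a digital asset further down.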
- subnet = out.collapseIntoSubnet( - self.nodes, - subnet_name="{}_subnet".format(self.name)) + subnet = parent_node.collapseIntoSubnet( + self.selected_nodes, + subnet_name="{}_subnet".format(node_name)) subnet.moveToGoodPosition() to_hda = subnet else: - to_hda = out.createNode( - "subnet", node_name="{}_subnet".format(self.name)) + to_hda = parent_node.createNode( + "subnet", node_name="{}_subnet".format(node_name)) if not to_hda.type().definition(): # if node type has not its definition, it is not user # created hda. We test if hda can be created from the node. if not to_hda.canCreateDigitalAsset(): - raise Exception( + raise plugin.OpenPypeCreatorError( "cannot create hda from node {}".format(to_hda)) hda_node = to_hda.createDigitalAsset( - name=subset_name, - hda_file_name="$HIP/{}.hda".format(subset_name) + name=node_name, + hda_file_name="$HIP/{}.hda".format(node_name) ) hda_node.layoutChildren() - elif self._check_existing(subset_name): + elif self._check_existing(node_name): raise plugin.OpenPypeCreatorError( ("subset {} is already published with different HDA" - "definition.").format(subset_name)) + "definition.").format(node_name)) else: hda_node = to_hda - hda_node.setName(subset_name) + hda_node.setName(node_name) + self.customize_node_look(hda_node) + return hda_node - # delete node created by Avalon in /out - # this needs to be addressed in future Houdini workflow refactor. + def create(self, subset_name, instance_data, pre_create_data): + instance_data.pop("active", None) - hou.node("/out/{}".format(subset_name)).destroy() + instance = super(CreateHDA, self).create( + subset_name, + instance_data, + pre_create_data) # type: plugin.CreatedInstance - try: - lib.imprint(hda_node, self.data) - except hou.OperationFailed: - raise plugin.OpenPypeCreatorError( - ("Cannot set metadata on asset. Might be that it already is " - "OpenPype asset.") - ) - - return hda_node + return instance diff --git a/openpype/hosts/houdini/plugins/create/create_pointcache.py b/openpype/hosts/houdini/plugins/create/create_pointcache.py index feb683edf60..6b6b2774229 100644 --- a/openpype/hosts/houdini/plugins/create/create_pointcache.py +++ b/openpype/hosts/houdini/plugins/create/create_pointcache.py @@ -1,48 +1,51 @@ +# -*- coding: utf-8 -*- +"""Creator plugin for creating pointcache alembics.""" from openpype.hosts.houdini.api import plugin +from openpype.pipeline import CreatedInstance -class CreatePointCache(plugin.Creator): +class CreatePointCache(plugin.HoudiniCreator): """Alembic ROP to pointcache""" - - name = "pointcache" + identifier = "io.openpype.creators.houdini.pointcache" label = "Point Cache" family = "pointcache" icon = "gears" - def __init__(self, *args, **kwargs): - super(CreatePointCache, self).__init__(*args, **kwargs) - - # Remove the active, we are checking the bypass flag of the nodes - self.data.pop("active", None) + def create(self, subset_name, instance_data, pre_create_data): + import hou - self.data.update({"node_type": "alembic"}) + instance_data.pop("active", None) + instance_data.update({"node_type": "alembic"}) - def _process(self, instance): - """Creator main entry point. + instance = super(CreatePointCache, self).create( + subset_name, + instance_data, + pre_create_data) # type: CreatedInstance - Args: - instance (hou.Node): Created Houdini instance. 
- - """ + instance_node = hou.node(instance.get("instance_node")) parms = { - "use_sop_path": True, # Export single node from SOP Path - "build_from_path": True, # Direct path of primitive in output - "path_attrib": "path", # Pass path attribute for output + "use_sop_path": True, + "build_from_path": True, + "path_attrib": "path", "prim_to_detail_pattern": "cbId", - "format": 2, # Set format to Ogawa - "facesets": 0, # No face sets (by default exclude them) - "filename": "$HIP/pyblish/%s.abc" % self.name, + "format": 2, + "facesets": 0, + "filename": hou.text.expandString( + "$HIP/pyblish/{}.abc".format(subset_name)) } - if self.nodes: - node = self.nodes[0] - parms.update({"sop_path": node.path()}) + if self.selected_nodes: + parms["sop_path"] = self.selected_nodes[0].path() + + # try to find output node + for child in self.selected_nodes[0].children(): + if child.type().name() == "output": + parms["sop_path"] = child.path() + break - instance.setParms(parms) - instance.parm("trange").set(1) + instance_node.setParms(parms) + instance_node.parm("trange").set(1) # Lock any parameters in this list to_lock = ["prim_to_detail_pattern"] - for name in to_lock: - parm = instance.parm(name) - parm.lock(True) + self.lock_parameters(instance_node, to_lock) diff --git a/openpype/hosts/houdini/plugins/create/create_redshift_proxy.py b/openpype/hosts/houdini/plugins/create/create_redshift_proxy.py index da4d80bf2be..8b6a68437b7 100644 --- a/openpype/hosts/houdini/plugins/create/create_redshift_proxy.py +++ b/openpype/hosts/houdini/plugins/create/create_redshift_proxy.py @@ -1,18 +1,20 @@ +# -*- coding: utf-8 -*- +"""Creator plugin for creating Redshift proxies.""" from openpype.hosts.houdini.api import plugin +from openpype.pipeline import CreatedInstance -class CreateRedshiftProxy(plugin.Creator): +class CreateRedshiftProxy(plugin.HoudiniCreator): """Redshift Proxy""" - + identifier = "io.openpype.creators.houdini.redshiftproxy" label = "Redshift Proxy" family = "redshiftproxy" icon = "magic" - def __init__(self, *args, **kwargs): - super(CreateRedshiftProxy, self).__init__(*args, **kwargs) - + def create(self, subset_name, instance_data, pre_create_data): + import hou # noqa # Remove the active, we are checking the bypass flag of the nodes - self.data.pop("active", None) + instance_data.pop("active", None) # Redshift provides a `Redshift_Proxy_Output` node type which shows # a limited set of parameters by default and is set to extract a @@ -21,28 +23,24 @@ def __init__(self, *args, **kwargs): # why this happens. # TODO: Somehow enforce so that it only shows the original limited # attributes of the Redshift_Proxy_Output node type - self.data.update({"node_type": "Redshift_Proxy_Output"}) + instance_data.update({"node_type": "Redshift_Proxy_Output"}) - def _process(self, instance): - """Creator main entry point. + instance = super(CreateRedshiftProxy, self).create( + subset_name, + instance_data, + pre_create_data) # type: CreatedInstance - Args: - instance (hou.Node): Created Houdini instance. 
+ instance_node = hou.node(instance.get("instance_node")) - """ parms = { - "RS_archive_file": '$HIP/pyblish/`chs("subset")`.$F4.rs', + "RS_archive_file": '$HIP/pyblish/`{}.$F4.rs'.format(subset_name), } - if self.nodes: - node = self.nodes[0] - path = node.path() - parms["RS_archive_sopPath"] = path + if self.selected_nodes: + parms["RS_archive_sopPath"] = self.selected_nodes[0].path() - instance.setParms(parms) + instance_node.setParms(parms) # Lock some Avalon attributes - to_lock = ["family", "id"] - for name in to_lock: - parm = instance.parm(name) - parm.lock(True) + to_lock = ["family", "id", "prim_to_detail_pattern"] + self.lock_parameters(instance_node, to_lock) diff --git a/openpype/hosts/houdini/plugins/create/create_redshift_rop.py b/openpype/hosts/houdini/plugins/create/create_redshift_rop.py index 6949ca169b3..2cbe9bfda16 100644 --- a/openpype/hosts/houdini/plugins/create/create_redshift_rop.py +++ b/openpype/hosts/houdini/plugins/create/create_redshift_rop.py @@ -1,41 +1,40 @@ -import hou +# -*- coding: utf-8 -*- +"""Creator plugin to create Redshift ROP.""" from openpype.hosts.houdini.api import plugin +from openpype.pipeline import CreatedInstance -class CreateRedshiftROP(plugin.Creator): +class CreateRedshiftROP(plugin.HoudiniCreator): """Redshift ROP""" - + identifier = "io.openpype.creators.houdini.redshift_rop" label = "Redshift ROP" family = "redshift_rop" icon = "magic" defaults = ["master"] - def __init__(self, *args, **kwargs): - super(CreateRedshiftROP, self).__init__(*args, **kwargs) + def create(self, subset_name, instance_data, pre_create_data): + import hou # noqa + + instance_data.pop("active", None) + instance_data.update({"node_type": "Redshift_ROP"}) + # Add chunk size attribute + instance_data["chunkSize"] = 10 # Clear the family prefix from the subset - subset = self.data["subset"] + subset = subset_name subset_no_prefix = subset[len(self.family):] subset_no_prefix = subset_no_prefix[0].lower() + subset_no_prefix[1:] - self.data["subset"] = subset_no_prefix - - # Add chunk size attribute - self.data["chunkSize"] = 10 - - # Remove the active, we are checking the bypass flag of the nodes - self.data.pop("active", None) - - self.data.update({"node_type": "Redshift_ROP"}) + subset_name = subset_no_prefix - def _process(self, instance): - """Creator main entry point. + instance = super(CreateRedshiftROP, self).create( + subset_name, + instance_data, + pre_create_data) # type: CreatedInstance - Args: - instance (hou.Node): Created Houdini instance. + instance_node = hou.node(instance.get("instance_node")) - """ - basename = instance.name() - instance.setName(basename + "_ROP", unique_name=True) + basename = instance_node.name() + instance_node.setName(basename + "_ROP", unique_name=True) # Also create the linked Redshift IPR Rop try: @@ -43,11 +42,12 @@ def _process(self, instance): "Redshift_IPR", node_name=basename + "_IPR" ) except hou.OperationFailed: - raise Exception(("Cannot create Redshift node. Is Redshift " - "installed and enabled?")) + raise plugin.OpenPypeCreatorError( + ("Cannot create Redshift node. 
Is Redshift " + "installed and enabled?")) # Move it to directly under the Redshift ROP - ipr_rop.setPosition(instance.position() + hou.Vector2(0, -1)) + ipr_rop.setPosition(instance_node.position() + hou.Vector2(0, -1)) # Set the linked rop to the Redshift ROP ipr_rop.parm("linked_rop").set(ipr_rop.relativePathTo(instance)) @@ -61,10 +61,8 @@ def _process(self, instance): "RS_outputMultilayerMode": 0, # no multi-layered exr "RS_outputBeautyAOVSuffix": "beauty", } - instance.setParms(parms) + instance_node.setParms(parms) # Lock some Avalon attributes to_lock = ["family", "id"] - for name in to_lock: - parm = instance.parm(name) - parm.lock(True) + self.lock_parameters(instance_node, to_lock) diff --git a/openpype/hosts/houdini/plugins/create/create_usd.py b/openpype/hosts/houdini/plugins/create/create_usd.py index 5bcb7840c06..51ed8237c59 100644 --- a/openpype/hosts/houdini/plugins/create/create_usd.py +++ b/openpype/hosts/houdini/plugins/create/create_usd.py @@ -1,39 +1,39 @@ +# -*- coding: utf-8 -*- +"""Creator plugin for creating USDs.""" from openpype.hosts.houdini.api import plugin +from openpype.pipeline import CreatedInstance -class CreateUSD(plugin.Creator): +class CreateUSD(plugin.HoudiniCreator): """Universal Scene Description""" - + identifier = "io.openpype.creators.houdini.usd" label = "USD (experimental)" family = "usd" icon = "gears" enabled = False - def __init__(self, *args, **kwargs): - super(CreateUSD, self).__init__(*args, **kwargs) - - # Remove the active, we are checking the bypass flag of the nodes - self.data.pop("active", None) + def create(self, subset_name, instance_data, pre_create_data): + import hou # noqa - self.data.update({"node_type": "usd"}) + instance_data.pop("active", None) + instance_data.update({"node_type": "usd"}) - def _process(self, instance): - """Creator main entry point. + instance = super(CreateUSD, self).create( + subset_name, + instance_data, + pre_create_data) # type: CreatedInstance - Args: - instance (hou.Node): Created Houdini instance. 
+ instance_node = hou.node(instance.get("instance_node")) - """ parms = { - "lopoutput": "$HIP/pyblish/%s.usd" % self.name, + "lopoutput": "$HIP/pyblish/{}.usd".format(subset_name), "enableoutputprocessor_simplerelativepaths": False, } - if self.nodes: - node = self.nodes[0] - parms.update({"loppath": node.path()}) + if self.selected_nodes: + parms["loppath"] = self.selected_nodes[0].path() - instance.setParms(parms) + instance_node.setParms(parms) # Lock any parameters in this list to_lock = [ @@ -42,6 +42,4 @@ def _process(self, instance): "family", "id", ] - for name in to_lock: - parm = instance.parm(name) - parm.lock(True) + self.lock_parameters(instance_node, to_lock) diff --git a/openpype/hosts/houdini/plugins/create/create_usdrender.py b/openpype/hosts/houdini/plugins/create/create_usdrender.py index cb3fe3f02b6..f78f0bed503 100644 --- a/openpype/hosts/houdini/plugins/create/create_usdrender.py +++ b/openpype/hosts/houdini/plugins/create/create_usdrender.py @@ -1,42 +1,41 @@ -import hou +# -*- coding: utf-8 -*- +"""Creator plugin for creating USD renders.""" from openpype.hosts.houdini.api import plugin +from openpype.pipeline import CreatedInstance -class CreateUSDRender(plugin.Creator): +class CreateUSDRender(plugin.HoudiniCreator): """USD Render ROP in /stage""" - + identifier = "io.openpype.creators.houdini.usdrender" label = "USD Render (experimental)" family = "usdrender" icon = "magic" - def __init__(self, *args, **kwargs): - super(CreateUSDRender, self).__init__(*args, **kwargs) + def create(self, subset_name, instance_data, pre_create_data): + import hou # noqa - self.parent = hou.node("/stage") + instance_data["parent"] = hou.node("/stage") # Remove the active, we are checking the bypass flag of the nodes - self.data.pop("active", None) + instance_data.pop("active", None) + instance_data.update({"node_type": "usdrender"}) - self.data.update({"node_type": "usdrender"}) + instance = super(CreateUSDRender, self).create( + subset_name, + instance_data, + pre_create_data) # type: CreatedInstance - def _process(self, instance): - """Creator main entry point. + instance_node = hou.node(instance.get("instance_node")) - Args: - instance (hou.Node): Created Houdini instance. 
- """ parms = { # Render frame range "trange": 1 } - if self.nodes: - node = self.nodes[0] - parms.update({"loppath": node.path()}) - instance.setParms(parms) + if self.selected_nodes: + parms["loppath"] = self.selected_nodes[0].path() + instance_node.setParms(parms) # Lock some Avalon attributes to_lock = ["family", "id"] - for name in to_lock: - parm = instance.parm(name) - parm.lock(True) + self.lock_parameters(instance_node, to_lock) diff --git a/openpype/hosts/houdini/plugins/create/create_vbd_cache.py b/openpype/hosts/houdini/plugins/create/create_vbd_cache.py index 242c21fc723..1a5011745f8 100644 --- a/openpype/hosts/houdini/plugins/create/create_vbd_cache.py +++ b/openpype/hosts/houdini/plugins/create/create_vbd_cache.py @@ -1,38 +1,36 @@ +# -*- coding: utf-8 -*- +"""Creator plugin for creating VDB Caches.""" from openpype.hosts.houdini.api import plugin +from openpype.pipeline import CreatedInstance -class CreateVDBCache(plugin.Creator): +class CreateVDBCache(plugin.HoudiniCreator): """OpenVDB from Geometry ROP""" - + identifier = "io.openpype.creators.houdini.vdbcache" name = "vbdcache" label = "VDB Cache" family = "vdbcache" icon = "cloud" - def __init__(self, *args, **kwargs): - super(CreateVDBCache, self).__init__(*args, **kwargs) - - # Remove the active, we are checking the bypass flag of the nodes - self.data.pop("active", None) - - # Set node type to create for output - self.data["node_type"] = "geometry" + def create(self, subset_name, instance_data, pre_create_data): + import hou - def _process(self, instance): - """Creator main entry point. + instance_data.pop("active", None) + instance_data.update({"node_type": "geometry"}) - Args: - instance (hou.Node): Created Houdini instance. + instance = super(CreateVDBCache, self).create( + subset_name, + instance_data, + pre_create_data) # type: CreatedInstance - """ + instance_node = hou.node(instance.get("instance_node")) parms = { - "sopoutput": "$HIP/pyblish/%s.$F4.vdb" % self.name, + "sopoutput": "$HIP/pyblish/{}.$F4.vdb".format(subset_name), "initsim": True, "trange": 1 } - if self.nodes: - node = self.nodes[0] - parms.update({"soppath": node.path()}) + if self.selected_nodes: + parms["soppath"] = self.selected_nodes[0].path() - instance.setParms(parms) + instance_node.setParms(parms) diff --git a/openpype/hosts/houdini/plugins/create/create_workfile.py b/openpype/hosts/houdini/plugins/create/create_workfile.py new file mode 100644 index 00000000000..0c6d8408103 --- /dev/null +++ b/openpype/hosts/houdini/plugins/create/create_workfile.py @@ -0,0 +1,93 @@ +# -*- coding: utf-8 -*- +"""Creator plugin for creating workfiles.""" +from openpype.hosts.houdini.api import plugin +from openpype.hosts.houdini.api.lib import read, imprint +from openpype.hosts.houdini.api.pipeline import CONTEXT_CONTAINER +from openpype.pipeline import CreatedInstance, AutoCreator +from openpype.pipeline import legacy_io +from openpype.client import get_asset_by_name +import hou + + +class CreateWorkfile(plugin.HoudiniCreatorBase, AutoCreator): + """Workfile auto-creator.""" + identifier = "io.openpype.creators.houdini.workfile" + label = "Workfile" + family = "workfile" + icon = "document" + + default_variant = "Main" + + def create(self): + variant = self.default_variant + current_instance = next( + ( + instance for instance in self.create_context.instances + if instance.creator_identifier == self.identifier + ), None) + + project_name = self.project_name + asset_name = legacy_io.Session["AVALON_ASSET"] + task_name = legacy_io.Session["AVALON_TASK"] + 
host_name = legacy_io.Session["AVALON_APP"] + + if current_instance is None: + asset_doc = get_asset_by_name(project_name, asset_name) + subset_name = self.get_subset_name( + variant, task_name, asset_doc, project_name, host_name + ) + data = { + "asset": asset_name, + "task": task_name, + "variant": variant + } + data.update( + self.get_dynamic_data( + variant, task_name, asset_doc, + project_name, host_name, current_instance) + ) + self.log.info("Auto-creating workfile instance...") + current_instance = CreatedInstance( + self.family, subset_name, data, self + ) + self._add_instance_to_context(current_instance) + elif ( + current_instance["asset"] != asset_name + or current_instance["task"] != task_name + ): + # Update instance context if is not the same + asset_doc = get_asset_by_name(project_name, asset_name) + subset_name = self.get_subset_name( + variant, task_name, asset_doc, project_name, host_name + ) + current_instance["asset"] = asset_name + current_instance["task"] = task_name + current_instance["subset"] = subset_name + + # write workfile information to context container. + op_ctx = hou.node(CONTEXT_CONTAINER) + if not op_ctx: + op_ctx = self.create_context_node() + + workfile_data = {"workfile": current_instance.data_to_store()} + imprint(op_ctx, workfile_data) + + def collect_instances(self): + op_ctx = hou.node(CONTEXT_CONTAINER) + instance = read(op_ctx) + if not instance: + return + workfile = instance.get("workfile") + if not workfile: + return + created_instance = CreatedInstance.from_existing( + workfile, self + ) + self._add_instance_to_context(created_instance) + + def update_instances(self, update_list): + op_ctx = hou.node(CONTEXT_CONTAINER) + for created_inst, _changes in update_list: + if created_inst["creator_identifier"] == self.identifier: + workfile_data = {"workfile": created_inst.data_to_store()} + imprint(op_ctx, workfile_data, update=True) diff --git a/openpype/hosts/houdini/plugins/publish/collect_active_state.py b/openpype/hosts/houdini/plugins/publish/collect_active_state.py index 862d5720e18..cc3f2e7fae4 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_active_state.py +++ b/openpype/hosts/houdini/plugins/publish/collect_active_state.py @@ -1,4 +1,5 @@ import pyblish.api +import hou class CollectInstanceActiveState(pyblish.api.InstancePlugin): @@ -24,7 +25,7 @@ def process(self, instance): # Check bypass state and reverse active = True - node = instance[0] + node = hou.node(instance.get("instance_node")) if hasattr(node, "isBypassed"): active = not node.isBypassed() diff --git a/openpype/hosts/houdini/plugins/publish/collect_current_file.py b/openpype/hosts/houdini/plugins/publish/collect_current_file.py index 1383c274a28..9cca07fdc7c 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_current_file.py +++ b/openpype/hosts/houdini/plugins/publish/collect_current_file.py @@ -5,19 +5,20 @@ import pyblish.api -class CollectHoudiniCurrentFile(pyblish.api.ContextPlugin): +class CollectHoudiniCurrentFile(pyblish.api.InstancePlugin): """Inject the current working file into context""" order = pyblish.api.CollectorOrder - 0.01 label = "Houdini Current File" hosts = ["houdini"] + family = ["workfile"] - def process(self, context): + def process(self, instance): """Inject the current working file""" current_file = hou.hipFile.path() if not os.path.exists(current_file): - # By default Houdini will even point a new scene to a path. + # By default, Houdini will even point a new scene to a path. 
# However if the file is not saved at all and does not exist, # we assume the user never set it. filepath = "" @@ -34,43 +35,26 @@ def process(self, context): "saved correctly." ) - context.data["currentFile"] = current_file + instance.context.data["currentFile"] = current_file folder, file = os.path.split(current_file) filename, ext = os.path.splitext(file) - task = legacy_io.Session["AVALON_TASK"] - - data = {} - - # create instance - instance = context.create_instance(name=filename) - subset = 'workfile' + task.capitalize() - - data.update({ - "subset": subset, - "asset": os.getenv("AVALON_ASSET", None), - "label": subset, - "publish": True, - "family": 'workfile', - "families": ['workfile'], + instance.data.update({ "setMembers": [current_file], - "frameStart": context.data['frameStart'], - "frameEnd": context.data['frameEnd'], - "handleStart": context.data['handleStart'], - "handleEnd": context.data['handleEnd'] + "frameStart": instance.context.data['frameStart'], + "frameEnd": instance.context.data['frameEnd'], + "handleStart": instance.context.data['handleStart'], + "handleEnd": instance.context.data['handleEnd'] }) - data['representations'] = [{ + instance.data['representations'] = [{ 'name': ext.lstrip("."), 'ext': ext.lstrip("."), 'files': file, "stagingDir": folder, }] - instance.data.update(data) - self.log.info('Collected instance: {}'.format(file)) self.log.info('Scene path: {}'.format(current_file)) self.log.info('staging Dir: {}'.format(folder)) - self.log.info('subset: {}'.format(subset)) diff --git a/openpype/hosts/houdini/plugins/publish/collect_frames.py b/openpype/hosts/houdini/plugins/publish/collect_frames.py index 9bd43d8a096..531cdf12495 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_frames.py +++ b/openpype/hosts/houdini/plugins/publish/collect_frames.py @@ -1,19 +1,13 @@ +# -*- coding: utf-8 -*- +"""Collector plugin for frames data on ROP instances.""" import os import re -import hou +import hou # noqa import pyblish.api from openpype.hosts.houdini.api import lib -def splitext(name, allowed_multidot_extensions): - - for ext in allowed_multidot_extensions: - if name.endswith(ext): - return name[:-len(ext)], ext - - return os.path.splitext(name) - class CollectFrames(pyblish.api.InstancePlugin): """Collect all frames which would be saved from the ROP nodes""" @@ -24,7 +18,9 @@ class CollectFrames(pyblish.api.InstancePlugin): def process(self, instance): - ropnode = instance[0] + ropnode = hou.node(instance.data["instance_node"]) + frame_data = lib.get_frame_data(ropnode) + instance.data.update(frame_data) start_frame = instance.data.get("frameStart", None) end_frame = instance.data.get("frameEnd", None) @@ -38,13 +34,13 @@ def process(self, instance): self.log.warning("Using current frame: {}".format(hou.frame())) output = output_parm.eval() - _, ext = splitext(output, + _, ext = lib.splitext(output, allowed_multidot_extensions=[".ass.gz"]) file_name = os.path.basename(output) result = file_name # Get the filename pattern match from the output - # path so we can compute all frames that would + # path, so we can compute all frames that would # come out from rendering the ROP node if there # is a frame pattern in the name pattern = r"\w+\.(\d+)" + re.escape(ext) @@ -63,8 +59,9 @@ def process(self, instance): # for a custom frame list. So this should be refactored. 
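# Editor's note: a minimal, self-contained sketch (hypothetical output name
# "render.1001.exr" and frame range 1001-1003) of the per-frame expansion that
# create_file_list() below is expected to return from the regex match.
import re
output_name = "render.1001.exr"
match = re.match(r"\w+\.(\d+)" + re.escape(".exr"), output_name)
padding = len(match.group(1))                      # keep the frame padding
expected = ["render.{}.exr".format(str(frame).zfill(padding))
            for frame in range(1001, 1003 + 1)]
# expected -> ["render.1001.exr", "render.1002.exr", "render.1003.exr"]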
instance.data.update({"frames": result}) - def create_file_list(self, match, start_frame, end_frame): - """Collect files based on frame range and regex.match + @staticmethod + def create_file_list(match, start_frame, end_frame): + """Collect files based on frame range and `regex.match` Args: match(re.match): match object diff --git a/openpype/hosts/houdini/plugins/publish/collect_instances.py b/openpype/hosts/houdini/plugins/publish/collect_instances.py index d38927984aa..bb856305524 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_instances.py +++ b/openpype/hosts/houdini/plugins/publish/collect_instances.py @@ -47,6 +47,11 @@ def process(self, context): if node.evalParm("id") != "pyblish.avalon.instance": continue + # instance was created by new creator code, skip it as + # it is already collected. + if node.parm("creator_identifier"): + continue + has_family = node.evalParm("family") assert has_family, "'%s' is missing 'family'" % node.name() @@ -58,7 +63,8 @@ def process(self, context): data.update({"active": not node.isBypassed()}) # temporarily translation of `active` to `publish` till issue has - # been resolved, https://github.com/pyblish/pyblish-base/issues/307 + # been resolved. + # https://github.com/pyblish/pyblish-base/issues/307 if "active" in data: data["publish"] = data["active"] @@ -78,6 +84,7 @@ def process(self, context): instance.data["families"] = [instance.data["family"]] instance[:] = [node] + instance.data["instance_node"] = node.path() instance.data.update(data) def sort_by_family(instance): diff --git a/openpype/hosts/houdini/plugins/publish/collect_output_node.py b/openpype/hosts/houdini/plugins/publish/collect_output_node.py index 0130c0a8da5..601ed17b39a 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/collect_output_node.py @@ -22,7 +22,7 @@ def process(self, instance): import hou - node = instance[0] + node = hou.node(instance.data["instance_node"]) # Get sop path node_type = node.type().name() diff --git a/openpype/hosts/houdini/plugins/publish/collect_redshift_rop.py b/openpype/hosts/houdini/plugins/publish/collect_redshift_rop.py index 72b554b567e..346bdf3421b 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_redshift_rop.py +++ b/openpype/hosts/houdini/plugins/publish/collect_redshift_rop.py @@ -69,7 +69,7 @@ class CollectRedshiftROPRenderProducts(pyblish.api.InstancePlugin): def process(self, instance): - rop = instance[0] + rop = hou.node(instance.get("instance_node")) # Collect chunkSize chunk_size_parm = rop.parm("chunkSize") diff --git a/openpype/hosts/houdini/plugins/publish/collect_render_products.py b/openpype/hosts/houdini/plugins/publish/collect_render_products.py index d7163b43c0d..fcd80e0082f 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_render_products.py +++ b/openpype/hosts/houdini/plugins/publish/collect_render_products.py @@ -53,7 +53,7 @@ def process(self, instance): node = instance.data.get("output_node") if not node: - rop_path = instance[0].path() + rop_path = instance.data["instance_node"].path() raise RuntimeError( "No output node found. 
Make sure to connect an " "input to the USD ROP: %s" % rop_path diff --git a/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py b/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py index cf8d61cda32..81274c670e0 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py +++ b/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py @@ -1,6 +1,6 @@ import pyblish.api -from openyppe.client import get_subset_by_name, get_asset_by_name +from openpype.client import get_subset_by_name, get_asset_by_name from openpype.pipeline import legacy_io import openpype.lib.usdlib as usdlib diff --git a/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py b/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py index e3985e3c972..833add854b0 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py +++ b/openpype/hosts/houdini/plugins/publish/collect_usd_layers.py @@ -3,6 +3,8 @@ import pyblish.api import openpype.hosts.houdini.api.usd as usdlib +import hou + class CollectUsdLayers(pyblish.api.InstancePlugin): """Collect the USD Layers that have configured save paths.""" @@ -19,7 +21,7 @@ def process(self, instance): self.log.debug("No output node found..") return - rop_node = instance[0] + rop_node = hou.node(instance.get("instance_node")) save_layers = [] for layer in usdlib.get_configured_save_layers(rop_node): @@ -54,8 +56,10 @@ def process(self, instance): layer_inst.data["subset"] = "__stub__" layer_inst.data["label"] = label layer_inst.data["asset"] = instance.data["asset"] - layer_inst.append(instance[0]) # include same USD ROP - layer_inst.append((layer, save_path)) # include layer data + # include same USD ROP + layer_inst.append(rop_node) + # include layer data + layer_inst.append((layer, save_path)) # Allow this subset to be grouped into a USD Layer on creation layer_inst.data["subsetGroup"] = "USD Layer" diff --git a/openpype/hosts/houdini/plugins/publish/extract_alembic.py b/openpype/hosts/houdini/plugins/publish/extract_alembic.py index 758d4c560b1..cb2d4ef424a 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_alembic.py +++ b/openpype/hosts/houdini/plugins/publish/extract_alembic.py @@ -5,6 +5,8 @@ from openpype.pipeline import publish from openpype.hosts.houdini.api.lib import render_rop +import hou + class ExtractAlembic(publish.Extractor): @@ -15,7 +17,7 @@ class ExtractAlembic(publish.Extractor): def process(self, instance): - ropnode = instance[0] + ropnode = hou.node(instance.data["instance_node"]) # Get the filename from the filename parameter output = ropnode.evalParm("filename") diff --git a/openpype/hosts/houdini/plugins/publish/extract_ass.py b/openpype/hosts/houdini/plugins/publish/extract_ass.py index a302b451cbc..0d246625bab 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_ass.py +++ b/openpype/hosts/houdini/plugins/publish/extract_ass.py @@ -5,6 +5,8 @@ from openpype.pipeline import publish from openpype.hosts.houdini.api.lib import render_rop +import hou + class ExtractAss(publish.Extractor): @@ -15,7 +17,7 @@ class ExtractAss(publish.Extractor): def process(self, instance): - ropnode = instance[0] + ropnode = hou.node(instance.data["instance_node"]) # Get the filename from the filename parameter # `.evalParm(parameter)` will make sure all tokens are resolved @@ -33,8 +35,12 @@ def process(self, instance): # error and thus still continues to the integrator. 
To capture that # we make sure all files exist files = instance.data["frames"] - missing = [fname for fname in files - if not os.path.exists(os.path.join(staging_dir, fname))] + missing = [] + for file_name in files: + full_path = os.path.normpath(os.path.join(staging_dir, file_name)) + if not os.path.exists(full_path): + missing.append(full_path) + if missing: raise RuntimeError("Failed to complete Arnold ass extraction. " "Missing output files: {}".format(missing)) diff --git a/openpype/hosts/houdini/plugins/publish/extract_composite.py b/openpype/hosts/houdini/plugins/publish/extract_composite.py index 23e875f1073..7a1ab36b93d 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_composite.py +++ b/openpype/hosts/houdini/plugins/publish/extract_composite.py @@ -1,9 +1,10 @@ import os - import pyblish.api from openpype.pipeline import publish -from openpype.hosts.houdini.api.lib import render_rop +from openpype.hosts.houdini.api.lib import render_rop, splitext + +import hou class ExtractComposite(publish.Extractor): @@ -15,7 +16,7 @@ class ExtractComposite(publish.Extractor): def process(self, instance): - ropnode = instance[0] + ropnode = hou.node(instance.data["instance_node"]) # Get the filename from the copoutput parameter # `.evalParm(parameter)` will make sure all tokens are resolved @@ -28,8 +29,24 @@ def process(self, instance): render_rop(ropnode) - if "files" not in instance.data: - instance.data["files"] = [] + output = instance.data["frames"] + _, ext = splitext(output[0], []) + ext = ext.lstrip(".") + + if "representations" not in instance.data: + instance.data["representations"] = [] + + representation = { + "name": ext, + "ext": ext, + "files": output, + "stagingDir": staging_dir, + "frameStart": instance.data["frameStart"], + "frameEnd": instance.data["frameEnd"], + } + + from pprint import pformat + + self.log.info(pformat(representation)) - frames = instance.data["frames"] - instance.data["files"].append(frames) + instance.data["representations"].append(representation) diff --git a/openpype/hosts/houdini/plugins/publish/extract_hda.py b/openpype/hosts/houdini/plugins/publish/extract_hda.py index 7dd03a92b7c..8b97bf364f1 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_hda.py +++ b/openpype/hosts/houdini/plugins/publish/extract_hda.py @@ -1,11 +1,9 @@ # -*- coding: utf-8 -*- import os - from pprint import pformat - import pyblish.api - from openpype.pipeline import publish +import hou class ExtractHDA(publish.Extractor): @@ -17,7 +15,7 @@ class ExtractHDA(publish.Extractor): def process(self, instance): self.log.info(pformat(instance.data)) - hda_node = instance[0] + hda_node = hou.node(instance.data.get("instance_node")) hda_def = hda_node.type().definition() hda_options = hda_def.options() hda_options.setSaveInitialParmsAndContents(True) diff --git a/openpype/hosts/houdini/plugins/publish/extract_redshift_proxy.py b/openpype/hosts/houdini/plugins/publish/extract_redshift_proxy.py index ca9be64a477..29ede98a520 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_redshift_proxy.py +++ b/openpype/hosts/houdini/plugins/publish/extract_redshift_proxy.py @@ -5,6 +5,8 @@ from openpype.pipeline import publish from openpype.hosts.houdini.api.lib import render_rop +import hou + class ExtractRedshiftProxy(publish.Extractor): @@ -15,7 +17,7 @@ class ExtractRedshiftProxy(publish.Extractor): def process(self, instance): - ropnode = instance[0] + ropnode = hou.node(instance.get("instance_node")) # Get the filename from the filename parameter # 
`.evalParm(parameter)` will make sure all tokens are resolved diff --git a/openpype/hosts/houdini/plugins/publish/extract_usd.py b/openpype/hosts/houdini/plugins/publish/extract_usd.py index 78c32affb4e..cbeb5add718 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_usd.py +++ b/openpype/hosts/houdini/plugins/publish/extract_usd.py @@ -5,6 +5,7 @@ from openpype.pipeline import publish from openpype.hosts.houdini.api.lib import render_rop +import hou class ExtractUSD(publish.Extractor): @@ -17,7 +18,7 @@ class ExtractUSD(publish.Extractor): def process(self, instance): - ropnode = instance[0] + ropnode = hou.node(instance.get("instance_node")) # Get the filename from the filename parameter output = ropnode.evalParm("lopoutput") diff --git a/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py b/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py index f686f712bbe..0288b7363a7 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py +++ b/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py @@ -187,7 +187,7 @@ def process(self, instance): # Main ROP node, either a USD Rop or ROP network with # multiple USD ROPs - node = instance[0] + node = hou.node(instance.get("instance_node")) # Collect any output dependencies that have not been processed yet # during extraction of other instances diff --git a/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py b/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py index 26ec423048f..434d6a21601 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py +++ b/openpype/hosts/houdini/plugins/publish/extract_vdb_cache.py @@ -5,6 +5,8 @@ from openpype.pipeline import publish from openpype.hosts.houdini.api.lib import render_rop +import hou + class ExtractVDBCache(publish.Extractor): @@ -15,7 +17,7 @@ class ExtractVDBCache(publish.Extractor): def process(self, instance): - ropnode = instance[0] + ropnode = hou.node(instance.get("instance_node")) # Get the filename from the filename parameter # `.evalParm(parameter)` will make sure all tokens are resolved diff --git a/openpype/hosts/houdini/plugins/publish/help/validate_vdb_input_node.xml b/openpype/hosts/houdini/plugins/publish/help/validate_vdb_input_node.xml new file mode 100644 index 00000000000..0f92560bf7c --- /dev/null +++ b/openpype/hosts/houdini/plugins/publish/help/validate_vdb_input_node.xml @@ -0,0 +1,21 @@ + + + +Scene setting + +## Invalid input node + +VDB input must have the same number of VDBs, points, primitives and vertices as output. + + + +### __Detailed Info__ (optional) + +A VDB is an inherited type of Prim, holds the following data: + - Primitives: 1 + - Points: 1 + - Vertices: 1 + - VDBs: 1 + + + \ No newline at end of file diff --git a/openpype/hosts/houdini/plugins/publish/increment_current_file.py b/openpype/hosts/houdini/plugins/publish/increment_current_file.py index c990f481d32..16d9ef9aec9 100644 --- a/openpype/hosts/houdini/plugins/publish/increment_current_file.py +++ b/openpype/hosts/houdini/plugins/publish/increment_current_file.py @@ -2,7 +2,7 @@ from openpype.lib import version_up from openpype.pipeline import registered_host - +from openpype.hosts.houdini.api import HoudiniHost class IncrementCurrentFile(pyblish.api.ContextPlugin): """Increment the current file. 
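# Editor's note: an illustrative sketch of the version bump this plugin relies
# on. This is NOT the real openpype.lib.version_up implementation, only the
# expected effect on a typical, hypothetical workfile name.
import re

def bump_trailing_version(path):
    # e.g. "shot_010_v001.hip" -> "shot_010_v002.hip" (assumes a _v### token)
    def _increment(match):
        next_version = int(match.group(1)) + 1
        return "_v{}".format(str(next_version).zfill(len(match.group(1))))
    return re.sub(r"_v(\d+)", _increment, path, count=1)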
@@ -20,11 +20,11 @@ class IncrementCurrentFile(pyblish.api.ContextPlugin): def process(self, context): # Filename must not have changed since collecting - host = registered_host() + host = registered_host() # type: HoudiniHost current_file = host.current_file() assert ( context.data["currentFile"] == current_file ), "Collected filename from current scene name." new_filepath = version_up(current_file) - host.save(new_filepath) + host.save_workfile(new_filepath) diff --git a/openpype/hosts/houdini/plugins/publish/save_scene.py b/openpype/hosts/houdini/plugins/publish/save_scene.py index 6128c7af77c..d6e07ccab09 100644 --- a/openpype/hosts/houdini/plugins/publish/save_scene.py +++ b/openpype/hosts/houdini/plugins/publish/save_scene.py @@ -14,13 +14,13 @@ def process(self, context): # Filename must not have changed since collecting host = registered_host() - current_file = host.current_file() + current_file = host.get_current_workfile() assert context.data['currentFile'] == current_file, ( "Collected filename from current scene name." ) if host.has_unsaved_changes(): - self.log.info("Saving current file..") - host.save_file(current_file) + self.log.info("Saving current file {}...".format(current_file)) + host.save_workfile(current_file) else: self.log.debug("No unsaved changes, skipping file save..") diff --git a/openpype/hosts/houdini/plugins/publish/valiate_vdb_input_node.py b/openpype/hosts/houdini/plugins/publish/valiate_vdb_input_node.py deleted file mode 100644 index ac408bc842a..00000000000 --- a/openpype/hosts/houdini/plugins/publish/valiate_vdb_input_node.py +++ /dev/null @@ -1,47 +0,0 @@ -import pyblish.api -from openpype.pipeline.publish import ValidateContentsOrder - - -class ValidateVDBInputNode(pyblish.api.InstancePlugin): - """Validate that the node connected to the output node is of type VDB. - - Regardless of the amount of VDBs create the output will need to have an - equal amount of VDBs, points, primitives and vertices - - A VDB is an inherited type of Prim, holds the following data: - - Primitives: 1 - - Points: 1 - - Vertices: 1 - - VDBs: 1 - - """ - - order = ValidateContentsOrder + 0.1 - families = ["vdbcache"] - hosts = ["houdini"] - label = "Validate Input Node (VDB)" - - def process(self, instance): - invalid = self.get_invalid(instance) - if invalid: - raise RuntimeError( - "Node connected to the output node is not" "of type VDB!" 
- ) - - @classmethod - def get_invalid(cls, instance): - - node = instance.data["output_node"] - - prims = node.geometry().prims() - nr_of_prims = len(prims) - - nr_of_points = len(node.geometry().points()) - if nr_of_points != nr_of_prims: - cls.log.error("The number of primitives and points do not match") - return [instance] - - for prim in prims: - if prim.numVertices() != 1: - cls.log.error("Found primitive with more than 1 vertex!") - return [instance] diff --git a/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py b/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py index ea800707fb1..86e92a052fe 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py +++ b/openpype/hosts/houdini/plugins/publish/validate_abc_primitive_to_detail.py @@ -1,8 +1,8 @@ +# -*- coding: utf-8 -*- import pyblish.api from collections import defaultdict - -from openpype.pipeline.publish import ValidateContentsOrder +from openpype.pipeline import PublishValidationError class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin): @@ -16,7 +16,7 @@ class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin): """ - order = ValidateContentsOrder + 0.1 + order = pyblish.api.ValidatorOrder + 0.1 families = ["pointcache"] hosts = ["houdini"] label = "Validate Primitive to Detail (Abc)" @@ -24,18 +24,26 @@ class ValidateAbcPrimitiveToDetail(pyblish.api.InstancePlugin): def process(self, instance): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError( - "Primitives found with inconsistent primitive " - "to detail attributes. See log." + raise PublishValidationError( + ("Primitives found with inconsistent primitive " + "to detail attributes. See log."), + title=self.label ) @classmethod def get_invalid(cls, instance): + import hou # noqa + output_node = instance.data.get("output_node") + rop_node = hou.node(instance.data["instance_node"]) + if output_node is None: + cls.log.error( + "SOP Output node in '%s' does not exist. " + "Ensure a valid SOP output path is set." % rop_node.path() + ) - output = instance.data["output_node"] + return [rop_node.path()] - rop = instance[0] - pattern = rop.parm("prim_to_detail_pattern").eval().strip() + pattern = rop_node.parm("prim_to_detail_pattern").eval().strip() if not pattern: cls.log.debug( "Alembic ROP has no 'Primitive to Detail' pattern. " @@ -43,7 +51,7 @@ def get_invalid(cls, instance): ) return - build_from_path = rop.parm("build_from_path").eval() + build_from_path = rop_node.parm("build_from_path").eval() if not build_from_path: cls.log.debug( "Alembic ROP has 'Build from Path' disabled. " @@ -51,14 +59,14 @@ def get_invalid(cls, instance): ) return - path_attr = rop.parm("path_attrib").eval() + path_attr = rop_node.parm("path_attrib").eval() if not path_attr: cls.log.error( "The Alembic ROP node has no Path Attribute" "value set, but 'Build Hierarchy from Attribute'" "is enabled." ) - return [rop.path()] + return [rop_node.path()] # Let's assume each attribute is explicitly named for now and has no # wildcards for Primitive to Detail. This simplifies the check. @@ -67,7 +75,7 @@ def get_invalid(cls, instance): # Check if the primitive attribute exists frame = instance.data.get("frameStart", 0) - geo = output.geometryAtFrame(frame) + geo = output_node.geometryAtFrame(frame) # If there are no primitives on the start frame then it might be # something that is emitted over time. 
As such we can't actually @@ -86,7 +94,7 @@ def get_invalid(cls, instance): "Geometry Primitives are missing " "path attribute: `%s`" % path_attr ) - return [output.path()] + return [output_node.path()] # Ensure at least a single string value is present if not attrib.strings(): @@ -94,7 +102,7 @@ def get_invalid(cls, instance): "Primitive path attribute has no " "string values: %s" % path_attr ) - return [output.path()] + return [output_node.path()] paths = None for attr in pattern.split(" "): @@ -130,4 +138,4 @@ def get_invalid(cls, instance): "Path has multiple values: %s (path: %s)" % (list(values), path) ) - return [output.path()] + return [output_node.path()] diff --git a/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py b/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py index cbed3ea235e..44d58cfa363 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py +++ b/openpype/hosts/houdini/plugins/publish/validate_alembic_face_sets.py @@ -1,7 +1,6 @@ +# -*- coding: utf-8 -*- import pyblish.api - -from openpype.pipeline.publish import ValidateContentsOrder - +import hou class ValidateAlembicROPFaceSets(pyblish.api.InstancePlugin): """Validate Face Sets are disabled for extraction to pointcache. @@ -18,14 +17,14 @@ class ValidateAlembicROPFaceSets(pyblish.api.InstancePlugin): """ - order = ValidateContentsOrder + 0.1 + order = pyblish.api.ValidatorOrder + 0.1 families = ["pointcache"] hosts = ["houdini"] label = "Validate Alembic ROP Face Sets" def process(self, instance): - rop = instance[0] + rop = hou.node(instance.data["instance_node"]) facesets = rop.parm("facesets").eval() # 0 = No Face Sets diff --git a/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py b/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py index 2625ae5f83f..bafb206bd37 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_alembic_input_node.py @@ -1,6 +1,7 @@ +# -*- coding: utf-8 -*- import pyblish.api - -from openpype.pipeline.publish import ValidateContentsOrder +from openpype.pipeline import PublishValidationError +import hou class ValidateAlembicInputNode(pyblish.api.InstancePlugin): @@ -12,7 +13,7 @@ class ValidateAlembicInputNode(pyblish.api.InstancePlugin): """ - order = ValidateContentsOrder + 0.1 + order = pyblish.api.ValidatorOrder + 0.1 families = ["pointcache"] hosts = ["houdini"] label = "Validate Input Node (Abc)" @@ -20,18 +21,28 @@ class ValidateAlembicInputNode(pyblish.api.InstancePlugin): def process(self, instance): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError( - "Primitive types found that are not supported" - "for Alembic output." + raise PublishValidationError( + ("Primitive types found that are not supported" + "for Alembic output."), + title=self.label ) @classmethod def get_invalid(cls, instance): invalid_prim_types = ["VDB", "Volume"] - node = instance.data["output_node"] + output_node = instance.data.get("output_node") + + if output_node is None: + node = hou.node(instance.data["instance_node"]) + cls.log.error( + "SOP Output node in '%s' does not exist. " + "Ensure a valid SOP output path is set." 
% node.path() + ) + + return [node.path()] - if not hasattr(node, "geometry"): + if not hasattr(output_node, "geometry"): # In the case someone has explicitly set an Object # node instead of a SOP node in Geometry context # then for now we ignore - this allows us to also @@ -40,7 +51,7 @@ def get_invalid(cls, instance): return frame = instance.data.get("frameStart", 0) - geo = node.geometryAtFrame(frame) + geo = output_node.geometryAtFrame(frame) invalid = False for prim_type in invalid_prim_types: diff --git a/openpype/hosts/houdini/plugins/publish/validate_animation_settings.py b/openpype/hosts/houdini/plugins/publish/validate_animation_settings.py index 5eb8f93d03e..f11f9c0c622 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_animation_settings.py +++ b/openpype/hosts/houdini/plugins/publish/validate_animation_settings.py @@ -1,6 +1,7 @@ import pyblish.api from openpype.hosts.houdini.api import lib +import hou class ValidateAnimationSettings(pyblish.api.InstancePlugin): @@ -36,7 +37,7 @@ def process(self, instance): @classmethod def get_invalid(cls, instance): - node = instance[0] + node = hou.node(instance.get("instance_node")) # Check trange parm, 0 means Render Current Frame frame_range = node.evalParm("trange") diff --git a/openpype/hosts/houdini/plugins/publish/validate_bypass.py b/openpype/hosts/houdini/plugins/publish/validate_bypass.py index 7cf8da69d64..1bf51a986c3 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_bypass.py +++ b/openpype/hosts/houdini/plugins/publish/validate_bypass.py @@ -1,6 +1,8 @@ +# -*- coding: utf-8 -*- import pyblish.api -from openpype.pipeline.publish import ValidateContentsOrder +from openpype.pipeline import PublishValidationError +import hou class ValidateBypassed(pyblish.api.InstancePlugin): """Validate all primitives build hierarchy from attribute when enabled. @@ -11,7 +13,7 @@ class ValidateBypassed(pyblish.api.InstancePlugin): """ - order = ValidateContentsOrder - 0.1 + order = pyblish.api.ValidatorOrder - 0.1 families = ["*"] hosts = ["houdini"] label = "Validate ROP Bypass" @@ -26,14 +28,15 @@ def process(self, instance): invalid = self.get_invalid(instance) if invalid: rop = invalid[0] - raise RuntimeError( - "ROP node %s is set to bypass, publishing cannot continue.." 
- % rop.path() + raise PublishValidationError( + ("ROP node {} is set to bypass, publishing cannot " + "continue.".format(rop.path())), + title=self.label ) @classmethod def get_invalid(cls, instance): - rop = instance[0] + rop = hou.node(instance.get("instance_node")) if hasattr(rop, "isBypassed") and rop.isBypassed(): return [rop] diff --git a/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py b/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py index d414920f8bb..41b5273e6ac 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py +++ b/openpype/hosts/houdini/plugins/publish/validate_camera_rop.py @@ -1,11 +1,13 @@ +# -*- coding: utf-8 -*- +"""Validator plugin for Houdini Camera ROP settings.""" import pyblish.api -from openpype.pipeline.publish import ValidateContentsOrder +from openpype.pipeline import PublishValidationError class ValidateCameraROP(pyblish.api.InstancePlugin): """Validate Camera ROP settings.""" - order = ValidateContentsOrder + order = pyblish.api.ValidatorOrder families = ["camera"] hosts = ["houdini"] label = "Camera ROP" @@ -14,30 +16,45 @@ def process(self, instance): import hou - node = instance[0] + node = hou.node(instance.data.get("instance_node")) if node.parm("use_sop_path").eval(): - raise RuntimeError( - "Alembic ROP for Camera export should not be " - "set to 'Use Sop Path'. Please disable." + raise PublishValidationError( + ("Alembic ROP for Camera export should not be " + "set to 'Use Sop Path'. Please disable."), + title=self.label ) # Get the root and objects parameter of the Alembic ROP node root = node.parm("root").eval() objects = node.parm("objects").eval() - assert root, "Root parameter must be set on Alembic ROP" - assert root.startswith("/"), "Root parameter must start with slash /" - assert objects, "Objects parameter must be set on Alembic ROP" - assert len(objects.split(" ")) == 1, "Must have only a single object." 
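+        # Collect every failed check first so a single
+        # PublishValidationError can report all problems at once instead
+        # of stopping at the first failing assert.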
+ errors = [] + if not root: + errors.append("Root parameter must be set on Alembic ROP") + if not root.startswith("/"): + errors.append("Root parameter must start with slash /") + if not objects: + errors.append("Objects parameter must be set on Alembic ROP") + if len(objects.split(" ")) != 1: + errors.append("Must have only a single object.") + + if errors: + for error in errors: + self.log.error(error) + raise PublishValidationError( + "Some checks failed, see validator log.", + title=self.label) # Check if the object exists and is a camera path = root + "/" + objects camera = hou.node(path) if not camera: - raise ValueError("Camera path does not exist: %s" % path) + raise PublishValidationError( + "Camera path does not exist: %s" % path, + title=self.label) if camera.type().name() != "cam": - raise ValueError( - "Object set in Alembic ROP is not a camera: " - "%s (type: %s)" % (camera, camera.type().name()) - ) + raise PublishValidationError( + ("Object set in Alembic ROP is not a camera: " + "{} (type: {})").format(camera, camera.type().name()), + title=self.label) diff --git a/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py index 543539ffe3a..1d0377c8186 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_cop_output_node.py @@ -1,4 +1,9 @@ +# -*- coding: utf-8 -*- +import sys import pyblish.api +import six + +from openpype.pipeline import PublishValidationError class ValidateCopOutputNode(pyblish.api.InstancePlugin): @@ -20,9 +25,10 @@ def process(self, instance): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError( - "Output node(s) `%s` are incorrect. " - "See plug-in log for details." % invalid + raise PublishValidationError( + ("Output node(s) `{}` are incorrect. " + "See plug-in log for details.").format(invalid), + title=self.label ) @classmethod @@ -30,10 +36,19 @@ def get_invalid(cls, instance): import hou - output_node = instance.data["output_node"] + try: + output_node = instance.data["output_node"] + except KeyError: + six.reraise( + PublishValidationError, + PublishValidationError( + "Can't determine COP output node.", + title=cls.__name__), + sys.exc_info()[2] + ) if output_node is None: - node = instance[0] + node = hou.node(instance.get("instance_node")) cls.log.error( "COP Output node in '%s' does not exist. " "Ensure a valid COP output path is set." % node.path() @@ -54,7 +69,8 @@ def get_invalid(cls, instance): # For the sake of completeness also assert the category type # is Cop2 to avoid potential edge case scenarios even though # the isinstance check above should be stricter than this category - assert output_node.type().category().name() == "Cop2", ( - "Output node %s is not of category Cop2. This is a bug.." - % output_node.path() - ) + if output_node.type().category().name() != "Cop2": + raise PublishValidationError( + ("Output node %s is not of category Cop2. 
" + "This is a bug...").format(output_node.path()), + title=cls.label) diff --git a/openpype/hosts/houdini/plugins/publish/validate_file_extension.py b/openpype/hosts/houdini/plugins/publish/validate_file_extension.py index b26d28a1e7b..4584e78f4fe 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_file_extension.py +++ b/openpype/hosts/houdini/plugins/publish/validate_file_extension.py @@ -1,7 +1,11 @@ +# -*- coding: utf-8 -*- import os import pyblish.api from openpype.hosts.houdini.api import lib +from openpype.pipeline import PublishValidationError + +import hou class ValidateFileExtension(pyblish.api.InstancePlugin): @@ -29,15 +33,16 @@ def process(self, instance): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError( - "ROP node has incorrect " "file extension: %s" % invalid + raise PublishValidationError( + "ROP node has incorrect file extension: {}".format(invalid), + title=self.label ) @classmethod def get_invalid(cls, instance): # Get ROP node from instance - node = instance[0] + node = hou.node(instance.data["instance_node"]) # Create lookup for current family in instance families = [] @@ -53,7 +58,9 @@ def get_invalid(cls, instance): for family in families: extension = cls.family_extensions.get(family, None) if extension is None: - raise RuntimeError("Unsupported family: %s" % family) + raise PublishValidationError( + "Unsupported family: {}".format(family), + title=cls.label) if output_extension != extension: return [node.path()] diff --git a/openpype/hosts/houdini/plugins/publish/validate_frame_token.py b/openpype/hosts/houdini/plugins/publish/validate_frame_token.py index 76b5910576e..b5f6ba71e1d 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_frame_token.py +++ b/openpype/hosts/houdini/plugins/publish/validate_frame_token.py @@ -1,6 +1,7 @@ import pyblish.api from openpype.hosts.houdini.api import lib +import hou class ValidateFrameToken(pyblish.api.InstancePlugin): @@ -36,7 +37,7 @@ def process(self, instance): @classmethod def get_invalid(cls, instance): - node = instance[0] + node = hou.node(instance.get("instance_node")) # Check trange parm, 0 means Render Current Frame frame_range = node.evalParm("trange") diff --git a/openpype/hosts/houdini/plugins/publish/validate_houdini_license_category.py b/openpype/hosts/houdini/plugins/publish/validate_houdini_license_category.py index f5f03aa844b..f1c52f22c19 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_houdini_license_category.py +++ b/openpype/hosts/houdini/plugins/publish/validate_houdini_license_category.py @@ -1,4 +1,6 @@ +# -*- coding: utf-8 -*- import pyblish.api +from openpype.pipeline import PublishValidationError class ValidateHoudiniCommercialLicense(pyblish.api.InstancePlugin): @@ -24,7 +26,7 @@ def process(self, instance): license = hou.licenseCategory() if license != hou.licenseCategoryType.Commercial: - raise RuntimeError( - "USD Publishing requires a full Commercial " - "license. You are on: %s" % license - ) + raise PublishValidationError( + ("USD Publishing requires a full Commercial " + "license. 
You are on: {}").format(license), + title=self.label) diff --git a/openpype/hosts/houdini/plugins/publish/validate_mkpaths_toggled.py b/openpype/hosts/houdini/plugins/publish/validate_mkpaths_toggled.py index be6a798a95c..9d1f92a101f 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_mkpaths_toggled.py +++ b/openpype/hosts/houdini/plugins/publish/validate_mkpaths_toggled.py @@ -1,11 +1,12 @@ +# -*- coding: utf-8 -*- import pyblish.api -from openpype.pipeline.publish import ValidateContentsOrder +from openpype.pipeline import PublishValidationError class ValidateIntermediateDirectoriesChecked(pyblish.api.InstancePlugin): """Validate Create Intermediate Directories is enabled on ROP node.""" - order = ValidateContentsOrder + order = pyblish.api.ValidatorOrder families = ["pointcache", "camera", "vdbcache"] hosts = ["houdini"] label = "Create Intermediate Directories Checked" @@ -14,10 +15,10 @@ def process(self, instance): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError( - "Found ROP node with Create Intermediate " - "Directories turned off: %s" % invalid - ) + raise PublishValidationError( + ("Found ROP node with Create Intermediate " + "Directories turned off: {}".format(invalid)), + title=self.label) @classmethod def get_invalid(cls, instance): diff --git a/openpype/hosts/houdini/plugins/publish/validate_no_errors.py b/openpype/hosts/houdini/plugins/publish/validate_no_errors.py index 76635d4ed57..f7c95aaf4ed 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_no_errors.py +++ b/openpype/hosts/houdini/plugins/publish/validate_no_errors.py @@ -1,6 +1,7 @@ +# -*- coding: utf-8 -*- import pyblish.api import hou -from openpype.pipeline.publish import ValidateContentsOrder +from openpype.pipeline import PublishValidationError def cook_in_range(node, start, end): @@ -28,7 +29,7 @@ def get_errors(node): class ValidateNoErrors(pyblish.api.InstancePlugin): """Validate the Instance has no current cooking errors.""" - order = ValidateContentsOrder + order = pyblish.api.ValidatorOrder hosts = ["houdini"] label = "Validate no errors" @@ -37,7 +38,7 @@ def process(self, instance): validate_nodes = [] if len(instance) > 0: - validate_nodes.append(instance[0]) + validate_nodes.append(hou.node(instance.get("instance_node"))) output_node = instance.data.get("output_node") if output_node: validate_nodes.append(output_node) @@ -62,4 +63,6 @@ def process(self, instance): errors = get_errors(node) if errors: self.log.error(errors) - raise RuntimeError("Node has errors: %s" % node.path()) + raise PublishValidationError( + "Node has errors: {}".format(node.path()), + title=self.label) diff --git a/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py b/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py index 7a8cd04f15f..d3a4c0cfbfc 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py +++ b/openpype/hosts/houdini/plugins/publish/validate_primitive_hierarchy_paths.py @@ -1,5 +1,8 @@ +# -*- coding: utf-8 -*- import pyblish.api from openpype.pipeline.publish import ValidateContentsOrder +from openpype.pipeline import PublishValidationError +import hou class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin): @@ -19,19 +22,26 @@ class ValidatePrimitiveHierarchyPaths(pyblish.api.InstancePlugin): def process(self, instance): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError( - "See log for details. 
" "Invalid nodes: {0}".format(invalid) + raise PublishValidationError( + "See log for details. " "Invalid nodes: {0}".format(invalid), + title=self.label ) @classmethod def get_invalid(cls, instance): - import hou + output_node = instance.data.get("output_node") + rop_node = hou.node(instance.data["instance_node"]) - output = instance.data["output_node"] + if output_node is None: + cls.log.error( + "SOP Output node in '%s' does not exist. " + "Ensure a valid SOP output path is set." % rop_node.path() + ) + + return [rop_node.path()] - rop = instance[0] - build_from_path = rop.parm("build_from_path").eval() + build_from_path = rop_node.parm("build_from_path").eval() if not build_from_path: cls.log.debug( "Alembic ROP has 'Build from Path' disabled. " @@ -39,20 +49,20 @@ def get_invalid(cls, instance): ) return - path_attr = rop.parm("path_attrib").eval() + path_attr = rop_node.parm("path_attrib").eval() if not path_attr: cls.log.error( "The Alembic ROP node has no Path Attribute" "value set, but 'Build Hierarchy from Attribute'" "is enabled." ) - return [rop.path()] + return [rop_node.path()] cls.log.debug("Checking for attribute: %s" % path_attr) # Check if the primitive attribute exists frame = instance.data.get("frameStart", 0) - geo = output.geometryAtFrame(frame) + geo = output_node.geometryAtFrame(frame) # If there are no primitives on the current frame then we can't # check whether the path names are correct. So we'll just issue a @@ -73,7 +83,7 @@ def get_invalid(cls, instance): "Geometry Primitives are missing " "path attribute: `%s`" % path_attr ) - return [output.path()] + return [output_node.path()] # Ensure at least a single string value is present if not attrib.strings(): @@ -81,7 +91,7 @@ def get_invalid(cls, instance): "Primitive path attribute has no " "string values: %s" % path_attr ) - return [output.path()] + return [output_node.path()] paths = geo.primStringAttribValues(path_attr) # Ensure all primitives are set to a valid path @@ -93,4 +103,4 @@ def get_invalid(cls, instance): "Prims have no value for attribute `%s` " "(%s of %s prims)" % (path_attr, len(invalid_prims), num_prims) ) - return [output.path()] + return [output_node.path()] diff --git a/openpype/hosts/houdini/plugins/publish/validate_remote_publish.py b/openpype/hosts/houdini/plugins/publish/validate_remote_publish.py index 0ab182c5848..4e8e5fc0e86 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_remote_publish.py +++ b/openpype/hosts/houdini/plugins/publish/validate_remote_publish.py @@ -1,7 +1,9 @@ +# -*-coding: utf-8 -*- import pyblish.api from openpype.hosts.houdini.api import lib from openpype.pipeline.publish import RepairContextAction +from openpype.pipeline import PublishValidationError import hou @@ -27,17 +29,24 @@ def process(self, context): # We ensure it's a shell node and that it has the pre-render script # set correctly. Plus the shell script it will trigger should be # completely empty (doing nothing) - assert node.type().name() == "shell", "Must be shell ROP node" - assert node.parm("command").eval() == "", "Must have no command" - assert not node.parm("shellexec").eval(), "Must not execute in shell" - assert ( - node.parm("prerender").eval() == cmd - ), "REMOTE_PUBLISH node does not have correct prerender script." 
- assert ( - node.parm("lprerender").eval() == "python" - ), "REMOTE_PUBLISH node prerender script type not set to 'python'" + if node.type().name() != "shell": + self.raise_error("Must be shell ROP node") + if node.parm("command").eval() != "": + self.raise_error("Must have no command") + if node.parm("shellexec").eval(): + self.raise_error("Must not execute in shell") + if node.parm("prerender").eval() != cmd: + self.raise_error(("REMOTE_PUBLISH node does not have " + "correct prerender script.")) + if node.parm("lprerender").eval() != "python": + self.raise_error(("REMOTE_PUBLISH node prerender script " + "type not set to 'python'")) @classmethod def repair(cls, context): """(Re)create the node if it fails to pass validation.""" lib.create_remote_publish_node(force=True) + + def raise_error(self, message): + self.log.error(message) + raise PublishValidationError(message, title=self.label) diff --git a/openpype/hosts/houdini/plugins/publish/validate_remote_publish_enabled.py b/openpype/hosts/houdini/plugins/publish/validate_remote_publish_enabled.py index afc8df75285..8ec62f4e85a 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_remote_publish_enabled.py +++ b/openpype/hosts/houdini/plugins/publish/validate_remote_publish_enabled.py @@ -1,7 +1,9 @@ +# -*- coding: utf-8 -*- import pyblish.api import hou from openpype.pipeline.publish import RepairContextAction +from openpype.pipeline import PublishValidationError class ValidateRemotePublishEnabled(pyblish.api.ContextPlugin): @@ -18,10 +20,12 @@ def process(self, context): node = hou.node("/out/REMOTE_PUBLISH") if not node: - raise RuntimeError("Missing REMOTE_PUBLISH node.") + raise PublishValidationError( + "Missing REMOTE_PUBLISH node.", title=self.label) if node.isBypassed(): - raise RuntimeError("REMOTE_PUBLISH must not be bypassed.") + raise PublishValidationError( + "REMOTE_PUBLISH must not be bypassed.", title=self.label) @classmethod def repair(cls, context): @@ -29,7 +33,8 @@ def repair(cls, context): node = hou.node("/out/REMOTE_PUBLISH") if not node: - raise RuntimeError("Missing REMOTE_PUBLISH node.") + raise PublishValidationError( + "Missing REMOTE_PUBLISH node.", title=cls.label) cls.log.info("Disabling bypass on /out/REMOTE_PUBLISH") node.bypass(False) diff --git a/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py index a5a07b1b1a2..ed7f4387294 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_sop_output_node.py @@ -1,4 +1,6 @@ +# -*- coding: utf-8 -*- import pyblish.api +from openpype.pipeline import PublishValidationError class ValidateSopOutputNode(pyblish.api.InstancePlugin): @@ -22,9 +24,9 @@ def process(self, instance): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError( - "Output node(s) `%s` are incorrect. " - "See plug-in log for details." % invalid + raise PublishValidationError( + "Output node(s) are incorrect", + title="Invalid output node(s)" ) @classmethod @@ -32,10 +34,10 @@ def get_invalid(cls, instance): import hou - output_node = instance.data["output_node"] + output_node = instance.data.get("output_node") if output_node is None: - node = instance[0] + node = hou.node(instance.data["instance_node"]) cls.log.error( "SOP Output node in '%s' does not exist. " "Ensure a valid SOP output path is set." 
% node.path() @@ -56,10 +58,11 @@ def get_invalid(cls, instance): # For the sake of completeness also assert the category type # is Sop to avoid potential edge case scenarios even though # the isinstance check above should be stricter than this category - assert output_node.type().category().name() == "Sop", ( - "Output node %s is not of category Sop. This is a bug.." - % output_node.path() - ) + if output_node.type().category().name() != "Sop": + raise PublishValidationError( + ("Output node {} is not of category Sop. " + "This is a bug.").format(output_node.path()), + title=cls.label) # Ensure the node is cooked and succeeds to cook so we can correctly # check for its geometry data. diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py b/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py index ac0181aed29..a0e23024951 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_layer_path_backslashes.py @@ -1,6 +1,10 @@ +# -*- coding: utf-8 -*- import pyblish.api import openpype.hosts.houdini.api.usd as hou_usdlib +from openpype.pipeline import PublishValidationError + +import hou class ValidateUSDLayerPathBackslashes(pyblish.api.InstancePlugin): @@ -24,7 +28,7 @@ class ValidateUSDLayerPathBackslashes(pyblish.api.InstancePlugin): def process(self, instance): - rop = instance[0] + rop = hou.node(instance.get("instance_node")) lop_path = hou_usdlib.get_usd_rop_loppath(rop) stage = lop_path.stage(apply_viewport_overrides=False) @@ -44,7 +48,7 @@ def process(self, instance): invalid.append(layer) if invalid: - raise RuntimeError( + raise PublishValidationError(( "Loaded layers have backslashes. " - "This is invalid for HUSK USD rendering." 
- ) + "This is invalid for HUSK USD rendering."), + title=self.label) diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_model_and_shade.py b/openpype/hosts/houdini/plugins/publish/validate_usd_model_and_shade.py index 2fd2f5eb9fa..a55eb70cb2f 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_model_and_shade.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_model_and_shade.py @@ -1,10 +1,13 @@ +# -*- coding: utf-8 -*- import pyblish.api import openpype.hosts.houdini.api.usd as hou_usdlib - +from openpype.pipeline import PublishValidationError from pxr import UsdShade, UsdRender, UsdLux +import hou + def fullname(o): """Get fully qualified class name""" @@ -37,7 +40,7 @@ class ValidateUsdModel(pyblish.api.InstancePlugin): def process(self, instance): - rop = instance[0] + rop = hou.node(instance.get("instance_node")) lop_path = hou_usdlib.get_usd_rop_loppath(rop) stage = lop_path.stage(apply_viewport_overrides=False) @@ -55,7 +58,8 @@ def process(self, instance): if invalid: prim_paths = sorted([str(prim.GetPath()) for prim in invalid]) - raise RuntimeError("Found invalid primitives: %s" % prim_paths) + raise PublishValidationError( + "Found invalid primitives: {}".format(prim_paths)) class ValidateUsdShade(ValidateUsdModel): diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py index 1f10fafdf4b..af21efcafce 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_output_node.py @@ -1,4 +1,6 @@ +# -*- coding: utf-8 -*- import pyblish.api +from openpype.pipeline import PublishValidationError class ValidateUSDOutputNode(pyblish.api.InstancePlugin): @@ -20,9 +22,10 @@ def process(self, instance): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError( - "Output node(s) `%s` are incorrect. " - "See plug-in log for details." % invalid + raise PublishValidationError( + ("Output node(s) `{}` are incorrect. " + "See plug-in log for details.").format(invalid), + title=self.label ) @classmethod @@ -33,7 +36,7 @@ def get_invalid(cls, instance): output_node = instance.data["output_node"] if output_node is None: - node = instance[0] + node = hou.node(instance.get("instance_node")) cls.log.error( "USD node '%s' LOP path does not exist. " "Ensure a valid LOP path is set." 
% node.path() diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_render_product_names.py b/openpype/hosts/houdini/plugins/publish/validate_usd_render_product_names.py index 36336a03ae0..02c44ab94e6 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_render_product_names.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_render_product_names.py @@ -1,6 +1,8 @@ +# -*- coding: utf-8 -*- +import os import pyblish.api -import os +from openpype.pipeline import PublishValidationError class ValidateUSDRenderProductNames(pyblish.api.InstancePlugin): @@ -28,4 +30,5 @@ def process(self, instance): if invalid: for message in invalid: self.log.error(message) - raise RuntimeError("USD Render Paths are invalid.") + raise PublishValidationError( + "USD Render Paths are invalid.", title=self.label) diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_setdress.py b/openpype/hosts/houdini/plugins/publish/validate_usd_setdress.py index fb1094e6b5a..01ebc0e828e 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_setdress.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_setdress.py @@ -1,6 +1,8 @@ +# -*- coding: utf-8 -*- import pyblish.api import openpype.hosts.houdini.api.usd as hou_usdlib +from openpype.pipeline import PublishValidationError class ValidateUsdSetDress(pyblish.api.InstancePlugin): @@ -20,8 +22,9 @@ class ValidateUsdSetDress(pyblish.api.InstancePlugin): def process(self, instance): from pxr import UsdGeom + import hou - rop = instance[0] + rop = hou.node(instance.get("instance_node")) lop_path = hou_usdlib.get_usd_rop_loppath(rop) stage = lop_path.stage(apply_viewport_overrides=False) @@ -47,8 +50,9 @@ def process(self, instance): invalid.append(node) if invalid: - raise RuntimeError( + raise PublishValidationError(( "SetDress contains local geometry. " "This is not allowed, it must be an assembly " - "of referenced assets." 
+ "of referenced assets."), + title=self.label ) diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py index f08c7c72c50..c4f118ac3b2 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- import re import pyblish.api @@ -5,6 +6,7 @@ from openpype.client import get_subset_by_name from openpype.pipeline import legacy_io from openpype.pipeline.publish import ValidateContentsOrder +from openpype.pipeline import PublishValidationError class ValidateUSDShadeModelExists(pyblish.api.InstancePlugin): @@ -32,7 +34,8 @@ def process(self, instance): project_name, model_subset, asset_doc["_id"], fields=["_id"] ) if not subset_doc: - raise RuntimeError( - "USD Model subset not found: " - "%s (%s)" % (model_subset, asset_name) + raise PublishValidationError( + ("USD Model subset not found: " + "{} ({})").format(model_subset, asset_name), + title=self.label ) diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py index a4902b48a92..bd3366a424f 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_workspace.py @@ -1,5 +1,6 @@ +# -*- coding: utf-8 -*- import pyblish.api -from openpype.pipeline.publish import ValidateContentsOrder +from openpype.pipeline import PublishValidationError import hou @@ -12,14 +13,14 @@ class ValidateUsdShadeWorkspace(pyblish.api.InstancePlugin): """ - order = ValidateContentsOrder + order = pyblish.api.ValidatorOrder hosts = ["houdini"] families = ["usdShade"] label = "USD Shade Workspace" def process(self, instance): - rop = instance[0] + rop = hou.node(instance.get("instance_node")) workspace = rop.parent() definition = workspace.type().definition() @@ -39,13 +40,14 @@ def process(self, instance): if node_type != other_node_type: continue - # Get highest version + # Get the highest version highest = max(highest, other_version) if version != highest: - raise RuntimeError( - "Shading Workspace is not the latest version." - " Found %s. Latest is %s." % (version, highest) + raise PublishValidationError( + ("Shading Workspace is not the latest version." + " Found {}. Latest is {}.").format(version, highest), + title=self.label ) # There were some issues with the editable node not having the right @@ -56,8 +58,9 @@ def process(self, instance): ) rop_value = rop.parm("lopoutput").rawValue() if rop_value != value: - raise RuntimeError( - "Shading Workspace has invalid 'lopoutput'" - " parameter value. The Shading Workspace" - " needs to be reset to its default values." + raise PublishValidationError( + ("Shading Workspace has invalid 'lopoutput'" + " parameter value. 
The Shading Workspace" + " needs to be reset to its default values."), + title=self.label ) diff --git a/openpype/hosts/houdini/plugins/publish/validate_vdb_input_node.py b/openpype/hosts/houdini/plugins/publish/validate_vdb_input_node.py index ac408bc842a..1f9ccc9c427 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_vdb_input_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_vdb_input_node.py @@ -1,5 +1,8 @@ +# -*- coding: utf-8 -*- import pyblish.api -from openpype.pipeline.publish import ValidateContentsOrder +from openpype.pipeline import ( + PublishValidationError +) class ValidateVDBInputNode(pyblish.api.InstancePlugin): @@ -16,7 +19,7 @@ class ValidateVDBInputNode(pyblish.api.InstancePlugin): """ - order = ValidateContentsOrder + 0.1 + order = pyblish.api.ValidatorOrder + 0.1 families = ["vdbcache"] hosts = ["houdini"] label = "Validate Input Node (VDB)" @@ -24,8 +27,10 @@ class ValidateVDBInputNode(pyblish.api.InstancePlugin): def process(self, instance): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError( - "Node connected to the output node is not" "of type VDB!" + raise PublishValidationError( + self, + "Node connected to the output node is not of type VDB", + title=self.label ) @classmethod diff --git a/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py b/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py index 55ed581d4c3..61c1209fc90 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py +++ b/openpype/hosts/houdini/plugins/publish/validate_vdb_output_node.py @@ -1,6 +1,7 @@ +# -*- coding: utf-8 -*- import pyblish.api import hou -from openpype.pipeline.publish import ValidateContentsOrder +from openpype.pipeline import PublishValidationError class ValidateVDBOutputNode(pyblish.api.InstancePlugin): @@ -17,7 +18,7 @@ class ValidateVDBOutputNode(pyblish.api.InstancePlugin): """ - order = ValidateContentsOrder + 0.1 + order = pyblish.api.ValidatorOrder + 0.1 families = ["vdbcache"] hosts = ["houdini"] label = "Validate Output Node (VDB)" @@ -25,8 +26,9 @@ class ValidateVDBOutputNode(pyblish.api.InstancePlugin): def process(self, instance): invalid = self.get_invalid(instance) if invalid: - raise RuntimeError( - "Node connected to the output node is not" " of type VDB!" + raise PublishValidationError( + "Node connected to the output node is not" " of type VDB!", + title=self.label ) @classmethod @@ -36,7 +38,7 @@ def get_invalid(cls, instance): if node is None: cls.log.error( "SOP path is not correctly set on " - "ROP node '%s'." % instance[0].path() + "ROP node '%s'." 
% instance.get("instance_node") ) return [instance] diff --git a/openpype/hosts/houdini/plugins/publish/validate_workfile_paths.py b/openpype/hosts/houdini/plugins/publish/validate_workfile_paths.py index 560b355e215..7707cc2dba2 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_workfile_paths.py +++ b/openpype/hosts/houdini/plugins/publish/validate_workfile_paths.py @@ -1,11 +1,17 @@ # -*- coding: utf-8 -*- import pyblish.api import hou +from openpype.pipeline import ( + PublishValidationError, + OptionalPyblishPluginMixin +) +from openpype.pipeline.publish import RepairAction from openpype.pipeline.publish import RepairAction -class ValidateWorkfilePaths(pyblish.api.InstancePlugin): +class ValidateWorkfilePaths( + pyblish.api.InstancePlugin, OptionalPyblishPluginMixin): """Validate workfile paths so they are absolute.""" order = pyblish.api.ValidatorOrder @@ -19,6 +25,8 @@ class ValidateWorkfilePaths(pyblish.api.InstancePlugin): prohibited_vars = ["$HIP", "$JOB"] def process(self, instance): + if not self.is_active(instance.data): + return invalid = self.get_invalid() self.log.info( "node types to check: {}".format(", ".join(self.node_types))) @@ -30,15 +38,16 @@ def process(self, instance): self.log.error( "{}: {}".format(param.path(), param.unexpandedString())) - raise RuntimeError("Invalid paths found") + raise PublishValidationError( + "Invalid paths found", title=self.label) @classmethod def get_invalid(cls): invalid = [] for param, _ in hou.fileReferences(): - if param is None: + # it might return None for some reason + if not param: continue - # skip nodes we are not interested in if param.node().type().name() not in cls.node_types: continue diff --git a/openpype/hosts/houdini/startup/MainMenuCommon.xml b/openpype/hosts/houdini/startup/MainMenuCommon.xml index abfa3f136e3..c08114b71bc 100644 --- a/openpype/hosts/houdini/startup/MainMenuCommon.xml +++ b/openpype/hosts/houdini/startup/MainMenuCommon.xml @@ -1,10 +1,10 @@ - + - + - + - + bool: + node = rt.getNodeByName(node_name) + if not node: + return False + + for k, v in data.items(): + if isinstance(v, (dict, list)): + rt.setUserProp(node, k, f'{JSON_PREFIX}{json.dumps(v)}') + else: + rt.setUserProp(node, k, v) + + return True + + +def lsattr( + attr: str, + value: Union[str, None] = None, + root: Union[str, None] = None) -> list: + """List nodes having attribute with specified value. + + Args: + attr (str): Attribute name to match. + value (str, Optional): Value to match, of omitted, all nodes + with specified attribute are returned no matter of value. + root (str, Optional): Root node name. If omitted, scene root is used. + + Returns: + list of nodes. 
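+
+    Example:
+        Illustrative usage only, assuming a node named "myBox" was tagged
+        earlier with ``imprint("myBox", {"family": "model"})``::
+
+            lsattr("family")           # every node that has the property
+            lsattr("family", "model")  # only nodes where it equals "model"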
+ """ + root = rt.rootnode if root is None else rt.getNodeByName(root) + + def output_node(node, nodes): + nodes.append(node) + for child in node.Children: + output_node(child, nodes) + + nodes = [] + output_node(root, nodes) + return [ + n for n in nodes + if rt.getUserProp(n, attr) == value + ] if value else [ + n for n in nodes + if rt.getUserProp(n, attr) + ] + + +def read(container) -> dict: + data = {} + props = rt.getUserPropBuffer(container) + # this shouldn't happen but let's guard against it anyway + if not props: + return data + + for line in props.split("\r\n"): + try: + key, value = line.split("=") + except ValueError: + # if the line cannot be split we can't really parse it + continue + + value = value.strip() + if isinstance(value.strip(), six.string_types) and \ + value.startswith(JSON_PREFIX): + try: + value = json.loads(value[len(JSON_PREFIX):]) + except json.JSONDecodeError: + # not a json + pass + + data[key.strip()] = value + + data["instance_node"] = container.name + + return data + + +@contextlib.contextmanager +def maintained_selection(): + previous_selection = rt.getCurrentSelection() + try: + yield + finally: + if previous_selection: + rt.select(previous_selection) + else: + rt.select() + + +def get_all_children(parent, node_type=None): + """Handy function to get all the children of a given node + + Args: + parent (3dsmax Node1): Node to get all children of. + node_type (None, runtime.class): give class to check for + e.g. rt.FFDBox/rt.GeometryClass etc. + + Returns: + list: list of all children of the parent node + """ + def list_children(node): + children = [] + for c in node.Children: + children.append(c) + children = children + list_children(c) + return children + child_list = list_children(parent) + + return ([x for x in child_list if rt.superClassOf(x) == node_type] + if node_type else child_list) diff --git a/openpype/hosts/max/api/menu.py b/openpype/hosts/max/api/menu.py new file mode 100644 index 00000000000..d1913c51e0f --- /dev/null +++ b/openpype/hosts/max/api/menu.py @@ -0,0 +1,130 @@ +# -*- coding: utf-8 -*- +"""3dsmax menu definition of OpenPype.""" +from Qt import QtWidgets, QtCore +from pymxs import runtime as rt + +from openpype.tools.utils import host_tools + + +class OpenPypeMenu(object): + """Object representing OpenPype menu. + + This is using "hack" to inject itself before "Help" menu of 3dsmax. + For some reason `postLoadingMenus` event doesn't fire, and main menu + if probably re-initialized by menu templates, se we wait for at least + 1 event Qt event loop before trying to insert. + + """ + + def __init__(self): + super().__init__() + self.main_widget = self.get_main_widget() + self.menu = None + + timer = QtCore.QTimer() + # set number of event loops to wait. + timer.setInterval(1) + timer.timeout.connect(self._on_timer) + timer.start() + + self._timer = timer + self._counter = 0 + + def _on_timer(self): + if self._counter < 1: + self._counter += 1 + return + + self._counter = 0 + self._timer.stop() + self.build_openpype_menu() + + @staticmethod + def get_main_widget(): + """Get 3dsmax main window.""" + return QtWidgets.QWidget.find(rt.windows.getMAXHWND()) + + def get_main_menubar(self) -> QtWidgets.QMenuBar: + """Get main Menubar by 3dsmax main window.""" + return list(self.main_widget.findChildren(QtWidgets.QMenuBar))[0] + + def get_or_create_openpype_menu( + self, name: str = "&OpenPype", + before: str = "&Help") -> QtWidgets.QAction: + """Create OpenPype menu. + + Args: + name (str, Optional): OpenPypep menu name. 
+ before (str, Optional): Name of the 3dsmax main menu item to + add OpenPype menu before. + + Returns: + QtWidgets.QAction: OpenPype menu action. + + """ + if self.menu is not None: + return self.menu + + menu_bar = self.get_main_menubar() + menu_items = menu_bar.findChildren( + QtWidgets.QMenu, options=QtCore.Qt.FindDirectChildrenOnly) + help_action = None + for item in menu_items: + if name in item.title(): + # we already have OpenPype menu + return item + + if before in item.title(): + help_action = item.menuAction() + + op_menu = QtWidgets.QMenu("&OpenPype") + menu_bar.insertMenu(help_action, op_menu) + + self.menu = op_menu + return op_menu + + def build_openpype_menu(self) -> QtWidgets.QAction: + """Build items in OpenPype menu.""" + openpype_menu = self.get_or_create_openpype_menu() + load_action = QtWidgets.QAction("Load...", openpype_menu) + load_action.triggered.connect(self.load_callback) + openpype_menu.addAction(load_action) + + publish_action = QtWidgets.QAction("Publish...", openpype_menu) + publish_action.triggered.connect(self.publish_callback) + openpype_menu.addAction(publish_action) + + manage_action = QtWidgets.QAction("Manage...", openpype_menu) + manage_action.triggered.connect(self.manage_callback) + openpype_menu.addAction(manage_action) + + library_action = QtWidgets.QAction("Library...", openpype_menu) + library_action.triggered.connect(self.library_callback) + openpype_menu.addAction(library_action) + + openpype_menu.addSeparator() + + workfiles_action = QtWidgets.QAction("Work Files...", openpype_menu) + workfiles_action.triggered.connect(self.workfiles_callback) + openpype_menu.addAction(workfiles_action) + return openpype_menu + + def load_callback(self): + """Callback to show Loader tool.""" + host_tools.show_loader(parent=self.main_widget) + + def publish_callback(self): + """Callback to show Publisher tool.""" + host_tools.show_publisher(parent=self.main_widget) + + def manage_callback(self): + """Callback to show Scene Manager/Inventory tool.""" + host_tools.show_subset_manager(parent=self.main_widget) + + def library_callback(self): + """Callback to show Library Loader tool.""" + host_tools.show_library_loader(parent=self.main_widget) + + def workfiles_callback(self): + """Callback to show Workfiles tool.""" + host_tools.show_workfiles(parent=self.main_widget) diff --git a/openpype/hosts/max/api/pipeline.py b/openpype/hosts/max/api/pipeline.py new file mode 100644 index 00000000000..f3cdf245fba --- /dev/null +++ b/openpype/hosts/max/api/pipeline.py @@ -0,0 +1,145 @@ +# -*- coding: utf-8 -*- +"""Pipeline tools for OpenPype Houdini integration.""" +import os +import logging + +import json + +from openpype.host import HostBase, IWorkfileHost, ILoadHost, INewPublisher +import pyblish.api +from openpype.pipeline import ( + register_creator_plugin_path, + register_loader_plugin_path, + AVALON_CONTAINER_ID, +) +from openpype.hosts.max.api.menu import OpenPypeMenu +from openpype.hosts.max.api import lib +from openpype.hosts.max import MAX_HOST_DIR + +from pymxs import runtime as rt # noqa + +log = logging.getLogger("openpype.hosts.max") + +PLUGINS_DIR = os.path.join(MAX_HOST_DIR, "plugins") +PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish") +LOAD_PATH = os.path.join(PLUGINS_DIR, "load") +CREATE_PATH = os.path.join(PLUGINS_DIR, "create") +INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") + + +class MaxHost(HostBase, IWorkfileHost, ILoadHost, INewPublisher): + + name = "max" + menu = None + + def __init__(self): + super(MaxHost, self).__init__() + 
self._op_events = {} + self._has_been_setup = False + + def install(self): + pyblish.api.register_host("max") + + pyblish.api.register_plugin_path(PUBLISH_PATH) + register_loader_plugin_path(LOAD_PATH) + register_creator_plugin_path(CREATE_PATH) + + # self._register_callbacks() + self.menu = OpenPypeMenu() + + self._has_been_setup = True + + def has_unsaved_changes(self): + # TODO: how to get it from 3dsmax? + return True + + def get_workfile_extensions(self): + return [".max"] + + def save_workfile(self, dst_path=None): + rt.saveMaxFile(dst_path) + return dst_path + + def open_workfile(self, filepath): + rt.checkForSave() + rt.loadMaxFile(filepath) + return filepath + + def get_current_workfile(self): + return os.path.join(rt.maxFilePath, rt.maxFileName) + + def get_containers(self): + return ls() + + def _register_callbacks(self): + rt.callbacks.removeScripts(id=rt.name("OpenPypeCallbacks")) + + rt.callbacks.addScript( + rt.Name("postLoadingMenus"), + self._deferred_menu_creation, id=rt.Name('OpenPypeCallbacks')) + + def _deferred_menu_creation(self): + self.log.info("Building menu ...") + self.menu = OpenPypeMenu() + + @staticmethod + def create_context_node(): + """Helper for creating context holding node.""" + + root_scene = rt.rootScene + + create_attr_script = (""" +attributes "OpenPypeContext" +( + parameters main rollout:params + ( + context type: #string + ) + + rollout params "OpenPype Parameters" + ( + editText editTextContext "Context" type: #string + ) +) + """) + + attr = rt.execute(create_attr_script) + rt.custAttributes.add(root_scene, attr) + + return root_scene.OpenPypeContext.context + + def update_context_data(self, data, changes): + try: + _ = rt.rootScene.OpenPypeContext.context + except AttributeError: + # context node doesn't exists + self.create_context_node() + + rt.rootScene.OpenPypeContext.context = json.dumps(data) + + def get_context_data(self): + try: + context = rt.rootScene.OpenPypeContext.context + except AttributeError: + # context node doesn't exists + context = self.create_context_node() + if not context: + context = "{}" + return json.loads(context) + + def save_file(self, dst_path=None): + # Force forwards slashes to avoid segfault + dst_path = dst_path.replace("\\", "/") + rt.saveMaxFile(dst_path) + + +def ls() -> list: + """Get all OpenPype instances.""" + objs = rt.objects + containers = [ + obj for obj in objs + if rt.getUserProp(obj, "id") == AVALON_CONTAINER_ID + ] + + for container in sorted(containers, key=lambda name: container.name): + yield lib.read(container) diff --git a/openpype/hosts/max/api/plugin.py b/openpype/hosts/max/api/plugin.py new file mode 100644 index 00000000000..4788bfd3839 --- /dev/null +++ b/openpype/hosts/max/api/plugin.py @@ -0,0 +1,111 @@ +# -*- coding: utf-8 -*- +"""3dsmax specific Avalon/Pyblish plugin definitions.""" +from pymxs import runtime as rt +import six +from abc import ABCMeta +from openpype.pipeline import ( + CreatorError, + Creator, + CreatedInstance +) +from openpype.lib import BoolDef +from .lib import imprint, read, lsattr + + +class OpenPypeCreatorError(CreatorError): + pass + + +class MaxCreatorBase(object): + + @staticmethod + def cache_subsets(shared_data): + if shared_data.get("max_cached_subsets") is None: + shared_data["max_cached_subsets"] = {} + cached_instances = lsattr("id", "pyblish.avalon.instance") + for i in cached_instances: + creator_id = rt.getUserProp(i, "creator_identifier") + if creator_id not in shared_data["max_cached_subsets"]: + shared_data["max_cached_subsets"][creator_id] = 
[i.name] + else: + shared_data[ + "max_cached_subsets"][creator_id].append(i.name) # noqa + return shared_data + + @staticmethod + def create_instance_node(node_name: str, parent: str = ""): + parent_node = rt.getNodeByName(parent) if parent else rt.rootScene + if not parent_node: + raise OpenPypeCreatorError(f"Specified parent {parent} not found") + + container = rt.container(name=node_name) + container.Parent = parent_node + + return container + + +@six.add_metaclass(ABCMeta) +class MaxCreator(Creator, MaxCreatorBase): + selected_nodes = [] + + def create(self, subset_name, instance_data, pre_create_data): + if pre_create_data.get("use_selection"): + self.selected_nodes = rt.getCurrentSelection() + + instance_node = self.create_instance_node(subset_name) + instance_data["instance_node"] = instance_node.name + instance = CreatedInstance( + self.family, + subset_name, + instance_data, + self + ) + for node in self.selected_nodes: + node.Parent = instance_node + + self._add_instance_to_context(instance) + imprint(instance_node.name, instance.data_to_store()) + + return instance + + def collect_instances(self): + self.cache_subsets(self.collection_shared_data) + for instance in self.collection_shared_data[ + "max_cached_subsets"].get(self.identifier, []): + created_instance = CreatedInstance.from_existing( + read(rt.getNodeByName(instance)), self + ) + self._add_instance_to_context(created_instance) + + def update_instances(self, update_list): + for created_inst, _changes in update_list: + instance_node = created_inst.get("instance_node") + + new_values = { + key: new_value + for key, (_old_value, new_value) in _changes.items() + } + imprint( + instance_node, + new_values, + ) + + def remove_instances(self, instances): + """Remove specified instance from the scene. + + This is only removing `id` parameter so instance is no longer + instance, because it might contain valuable data for artist. + + """ + for instance in instances: + instance_node = rt.getNodeByName( + instance.data.get("instance_node")) + if instance_node: + rt.delete(rt.getNodeByName(instance_node)) + + self._remove_instance_from_context(instance) + + def get_pre_create_attr_defs(self): + return [ + BoolDef("use_selection", label="Use selection") + ] diff --git a/openpype/hosts/max/hooks/set_paths.py b/openpype/hosts/max/hooks/set_paths.py new file mode 100644 index 00000000000..3db53063441 --- /dev/null +++ b/openpype/hosts/max/hooks/set_paths.py @@ -0,0 +1,17 @@ +from openpype.lib import PreLaunchHook + + +class SetPath(PreLaunchHook): + """Set current dir to workdir. + + Hook `GlobalHostDataHook` must be executed before this hook. 
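+
+    In practice this only copies ``AVALON_WORKDIR`` from the prepared
+    launch environment into the subprocess ``cwd`` keyword argument, so
+    3dsmax starts inside the task work directory.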
+ """ + app_groups = ["max"] + + def execute(self): + workdir = self.launch_context.env.get("AVALON_WORKDIR", "") + if not workdir: + self.log.warning("BUG: Workdir is not filled.") + return + + self.launch_context.kwargs["cwd"] = workdir diff --git a/openpype/hosts/max/plugins/__init__.py b/openpype/hosts/max/plugins/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/openpype/hosts/max/plugins/create/create_pointcache.py b/openpype/hosts/max/plugins/create/create_pointcache.py new file mode 100644 index 00000000000..32f0838471e --- /dev/null +++ b/openpype/hosts/max/plugins/create/create_pointcache.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +"""Creator plugin for creating pointcache alembics.""" +from openpype.hosts.max.api import plugin +from openpype.pipeline import CreatedInstance + + +class CreatePointCache(plugin.MaxCreator): + identifier = "io.openpype.creators.max.pointcache" + label = "Point Cache" + family = "pointcache" + icon = "gear" + + def create(self, subset_name, instance_data, pre_create_data): + # from pymxs import runtime as rt + + _ = super(CreatePointCache, self).create( + subset_name, + instance_data, + pre_create_data) # type: CreatedInstance + + # for additional work on the node: + # instance_node = rt.getNodeByName(instance.get("instance_node")) diff --git a/openpype/hosts/max/plugins/load/load_pointcache.py b/openpype/hosts/max/plugins/load/load_pointcache.py new file mode 100644 index 00000000000..285d84b7b6d --- /dev/null +++ b/openpype/hosts/max/plugins/load/load_pointcache.py @@ -0,0 +1,65 @@ +# -*- coding: utf-8 -*- +"""Simple alembic loader for 3dsmax. + +Because of limited api, alembics can be only loaded, but not easily updated. + +""" +import os +from openpype.pipeline import ( + load +) + + +class AbcLoader(load.LoaderPlugin): + """Alembic loader.""" + + families = ["model", "animation", "pointcache"] + label = "Load Alembic" + representations = ["abc"] + order = -10 + icon = "code-fork" + color = "orange" + + def load(self, context, name=None, namespace=None, data=None): + from pymxs import runtime as rt + + file_path = os.path.normpath(self.fname) + + abc_before = { + c for c in rt.rootNode.Children + if rt.classOf(c) == rt.AlembicContainer + } + + abc_export_cmd = (f""" +AlembicImport.ImportToRoot = false + +importFile @"{file_path}" #noPrompt + """) + + self.log.debug(f"Executing command: {abc_export_cmd}") + rt.execute(abc_export_cmd) + + abc_after = { + c for c in rt.rootNode.Children + if rt.classOf(c) == rt.AlembicContainer + } + + # This should yield new AlembicContainer node + abc_containers = abc_after.difference(abc_before) + + if len(abc_containers) != 1: + self.log.error("Something failed when loading.") + + abc_container = abc_containers.pop() + + container_name = f"{name}_CON" + container = rt.container(name=container_name) + abc_container.Parent = container + + return container + + def remove(self, container): + from pymxs import runtime as rt + + node = container["node"] + rt.delete(node) diff --git a/openpype/hosts/max/plugins/publish/collect_workfile.py b/openpype/hosts/max/plugins/publish/collect_workfile.py new file mode 100644 index 00000000000..3500b2735cd --- /dev/null +++ b/openpype/hosts/max/plugins/publish/collect_workfile.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +"""Collect current work file.""" +import os +import pyblish.api + +from pymxs import runtime as rt +from openpype.pipeline import legacy_io + + +class CollectWorkfile(pyblish.api.ContextPlugin): + """Inject the current working file 
into context""" + + order = pyblish.api.CollectorOrder - 0.01 + label = "Collect 3dsmax Workfile" + hosts = ['max'] + + def process(self, context): + """Inject the current working file.""" + folder = rt.maxFilePath + file = rt.maxFileName + if not folder or not file: + self.log.error("Scene is not saved.") + current_file = os.path.join(folder, file) + + context.data['currentFile'] = current_file + + filename, ext = os.path.splitext(file) + + task = legacy_io.Session["AVALON_TASK"] + + data = {} + + # create instance + instance = context.create_instance(name=filename) + subset = 'workfile' + task.capitalize() + + data.update({ + "subset": subset, + "asset": os.getenv("AVALON_ASSET", None), + "label": subset, + "publish": True, + "family": 'workfile', + "families": ['workfile'], + "setMembers": [current_file], + "frameStart": context.data['frameStart'], + "frameEnd": context.data['frameEnd'], + "handleStart": context.data['handleStart'], + "handleEnd": context.data['handleEnd'] + }) + + data['representations'] = [{ + 'name': ext.lstrip("."), + 'ext': ext.lstrip("."), + 'files': file, + "stagingDir": folder, + }] + + instance.data.update(data) + + self.log.info('Collected instance: {}'.format(file)) + self.log.info('Scene path: {}'.format(current_file)) + self.log.info('staging Dir: {}'.format(folder)) + self.log.info('subset: {}'.format(subset)) diff --git a/openpype/hosts/max/plugins/publish/extract_pointcache.py b/openpype/hosts/max/plugins/publish/extract_pointcache.py new file mode 100644 index 00000000000..904c1656dad --- /dev/null +++ b/openpype/hosts/max/plugins/publish/extract_pointcache.py @@ -0,0 +1,100 @@ +# -*- coding: utf-8 -*- +""" +Export alembic file. + +Note: + Parameters on AlembicExport (AlembicExport.Parameter): + + ParticleAsMesh (bool): Sets whether particle shapes are exported + as meshes. + AnimTimeRange (enum): How animation is saved: + #CurrentFrame: saves current frame + #TimeSlider: saves the active time segments on time slider (default) + #StartEnd: saves a range specified by the Step + StartFrame (int) + EnFrame (int) + ShapeSuffix (bool): When set to true, appends the string "Shape" to the + name of each exported mesh. This property is set to false by default. + SamplesPerFrame (int): Sets the number of animation samples per frame. + Hidden (bool): When true, export hidden geometry. + UVs (bool): When true, export the mesh UV map channel. + Normals (bool): When true, export the mesh normals. + VertexColors (bool): When true, export the mesh vertex color map 0 and the + current vertex color display data when it differs + ExtraChannels (bool): When true, export the mesh extra map channels + (map channels greater than channel 1) + Velocity (bool): When true, export the meh vertex and particle velocity + data. + MaterialIDs (bool): When true, export the mesh material ID as + Alembic face sets. + Visibility (bool): When true, export the node visibility data. + LayerName (bool): When true, export the node layer name as an Alembic + object property. + MaterialName (bool): When true, export the geometry node material name as + an Alembic object property + ObjectID (bool): When true, export the geometry node g-buffer object ID as + an Alembic object property. + CustomAttributes (bool): When true, export the node and its modifiers + custom attributes into an Alembic object compound property. 
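+
+Example:
+    An illustrative (untested) way to set a few of these options through
+    pymxs before exporting; the extractor below builds an equivalent
+    MAXScript string instead, and the exportFile call style here is an
+    assumption:
+
+        from pymxs import runtime as rt
+
+        rt.AlembicExport.ArchiveType = rt.Name("ogawa")
+        rt.AlembicExport.ParticleAsMesh = False
+        rt.AlembicExport.UVs = True
+        rt.exportFile(r"C:/temp/out.abc", rt.Name("noPrompt"),
+                      selectedOnly=True, using=rt.AlembicExport)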
+""" +import os +import pyblish.api +from openpype.pipeline import publish +from pymxs import runtime as rt +from openpype.hosts.max.api import ( + maintained_selection, + get_all_children +) + + +class ExtractAlembic(publish.Extractor): + order = pyblish.api.ExtractorOrder + label = "Extract Pointcache" + hosts = ["max"] + families = ["pointcache", "camera"] + + def process(self, instance): + start = float(instance.data.get("frameStartHandle", 1)) + end = float(instance.data.get("frameEndHandle", 1)) + + container = instance.data["instance_node"] + + self.log.info("Extracting pointcache ...") + + parent_dir = self.staging_dir(instance) + file_name = "{name}.abc".format(**instance.data) + path = os.path.join(parent_dir, file_name) + + # We run the render + self.log.info("Writing alembic '%s' to '%s'" % (file_name, + parent_dir)) + + abc_export_cmd = ( + f""" +AlembicExport.ArchiveType = #ogawa +AlembicExport.CoordinateSystem = #maya +AlembicExport.StartFrame = {start} +AlembicExport.EndFrame = {end} + +exportFile @"{path}" #noPrompt selectedOnly:on using:AlembicExport + + """) + + self.log.debug(f"Executing command: {abc_export_cmd}") + + with maintained_selection(): + # select and export + + rt.select(get_all_children(rt.getNodeByName(container))) + rt.execute(abc_export_cmd) + + if "representations" not in instance.data: + instance.data["representations"] = [] + + representation = { + 'name': 'abc', + 'ext': 'abc', + 'files': file_name, + "stagingDir": parent_dir, + } + instance.data["representations"].append(representation) diff --git a/openpype/hosts/max/plugins/publish/validate_scene_saved.py b/openpype/hosts/max/plugins/publish/validate_scene_saved.py new file mode 100644 index 00000000000..8506b173159 --- /dev/null +++ b/openpype/hosts/max/plugins/publish/validate_scene_saved.py @@ -0,0 +1,18 @@ +# -*- coding: utf-8 -*- +import pyblish.api +from openpype.pipeline import PublishValidationError +from pymxs import runtime as rt + + +class ValidateSceneSaved(pyblish.api.InstancePlugin): + """Validate that workfile was saved.""" + + order = pyblish.api.ValidatorOrder + families = ["workfile"] + hosts = ["max"] + label = "Validate Workfile is saved" + + def process(self, instance): + if not rt.maxFilePath or not rt.maxFileName: + raise PublishValidationError( + "Workfile is not saved", title=self.label) diff --git a/openpype/hosts/max/startup/startup.ms b/openpype/hosts/max/startup/startup.ms new file mode 100644 index 00000000000..aee40eb6bc4 --- /dev/null +++ b/openpype/hosts/max/startup/startup.ms @@ -0,0 +1,9 @@ +-- OpenPype Init Script +( + local sysPath = dotNetClass "System.IO.Path" + local sysDir = dotNetClass "System.IO.Directory" + local localScript = getThisScriptFilename() + local startup = sysPath.Combine (sysPath.GetDirectoryName localScript) "startup.py" + + python.ExecuteFile startup +) \ No newline at end of file diff --git a/openpype/hosts/max/startup/startup.py b/openpype/hosts/max/startup/startup.py new file mode 100644 index 00000000000..37bcef5db14 --- /dev/null +++ b/openpype/hosts/max/startup/startup.py @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +from openpype.hosts.max.api import MaxHost +from openpype.pipeline import install_host + +host = MaxHost() +install_host(host) diff --git a/openpype/hosts/maya/api/gltf.py b/openpype/hosts/maya/api/gltf.py new file mode 100644 index 00000000000..2a983f1573b --- /dev/null +++ b/openpype/hosts/maya/api/gltf.py @@ -0,0 +1,88 @@ +# -*- coding: utf-8 -*- +"""Tools to work with GLTF.""" +import logging + +from maya import cmds, 
 mel  # noqa
+
+log = logging.getLogger(__name__)
+
+_gltf_options = {
+    "of": str,  # outputFolder
+    "cpr": str,  # copyright
+    "sno": bool,  # selectedNodeOnly
+    "sn": str,  # sceneName
+    "glb": bool,  # binary
+    "nbu": bool,  # niceBufferURIs
+    "hbu": bool,  # hashBufferURI
+    "ext": bool,  # externalTextures
+    "ivt": int,  # initialValuesTime
+    "acn": str,  # animationClipName
+    "ast": int,  # animationClipStartTime
+    "aet": int,  # animationClipEndTime
+    "afr": float,  # animationClipFrameRate
+    "dsa": int,  # detectStepAnimations
+    "mpa": str,  # meshPrimitiveAttributes
+    "bpa": str,  # blendPrimitiveAttributes
+    "i32": bool,  # force32bitIndices
+    "ssm": bool,  # skipStandardMaterials
+    "eut": bool,  # excludeUnusedTexcoord
+    "dm": bool,  # defaultMaterial
+    "cm": bool,  # colorizeMaterials
+    "dmy": str,  # dumpMaya
+    "dgl": str,  # dumpGLTF
+    "imd": str,  # ignoreMeshDeformers
+    "ssc": bool,  # skipSkinClusters
+    "sbs": bool,  # skipBlendShapes
+    "rvp": bool,  # redrawViewport
+    "vno": bool  # visibleNodesOnly
+}
+
+
+def extract_gltf(parent_dir,
+                 filename,
+                 **kwargs):
+    """Set glTF export options and run the maya2glTF export command."""
+
+    cmds.loadPlugin('maya2glTF', quiet=True)
+    # load the UI so the mel export command is available
+    mel.eval("maya2glTF_UI()")
+
+    parent_dir = parent_dir.replace('\\', '/')
+    options = {
+        "dsa": 1,
+        "glb": True
+    }
+    options.update(kwargs)
+
+    for key, value in options.copy().items():
+        if key not in _gltf_options:
+            log.warning("extract_gltf() does not support option '%s'. "
+                        "Flag will be ignored.", key)
+            options.pop(key)
+            continue
+
+    job_args = list()
+    default_opt = "maya2glTF -of \"{0}\" -sn \"{1}\"".format(parent_dir, filename)  # noqa
+    job_args.append(default_opt)
+
+    for key, value in options.items():
+        if isinstance(value, str):
+            job_args.append("-{0} \"{1}\"".format(key, value))
+        elif isinstance(value, bool):
+            if value:
+                job_args.append("-{0}".format(key))
+        else:
+            job_args.append("-{0} {1}".format(key, value))
+
+    job_str = " ".join(job_args)
+    log.info("{}".format(job_str))
+    mel.eval(job_str)
+
+    # close the glTF exporter window after the export finishes
+    gltf_UI = "maya2glTF_exporter_window"
+    if cmds.window(gltf_UI, q=True, exists=True):
+        cmds.deleteUI(gltf_UI)
diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py
index 2530021eba6..b2bbb823aa8 100644
--- a/openpype/hosts/maya/api/lib.py
+++ b/openpype/hosts/maya/api/lib.py
@@ -127,14 +127,14 @@ def get_main_window():
 
 
 @contextlib.contextmanager
-def suspended_refresh():
+def suspended_refresh(suspend=True):
     """Suspend viewport refreshes"""
-
+    original_state = cmds.refresh(query=True, suspend=True)
     try:
-        cmds.refresh(suspend=True)
+        cmds.refresh(suspend=suspend)
         yield
     finally:
-        cmds.refresh(suspend=False)
+        cmds.refresh(suspend=original_state)
 
 
 @contextlib.contextmanager
diff --git a/openpype/hosts/maya/api/lib_renderproducts.py b/openpype/hosts/maya/api/lib_renderproducts.py
index cd204445b7b..c54e3ab3e0c 100644
--- a/openpype/hosts/maya/api/lib_renderproducts.py
+++ b/openpype/hosts/maya/api/lib_renderproducts.py
@@ -536,6 +536,11 @@ def _get_aov_render_products(self, aov, cameras=None):
         products = []
         aov_name = self._get_attr(aov, "name")
+        multipart = False
+        multilayer = bool(self._get_attr("defaultArnoldDriver.multipart"))
+        merge_AOVs = bool(self._get_attr("defaultArnoldDriver.mergeAOVs"))
+        if multilayer or merge_AOVs:
+            multipart = True
         ai_drivers = cmds.listConnections("{}.outputs".format(aov),
                                           source=True,
                                           destination=False,
@@ -589,6 +594,7 @@ def 
_get_aov_render_products(self, aov, cameras=None): ext=ext, aov=aov_name, driver=ai_driver, + multipart=multipart, camera=camera) products.append(product) @@ -1016,7 +1022,11 @@ def get_render_products(self): # due to some AOVs still being written into separate files, # like Cryptomatte. # AOVs are merged in multi-channel file - multipart = bool(self._get_attr("redshiftOptions.exrForceMultilayer")) + multipart = False + force_layer = bool(self._get_attr("redshiftOptions.exrForceMultilayer")) # noqa + exMultipart = bool(self._get_attr("redshiftOptions.exrMultipart")) + if exMultipart or force_layer: + multipart = True # Get Redshift Extension from image format image_format = self._get_attr("redshiftOptions.imageFormat") # integer @@ -1044,7 +1054,6 @@ def get_render_products(self): # Any AOVs that still get processed, like Cryptomatte # by themselves are not multipart files. - aov_multipart = not multipart # Redshift skips rendering of masterlayer without AOV suffix # when a Beauty AOV is rendered. It overrides the main layer. @@ -1075,7 +1084,7 @@ def get_render_products(self): productName=aov_light_group_name, aov=aov_name, ext=ext, - multipart=aov_multipart, + multipart=multipart, camera=camera) products.append(product) @@ -1089,7 +1098,7 @@ def get_render_products(self): product = RenderProduct(productName=aov_name, aov=aov_name, ext=ext, - multipart=aov_multipart, + multipart=multipart, camera=camera) products.append(product) @@ -1100,7 +1109,7 @@ def get_render_products(self): if light_groups_enabled: return products - beauty_name = "Beauty_other" if has_beauty_aov else "" + beauty_name = "BeautyAux" if has_beauty_aov else "" for camera in cameras: products.insert(0, RenderProduct(productName=beauty_name, diff --git a/openpype/hosts/maya/plugins/create/create_ass.py b/openpype/hosts/maya/plugins/create/create_ass.py index 39f226900ab..935a068ca5a 100644 --- a/openpype/hosts/maya/plugins/create/create_ass.py +++ b/openpype/hosts/maya/plugins/create/create_ass.py @@ -1,5 +1,3 @@ -from collections import OrderedDict - from openpype.hosts.maya.api import ( lib, plugin @@ -9,12 +7,26 @@ class CreateAss(plugin.Creator): - """Arnold Archive""" + """Arnold Scene Source""" name = "ass" - label = "Ass StandIn" + label = "Arnold Scene Source" family = "ass" icon = "cube" + expandProcedurals = False + motionBlur = True + motionBlurKeys = 2 + motionBlurLength = 0.5 + maskOptions = False + maskCamera = False + maskLight = False + maskShape = False + maskShader = False + maskOverride = False + maskDriver = False + maskFilter = False + maskColor_manager = False + maskOperator = False def __init__(self, *args, **kwargs): super(CreateAss, self).__init__(*args, **kwargs) @@ -22,17 +34,27 @@ def __init__(self, *args, **kwargs): # Add animation data self.data.update(lib.collect_animation_data()) - # Vertex colors with the geometry - self.data["exportSequence"] = False + self.data["expandProcedurals"] = self.expandProcedurals + self.data["motionBlur"] = self.motionBlur + self.data["motionBlurKeys"] = self.motionBlurKeys + self.data["motionBlurLength"] = self.motionBlurLength + + # Masks + self.data["maskOptions"] = self.maskOptions + self.data["maskCamera"] = self.maskCamera + self.data["maskLight"] = self.maskLight + self.data["maskShape"] = self.maskShape + self.data["maskShader"] = self.maskShader + self.data["maskOverride"] = self.maskOverride + self.data["maskDriver"] = self.maskDriver + self.data["maskFilter"] = self.maskFilter + self.data["maskColor_manager"] = self.maskColor_manager + 
self.data["maskOperator"] = self.maskOperator def process(self): instance = super(CreateAss, self).process() - # data = OrderedDict(**self.data) - - - - nodes = list() + nodes = [] if (self.options or {}).get("useSelection"): nodes = cmds.ls(selection=True) @@ -42,7 +64,3 @@ def process(self): assContent = cmds.sets(name="content_SET") assProxy = cmds.sets(name="proxy_SET", empty=True) cmds.sets([assContent, assProxy], forceElement=instance) - - # self.log.info(data) - # - # self.data = data diff --git a/openpype/hosts/maya/plugins/create/create_pointcache.py b/openpype/hosts/maya/plugins/create/create_pointcache.py index ab8fe120797..cdec140ea8c 100644 --- a/openpype/hosts/maya/plugins/create/create_pointcache.py +++ b/openpype/hosts/maya/plugins/create/create_pointcache.py @@ -28,6 +28,7 @@ def __init__(self, *args, **kwargs): self.data["visibleOnly"] = False # only nodes that are visible self.data["includeParentHierarchy"] = False # Include parent groups self.data["worldSpace"] = True # Default to exporting world-space + self.data["refresh"] = False # Default to suspend refresh. # Add options for custom attributes self.data["attr"] = "" diff --git a/openpype/hosts/maya/plugins/publish/collect_ass.py b/openpype/hosts/maya/plugins/publish/collect_ass.py index 7c9a1b76fbf..b5e05d66657 100644 --- a/openpype/hosts/maya/plugins/publish/collect_ass.py +++ b/openpype/hosts/maya/plugins/publish/collect_ass.py @@ -1,4 +1,5 @@ from maya import cmds +from openpype.pipeline.publish import KnownPublishError import pyblish.api @@ -6,6 +7,7 @@ class CollectAssData(pyblish.api.InstancePlugin): """Collect Ass data.""" + # Offset to be after renderable camera collection. order = pyblish.api.CollectorOrder + 0.2 label = 'Collect Ass' families = ["ass"] @@ -23,8 +25,23 @@ def process(self, instance): instance.data['setMembers'] = members self.log.debug('content members: {}'.format(members)) elif objset.startswith("proxy_SET"): - assert len(members) == 1, "You have multiple proxy meshes, please only use one" + if len(members) != 1: + msg = "You have multiple proxy meshes, please only use one" + raise KnownPublishError(msg) instance.data['proxy'] = members self.log.debug('proxy members: {}'.format(members)) + # Use camera in object set if present else default to render globals + # camera. 
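+        # NOTE: the fallback below assumes at least one camera in the scene
+        #       has its 'renderable' attribute enabled; if none does,
+        #       'renderable[0]' raises an IndexError before any camera found
+        #       among the set members can override it.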
+ cameras = cmds.ls(type="camera", long=True) + renderable = [c for c in cameras if cmds.getAttr("%s.renderable" % c)] + camera = renderable[0] + for node in instance.data["setMembers"]: + camera_shapes = cmds.listRelatives( + node, shapes=True, type="camera" + ) + if camera_shapes: + camera = node + instance.data["camera"] = camera + self.log.debug("data: {}".format(instance.data)) diff --git a/openpype/hosts/maya/plugins/publish/collect_gltf.py b/openpype/hosts/maya/plugins/publish/collect_gltf.py new file mode 100644 index 00000000000..bb37fe3a7eb --- /dev/null +++ b/openpype/hosts/maya/plugins/publish/collect_gltf.py @@ -0,0 +1,17 @@ +# -*- coding: utf-8 -*- +import pyblish.api + + +class CollectGLTF(pyblish.api.InstancePlugin): + """Collect Assets for GLTF/GLB export.""" + + order = pyblish.api.CollectorOrder + 0.2 + label = "Collect Asset for GLTF/GLB export" + families = ["model", "animation", "pointcache"] + + def process(self, instance): + if not instance.data.get("families"): + instance.data["families"] = [] + + if "gltf" not in instance.data["families"]: + instance.data["families"].append("gltf") diff --git a/openpype/hosts/maya/plugins/publish/collect_look.py b/openpype/hosts/maya/plugins/publish/collect_look.py index 157be5717bb..e1adffaaaf2 100644 --- a/openpype/hosts/maya/plugins/publish/collect_look.py +++ b/openpype/hosts/maya/plugins/publish/collect_look.py @@ -403,13 +403,13 @@ def collect(self, instance): # history = cmds.listHistory(look_sets) history = [] for material in materials: - history.extend(cmds.listHistory(material)) + history.extend(cmds.listHistory(material, ac=True)) # handle VrayPluginNodeMtl node - see #1397 vray_plugin_nodes = cmds.ls( history, type="VRayPluginNodeMtl", long=True) for vray_node in vray_plugin_nodes: - history.extend(cmds.listHistory(vray_node)) + history.extend(cmds.listHistory(vray_node, ac=True)) # handling render attribute sets render_set_types = [ diff --git a/openpype/hosts/maya/plugins/publish/extract_ass.py b/openpype/hosts/maya/plugins/publish/extract_ass.py index 5c21a4ff086..049f256a7af 100644 --- a/openpype/hosts/maya/plugins/publish/extract_ass.py +++ b/openpype/hosts/maya/plugins/publish/extract_ass.py @@ -1,77 +1,93 @@ import os from maya import cmds +import arnold from openpype.pipeline import publish -from openpype.hosts.maya.api.lib import maintained_selection +from openpype.hosts.maya.api.lib import maintained_selection, attribute_values class ExtractAssStandin(publish.Extractor): - """Extract the content of the instance to a ass file + """Extract the content of the instance to a ass file""" - Things to pay attention to: - - If animation is toggled, are the frames correct - - - """ - - label = "Ass Standin (.ass)" + label = "Arnold Scene Source (.ass)" hosts = ["maya"] families = ["ass"] asciiAss = False def process(self, instance): - - sequence = instance.data.get("exportSequence", False) - staging_dir = self.staging_dir(instance) filename = "{}.ass".format(instance.name) - filenames = list() + filenames = [] file_path = os.path.join(staging_dir, filename) + # Mask + mask = arnold.AI_NODE_ALL + + node_types = { + "options": arnold.AI_NODE_OPTIONS, + "camera": arnold.AI_NODE_CAMERA, + "light": arnold.AI_NODE_LIGHT, + "shape": arnold.AI_NODE_SHAPE, + "shader": arnold.AI_NODE_SHADER, + "override": arnold.AI_NODE_OVERRIDE, + "driver": arnold.AI_NODE_DRIVER, + "filter": arnold.AI_NODE_FILTER, + "color_manager": arnold.AI_NODE_COLOR_MANAGER, + "operator": arnold.AI_NODE_OPERATOR + } + + for key in node_types.keys(): + if 
instance.data.get("mask" + key.title()): + mask = mask ^ node_types[key] + + # Motion blur + values = { + "defaultArnoldRenderOptions.motion_blur_enable": instance.data.get( + "motionBlur", True + ), + "defaultArnoldRenderOptions.motion_steps": instance.data.get( + "motionBlurKeys", 2 + ), + "defaultArnoldRenderOptions.motion_frames": instance.data.get( + "motionBlurLength", 0.5 + ) + } + # Write out .ass file + kwargs = { + "filename": file_path, + "startFrame": instance.data.get("frameStartHandle", 1), + "endFrame": instance.data.get("frameEndHandle", 1), + "frameStep": instance.data.get("step", 1), + "selected": True, + "asciiAss": self.asciiAss, + "shadowLinks": True, + "lightLinks": True, + "boundingBox": True, + "expandProcedurals": instance.data.get("expandProcedurals", False), + "camera": instance.data["camera"], + "mask": mask + } + self.log.info("Writing: '%s'" % file_path) - with maintained_selection(): - self.log.info("Writing: {}".format(instance.data["setMembers"])) - cmds.select(instance.data["setMembers"], noExpand=True) - - if sequence: - self.log.info("Extracting ass sequence") - - # Collect the start and end including handles - start = instance.data.get("frameStartHandle", 1) - end = instance.data.get("frameEndHandle", 1) - step = instance.data.get("step", 0) - - exported_files = cmds.arnoldExportAss(filename=file_path, - selected=True, - asciiAss=self.asciiAss, - shadowLinks=True, - lightLinks=True, - boundingBox=True, - startFrame=start, - endFrame=end, - frameStep=step - ) + with attribute_values(values): + with maintained_selection(): + self.log.info( + "Writing: {}".format(instance.data["setMembers"]) + ) + cmds.select(instance.data["setMembers"], noExpand=True) + + self.log.info( + "Extracting ass sequence with: {}".format(kwargs) + ) + + exported_files = cmds.arnoldExportAss(**kwargs) + for file in exported_files: filenames.append(os.path.split(file)[1]) + self.log.info("Exported: {}".format(filenames)) - else: - self.log.info("Extracting ass") - cmds.arnoldExportAss(filename=file_path, - selected=True, - asciiAss=False, - shadowLinks=True, - lightLinks=True, - boundingBox=True - ) - self.log.info("Extracted {}".format(filename)) - filenames = filename - optionals = [ - "frameStart", "frameEnd", "step", "handles", - "handleEnd", "handleStart" - ] - for key in optionals: - instance.data.pop(key, None) if "representations" not in instance.data: instance.data["representations"] = [] @@ -79,13 +95,11 @@ def process(self, instance): representation = { 'name': 'ass', 'ext': 'ass', - 'files': filenames, - "stagingDir": staging_dir + 'files': filenames if len(filenames) > 1 else filenames[0], + "stagingDir": staging_dir, + 'frameStart': kwargs["startFrame"] } - if sequence: - representation['frameStart'] = start - instance.data["representations"].append(representation) self.log.info("Extracted instance '%s' to: %s" diff --git a/openpype/hosts/maya/plugins/publish/extract_gltf.py b/openpype/hosts/maya/plugins/publish/extract_gltf.py new file mode 100644 index 00000000000..f5ceed5f33f --- /dev/null +++ b/openpype/hosts/maya/plugins/publish/extract_gltf.py @@ -0,0 +1,65 @@ +import os + +from maya import cmds, mel +import pyblish.api + +from openpype.pipeline import publish +from openpype.hosts.maya.api import lib +from openpype.hosts.maya.api.gltf import extract_gltf + + +class ExtractGLB(publish.Extractor): + + order = pyblish.api.ExtractorOrder + hosts = ["maya"] + label = "Extract GLB" + families = ["gltf"] + + def process(self, instance): + staging_dir = 
self.staging_dir(instance) + filename = "{0}.glb".format(instance.name) + path = os.path.join(staging_dir, filename) + + self.log.info("Extracting GLB to: {}".format(path)) + + nodes = instance[:] + + self.log.info("Instance: {0}".format(nodes)) + + start_frame = instance.data('frameStart') or \ + int(cmds.playbackOptions(query=True, + animationStartTime=True))# noqa + end_frame = instance.data('frameEnd') or \ + int(cmds.playbackOptions(query=True, + animationEndTime=True)) # noqa + fps = mel.eval('currentTimeUnitToFPS()') + + options = { + "sno": True, # selectedNodeOnly + "nbu": True, # .bin instead of .bin0 + "ast": start_frame, + "aet": end_frame, + "afr": fps, + "dsa": 1, + "acn": instance.name, + "glb": True, + "vno": True # visibleNodeOnly + } + with lib.maintained_selection(): + cmds.select(nodes, hi=True, noExpand=True) + extract_gltf(staging_dir, + instance.name, + **options) + + if "representations" not in instance.data: + instance.data["representations"] = [] + + representation = { + 'name': 'glb', + 'ext': 'glb', + 'files': filename, + "stagingDir": staging_dir, + } + instance.data["representations"].append(representation) + + self.log.info("Extract GLB successful to: {0}".format(path)) diff --git a/openpype/hosts/maya/plugins/publish/extract_look.py b/openpype/hosts/maya/plugins/publish/extract_look.py index 403b4ee6bc0..df07a674dcb 100644 --- a/openpype/hosts/maya/plugins/publish/extract_look.py +++ b/openpype/hosts/maya/plugins/publish/extract_look.py @@ -90,7 +90,7 @@ def maketx(source, destination, args, logger): maketx_path = get_oiio_tools_path("maketx") - if not os.path.exists(maketx_path): + if not maketx_path: print( "OIIO tool not found in {}".format(maketx_path)) raise AssertionError("OIIO tool not found") diff --git a/openpype/hosts/maya/plugins/publish/extract_playblast.py b/openpype/hosts/maya/plugins/publish/extract_playblast.py index b19d24fad74..1f9f9db99a7 100644 --- a/openpype/hosts/maya/plugins/publish/extract_playblast.py +++ b/openpype/hosts/maya/plugins/publish/extract_playblast.py @@ -115,6 +115,10 @@ def process(self, instance): else: preset["viewport_options"] = {"imagePlane": image_plane} + # Disable Pan/Zoom. 
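+        # The original value is stored so it can be restored on the camera
+        # right after the capture call below.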
+ pan_zoom = cmds.getAttr("{}.panZoomEnabled".format(preset["camera"])) + cmds.setAttr("{}.panZoomEnabled".format(preset["camera"]), False) + with lib.maintained_time(): filename = preset.get("filename", "%TEMP%") @@ -135,6 +139,8 @@ def process(self, instance): path = capture.capture(log=self.log, **preset) + cmds.setAttr("{}.panZoomEnabled".format(preset["camera"]), pan_zoom) + self.log.debug("playblast path {}".format(path)) collected_files = os.listdir(stagingdir) diff --git a/openpype/hosts/maya/plugins/publish/extract_pointcache.py b/openpype/hosts/maya/plugins/publish/extract_pointcache.py index 7c1c6d5c125..23b76a48c2e 100644 --- a/openpype/hosts/maya/plugins/publish/extract_pointcache.py +++ b/openpype/hosts/maya/plugins/publish/extract_pointcache.py @@ -86,13 +86,15 @@ def process(self, instance): start=start, end=end)) - with suspended_refresh(): + with suspended_refresh(suspend=instance.data.get("refresh", False)): with maintained_selection(): cmds.select(nodes, noExpand=True) - extract_alembic(file=path, - startFrame=start, - endFrame=end, - **options) + extract_alembic( + file=path, + startFrame=start, + endFrame=end, + **options + ) if "representations" not in instance.data: instance.data["representations"] = [] diff --git a/openpype/hosts/maya/plugins/publish/extract_thumbnail.py b/openpype/hosts/maya/plugins/publish/extract_thumbnail.py index 712159c2bed..06244cf0037 100644 --- a/openpype/hosts/maya/plugins/publish/extract_thumbnail.py +++ b/openpype/hosts/maya/plugins/publish/extract_thumbnail.py @@ -117,6 +117,10 @@ def process(self, instance): else: preset["viewport_options"] = {"imagePlane": image_plane} + # Disable Pan/Zoom. + pan_zoom = cmds.getAttr("{}.panZoomEnabled".format(preset["camera"])) + cmds.setAttr("{}.panZoomEnabled".format(preset["camera"]), False) + with lib.maintained_time(): # Force viewer to False in call to capture because we have our own # viewer opening call to allow a signal to trigger between @@ -136,6 +140,7 @@ def process(self, instance): _, thumbnail = os.path.split(playblast) + cmds.setAttr("{}.panZoomEnabled".format(preset["camera"]), pan_zoom) self.log.info("file list {}".format(thumbnail)) diff --git a/openpype/hosts/nuke/addon.py b/openpype/hosts/nuke/addon.py index 1c5d5c4005f..9d25afe2b64 100644 --- a/openpype/hosts/nuke/addon.py +++ b/openpype/hosts/nuke/addon.py @@ -27,7 +27,12 @@ def add_implementation_envs(self, env, _app): new_nuke_paths.append(norm_path) env["NUKE_PATH"] = os.pathsep.join(new_nuke_paths) + # Remove auto screen scale factor for Qt + # - let Nuke decide it's value env.pop("QT_AUTO_SCREEN_SCALE_FACTOR", None) + # Remove tkinter library paths if are set + env.pop("TK_LIBRARY", None) + env.pop("TCL_LIBRARY", None) # Add vendor to PYTHONPATH python_path = env["PYTHONPATH"] diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index 2691b7447a1..bde06e4fd78 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -2961,7 +2961,7 @@ def get_viewer_config_from_string(input_string): viewer = split[1] display = split[0] elif "(" in viewer: - pattern = r"([\w\d\s]+).*[(](.*)[)]" + pattern = r"([\w\d\s\.\-]+).*[(](.*)[)]" result = re.findall(pattern, viewer) try: result = result.pop() diff --git a/openpype/hosts/nuke/api/pipeline.py b/openpype/hosts/nuke/api/pipeline.py index c343c635faf..fb707ca44c8 100644 --- a/openpype/hosts/nuke/api/pipeline.py +++ b/openpype/hosts/nuke/api/pipeline.py @@ -364,6 +364,9 @@ def containerise(node, set_avalon_knob_data(node, data) + # set tab 
to first native + node.setTab(0) + return node diff --git a/openpype/hosts/nuke/plugins/load/load_camera_abc.py b/openpype/hosts/nuke/plugins/load/load_camera_abc.py index f5dfc8c0ab9..9fef7424c84 100644 --- a/openpype/hosts/nuke/plugins/load/load_camera_abc.py +++ b/openpype/hosts/nuke/plugins/load/load_camera_abc.py @@ -65,6 +65,9 @@ def load(self, context, name, namespace, data): object_name, file), inpanel=False ) + # hide property panel + camera_node.hideControlPanel() + camera_node.forceValidate() camera_node["frame_rate"].setValue(float(fps)) diff --git a/openpype/hosts/nuke/plugins/load/load_clip.py b/openpype/hosts/nuke/plugins/load/load_clip.py index b17356c5c7a..565d777811c 100644 --- a/openpype/hosts/nuke/plugins/load/load_clip.py +++ b/openpype/hosts/nuke/plugins/load/load_clip.py @@ -145,6 +145,9 @@ def load(self, context, name, namespace, options): "Read", "name {}".format(read_name)) + # hide property panel + read_node.hideControlPanel() + # to avoid multiple undo steps for rest of process # we will switch off undo-ing with viewer_update_and_undo_stop(): diff --git a/openpype/hosts/nuke/plugins/load/load_effects.py b/openpype/hosts/nuke/plugins/load/load_effects.py index d164e0604c7..cef4b0a5fcc 100644 --- a/openpype/hosts/nuke/plugins/load/load_effects.py +++ b/openpype/hosts/nuke/plugins/load/load_effects.py @@ -89,6 +89,9 @@ def load(self, context, name, namespace, data): "Group", "name {}_1".format(object_name)) + # hide property panel + GN.hideControlPanel() + # adding content to the group node with GN: pre_node = nuke.createNode("Input") diff --git a/openpype/hosts/nuke/plugins/load/load_effects_ip.py b/openpype/hosts/nuke/plugins/load/load_effects_ip.py index 44565c139d0..9bd40be8169 100644 --- a/openpype/hosts/nuke/plugins/load/load_effects_ip.py +++ b/openpype/hosts/nuke/plugins/load/load_effects_ip.py @@ -90,6 +90,9 @@ def load(self, context, name, namespace, data): "Group", "name {}_1".format(object_name)) + # hide property panel + GN.hideControlPanel() + # adding content to the group node with GN: pre_node = nuke.createNode("Input") diff --git a/openpype/hosts/nuke/plugins/load/load_image.py b/openpype/hosts/nuke/plugins/load/load_image.py index 3e81ef999b5..49dc12f588e 100644 --- a/openpype/hosts/nuke/plugins/load/load_image.py +++ b/openpype/hosts/nuke/plugins/load/load_image.py @@ -62,7 +62,9 @@ def get_representations(cls): def load(self, context, name, namespace, options): self.log.info("__ options: `{}`".format(options)) - frame_number = options.get("frame_number", 1) + frame_number = options.get( + "frame_number", int(nuke.root()["first_frame"].getValue()) + ) version = context['version'] version_data = version.get("data", {}) @@ -112,6 +114,10 @@ def load(self, context, name, namespace, options): r = nuke.createNode( "Read", "name {}".format(read_name)) + + # hide property panel + r.hideControlPanel() + r["file"].setValue(file) # Set colorspace defined in version data diff --git a/openpype/hosts/nuke/plugins/load/load_model.py b/openpype/hosts/nuke/plugins/load/load_model.py index 151401bad3f..ad985e83c66 100644 --- a/openpype/hosts/nuke/plugins/load/load_model.py +++ b/openpype/hosts/nuke/plugins/load/load_model.py @@ -63,6 +63,10 @@ def load(self, context, name, namespace, data): object_name, file), inpanel=False ) + + # hide property panel + model_node.hideControlPanel() + model_node.forceValidate() # Ensure all items are imported and selected. 
diff --git a/openpype/hosts/nuke/plugins/load/load_script_precomp.py b/openpype/hosts/nuke/plugins/load/load_script_precomp.py index 21e384b538e..f0972f85d25 100644 --- a/openpype/hosts/nuke/plugins/load/load_script_precomp.py +++ b/openpype/hosts/nuke/plugins/load/load_script_precomp.py @@ -71,6 +71,9 @@ def load(self, context, name, namespace, data): "Precomp", "file {}".format(file)) + # hide property panel + P.hideControlPanel() + # Set colorspace defined in version data colorspace = context["version"]["data"].get("colorspace", None) self.log.info("colorspace: {}\n".format(colorspace)) diff --git a/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py b/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py index e7197b4fa81..06c086b10dd 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py +++ b/openpype/hosts/nuke/plugins/publish/extract_slate_frame.py @@ -298,7 +298,7 @@ def _render_slate_to_sequence(self, instance): def add_comment_slate_node(self, instance, node): - comment = instance.context.data.get("comment") + comment = instance.data["comment"] intent = instance.context.data.get("intent") if not isinstance(intent, dict): intent = { diff --git a/openpype/hosts/photoshop/plugins/create/create_legacy_image.py b/openpype/hosts/photoshop/plugins/create/create_legacy_image.py index 2792a775e06..76724581659 100644 --- a/openpype/hosts/photoshop/plugins/create/create_legacy_image.py +++ b/openpype/hosts/photoshop/plugins/create/create_legacy_image.py @@ -29,7 +29,8 @@ def process(self): if len(selection) > 1: # Ask user whether to create one image or image per selected # item. - msg_box = QtWidgets.QMessageBox() + active_window = QtWidgets.QApplication.activeWindow() + msg_box = QtWidgets.QMessageBox(parent=active_window) msg_box.setIcon(QtWidgets.QMessageBox.Warning) msg_box.setText( "Multiple layers selected." @@ -102,7 +103,7 @@ def process(self): if group.long_name: for directory in group.long_name[::-1]: name = directory.replace(stub.PUBLISH_ICON, '').\ - replace(stub.LOADED_ICON, '') + replace(stub.LOADED_ICON, '') long_names.append(name) self.data.update({"subset": subset_name}) diff --git a/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_workfiles.py b/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_workfiles.py index 56ea82f6b69..a7ae02a2ebb 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_workfiles.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/validate_texture_workfiles.py @@ -1,5 +1,7 @@ +import os import pyblish.api +from openpype.settings import get_project_settings from openpype.pipeline.publish import ( ValidateContentsOrder, PublishXmlValidationError, @@ -18,23 +20,38 @@ class ValidateTextureBatchWorkfiles(pyblish.api.InstancePlugin): families = ["texture_batch_workfile"] optional = True - # from presets - main_workfile_extensions = ['mra'] - def process(self, instance): if instance.data["family"] == "workfile": ext = instance.data["representations"][0]["ext"] - if ext not in self.main_workfile_extensions: + main_workfile_extensions = self.get_main_workfile_extensions() + if ext not in main_workfile_extensions: self.log.warning("Only secondary workfile present!") return if not instance.data.get("resources"): msg = "No secondary workfile present for workfile '{}'". 
\ format(instance.data["name"]) - ext = self.main_workfile_extensions[0] + ext = main_workfile_extensions[0] formatting_data = {"file_name": instance.data["name"], "extension": ext} raise PublishXmlValidationError(self, msg, formatting_data=formatting_data ) + + @staticmethod + def get_main_workfile_extensions(): + project_settings = get_project_settings(os.environ["AVALON_PROJECT"]) + + try: + extensions = (project_settings["standalonepublisher"] + ["publish"] + ["CollectTextures"] + ["main_workfile_extensions"]) + except KeyError: + raise Exception("Setting 'Main workfile extensions' not found." + " The setting must be set for the" + " 'Collect Texture' publish plugin of the" + " 'Standalone Publish' tool.") + + return extensions diff --git a/openpype/hosts/traypublisher/plugins/create/create_online.py b/openpype/hosts/traypublisher/plugins/create/create_online.py new file mode 100644 index 00000000000..19f956a50ee --- /dev/null +++ b/openpype/hosts/traypublisher/plugins/create/create_online.py @@ -0,0 +1,96 @@ +# -*- coding: utf-8 -*- +"""Creator of online files. + +Online file retain their original name and use it as subset name. To +avoid conflicts, this creator checks if subset with this name already +exists under selected asset. +""" +from pathlib import Path + +from openpype.client import get_subset_by_name, get_asset_by_name +from openpype.lib.attribute_definitions import FileDef +from openpype.pipeline import ( + CreatedInstance, + CreatorError +) +from openpype.hosts.traypublisher.api.plugin import TrayPublishCreator + + +class OnlineCreator(TrayPublishCreator): + """Creates instance from file and retains its original name.""" + + identifier = "io.openpype.creators.traypublisher.online" + label = "Online" + family = "online" + description = "Publish file retaining its original file name" + extensions = [".mov", ".mp4", ".mxf", ".m4v", ".mpg"] + + def get_detail_description(self): + return """# Create file retaining its original file name. + + This will publish files using template helping to retain original + file name and that file name is used as subset name. 
+
+    By default it tries to guard against multiple publishes of the same
+    file."""
+
+    def get_icon(self):
+        return "fa.file"
+
+    def create(self, subset_name, instance_data, pre_create_data):
+        repr_file = pre_create_data.get("representation_file")
+        if not repr_file:
+            raise CreatorError("No files specified")
+
+        files = repr_file.get("filenames")
+        if not files:
+            # this should never happen
+            raise CreatorError("Missing files from representation")
+
+        origin_basename = Path(files[0]).stem
+
+        asset = get_asset_by_name(
+            self.project_name, instance_data["asset"], fields=["_id"])
+        if get_subset_by_name(
+                self.project_name, origin_basename, asset["_id"],
+                fields=["_id"]):
+            raise CreatorError(f"Subset with name {origin_basename} already "
+                               "exists under the selected asset")
+
+        instance_data["originalBasename"] = origin_basename
+        subset_name = origin_basename
+
+        instance_data["creator_attributes"] = {
+            "path": (Path(repr_file["directory"]) / files[0]).as_posix()
+        }
+
+        # Create new instance
+        new_instance = CreatedInstance(self.family, subset_name,
+                                       instance_data, self)
+        self._store_new_instance(new_instance)
+
+    def get_pre_create_attr_defs(self):
+        return [
+            FileDef(
+                "representation_file",
+                folders=False,
+                extensions=self.extensions,
+                allow_sequences=False,
+                single_item=True,
+                label="Representation",
+            )
+        ]
+
+    def get_subset_name(
+            self,
+            variant,
+            task_name,
+            asset_doc,
+            project_name,
+            host_name=None,
+            instance=None
+    ):
+        if instance is None:
+            return "{originalBasename}"
+
+        return instance.data["subset"]
diff --git a/openpype/hosts/traypublisher/plugins/publish/collect_online_file.py b/openpype/hosts/traypublisher/plugins/publish/collect_online_file.py
new file mode 100644
index 00000000000..a3f86afa138
--- /dev/null
+++ b/openpype/hosts/traypublisher/plugins/publish/collect_online_file.py
@@ -0,0 +1,23 @@
+# -*- coding: utf-8 -*-
+import pyblish.api
+from pathlib import Path
+
+
+class CollectOnlineFile(pyblish.api.InstancePlugin):
+    """Collect online file and retain its file name."""
+    label = "Collect Online File"
+    order = pyblish.api.CollectorOrder
+    families = ["online"]
+    hosts = ["traypublisher"]
+
+    def process(self, instance):
+        file = Path(instance.data["creator_attributes"]["path"])
+
+        instance.data["representations"].append(
+            {
+                "name": file.suffix.lstrip("."),
+                "ext": file.suffix.lstrip("."),
+                "files": file.name,
+                "stagingDir": file.parent.as_posix()
+            }
+        )
diff --git a/openpype/hosts/traypublisher/plugins/publish/validate_online_file.py b/openpype/hosts/traypublisher/plugins/publish/validate_online_file.py
new file mode 100644
index 00000000000..12b2e72ced9
--- /dev/null
+++ b/openpype/hosts/traypublisher/plugins/publish/validate_online_file.py
@@ -0,0 +1,32 @@
+# -*- coding: utf-8 -*-
+import pyblish.api
+
+from openpype.pipeline.publish import (
+    ValidateContentsOrder,
+    PublishValidationError,
+    OptionalPyblishPluginMixin,
+)
+from openpype.client import get_subset_by_name
+
+
+class ValidateOnlineFile(OptionalPyblishPluginMixin,
+                         pyblish.api.InstancePlugin):
+    """Validate that the subset does not exist yet."""
+    label = "Validate Existing Online Files"
+    hosts = ["traypublisher"]
+    families = ["online"]
+    order = ValidateContentsOrder
+
+    optional = True
+
+    def process(self, instance):
+        project_name = instance.context.data["projectName"]
+        asset_id = instance.data["assetEntity"]["_id"]
+        subset = get_subset_by_name(
+            project_name, instance.data["subset"], asset_id)
+
+        if subset:
+            raise PublishValidationError(
+                "Subset to be published 
already exists.", + title=self.label + ) diff --git a/openpype/hosts/tvpaint/plugins/publish/extract_sequence.py b/openpype/hosts/tvpaint/plugins/publish/extract_sequence.py index 1ebaf1da64b..78074f720c4 100644 --- a/openpype/hosts/tvpaint/plugins/publish/extract_sequence.py +++ b/openpype/hosts/tvpaint/plugins/publish/extract_sequence.py @@ -25,6 +25,7 @@ class ExtractSequence(pyblish.api.Extractor): label = "Extract Sequence" hosts = ["tvpaint"] families = ["review", "renderPass", "renderLayer", "renderScene"] + families_to_review = ["review"] # Modifiable with settings review_bg = [255, 255, 255, 255] @@ -133,9 +134,9 @@ def process(self, instance): output_frame_start ) - # Fill tags and new families + # Fill tags and new families from project settings tags = [] - if family_lowered in ("review", "renderlayer", "renderscene"): + if family_lowered in self.families_to_review: tags.append("review") # Sequence of one frame diff --git a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypePublishInstance.cpp b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypePublishInstance.cpp index 4f1e846c0bf..ed81104c05a 100644 --- a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypePublishInstance.cpp +++ b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypePublishInstance.cpp @@ -2,107 +2,150 @@ #include "OpenPypePublishInstance.h" #include "AssetRegistryModule.h" +#include "NotificationManager.h" +#include "SNotificationList.h" +//Moves all the invalid pointers to the end to prepare them for the shrinking +#define REMOVE_INVALID_ENTRIES(VAR) VAR.CompactStable(); \ + VAR.Shrink(); UOpenPypePublishInstance::UOpenPypePublishInstance(const FObjectInitializer& ObjectInitializer) - : UObject(ObjectInitializer) + : UPrimaryDataAsset(ObjectInitializer) { - FAssetRegistryModule& AssetRegistryModule = FModuleManager::LoadModuleChecked("AssetRegistry"); - FString path = UOpenPypePublishInstance::GetPathName(); + const FAssetRegistryModule& AssetRegistryModule = FModuleManager::LoadModuleChecked< + FAssetRegistryModule>("AssetRegistry"); + + const FPropertyEditorModule& PropertyEditorModule = FModuleManager::LoadModuleChecked( + "PropertyEditor"); + + FString Left, Right; + GetPathName().Split("/" + GetName(), &Left, &Right); + FARFilter Filter; - Filter.PackagePaths.Add(FName(*path)); + Filter.PackagePaths.Emplace(FName(Left)); + + TArray FoundAssets; + AssetRegistryModule.GetRegistry().GetAssets(Filter, FoundAssets); - AssetRegistryModule.Get().OnAssetAdded().AddUObject(this, &UOpenPypePublishInstance::OnAssetAdded); + for (const FAssetData& AssetData : FoundAssets) + OnAssetCreated(AssetData); + + REMOVE_INVALID_ENTRIES(AssetDataInternal) + REMOVE_INVALID_ENTRIES(AssetDataExternal) + + AssetRegistryModule.Get().OnAssetAdded().AddUObject(this, &UOpenPypePublishInstance::OnAssetCreated); AssetRegistryModule.Get().OnAssetRemoved().AddUObject(this, &UOpenPypePublishInstance::OnAssetRemoved); - AssetRegistryModule.Get().OnAssetRenamed().AddUObject(this, &UOpenPypePublishInstance::OnAssetRenamed); + AssetRegistryModule.Get().OnAssetUpdated().AddUObject(this, &UOpenPypePublishInstance::OnAssetUpdated); } -void UOpenPypePublishInstance::OnAssetAdded(const FAssetData& AssetData) +void UOpenPypePublishInstance::OnAssetCreated(const FAssetData& InAssetData) { TArray split; - // get directory of current container - FString selfFullPath = UOpenPypePublishInstance::GetPathName(); - FString selfDir = 
FPackageName::GetLongPackagePath(*selfFullPath); + UObject* Asset = InAssetData.GetAsset(); - // get asset path and class - FString assetPath = AssetData.GetFullName(); - FString assetFName = AssetData.AssetClass.ToString(); - - // split path - assetPath.ParseIntoArray(split, TEXT(" "), true); + if (!IsValid(Asset)) + { + UE_LOG(LogAssetData, Warning, TEXT("Asset \"%s\" is not valid! Skipping the addition."), + *InAssetData.ObjectPath.ToString()); + return; + } - FString assetDir = FPackageName::GetLongPackagePath(*split[1]); + const bool result = IsUnderSameDir(Asset) && Cast(Asset) == nullptr; - // take interest only in paths starting with path of current container - if (assetDir.StartsWith(*selfDir)) + if (result) { - // exclude self - if (assetFName != "OpenPypePublishInstance") + if (AssetDataInternal.Emplace(Asset).IsValidId()) { - assets.Add(assetPath); - UE_LOG(LogTemp, Log, TEXT("%s: asset added to %s"), *selfFullPath, *selfDir); + UE_LOG(LogTemp, Log, TEXT("Added an Asset to PublishInstance - Publish Instance: %s, Asset %s"), + *this->GetName(), *Asset->GetName()); } } } -void UOpenPypePublishInstance::OnAssetRemoved(const FAssetData& AssetData) +void UOpenPypePublishInstance::OnAssetRemoved(const FAssetData& InAssetData) { - TArray split; - - // get directory of current container - FString selfFullPath = UOpenPypePublishInstance::GetPathName(); - FString selfDir = FPackageName::GetLongPackagePath(*selfFullPath); - - // get asset path and class - FString assetPath = AssetData.GetFullName(); - FString assetFName = AssetData.AssetClass.ToString(); - - // split path - assetPath.ParseIntoArray(split, TEXT(" "), true); - - FString assetDir = FPackageName::GetLongPackagePath(*split[1]); - - // take interest only in paths starting with path of current container - FString path = UOpenPypePublishInstance::GetPathName(); - FString lpp = FPackageName::GetLongPackagePath(*path); - - if (assetDir.StartsWith(*selfDir)) + if (Cast(InAssetData.GetAsset()) == nullptr) { - // exclude self - if (assetFName != "OpenPypePublishInstance") + if (AssetDataInternal.Contains(nullptr)) { - // UE_LOG(LogTemp, Warning, TEXT("%s: asset removed"), *lpp); - assets.Remove(assetPath); + AssetDataInternal.Remove(nullptr); + REMOVE_INVALID_ENTRIES(AssetDataInternal) + } + else + { + AssetDataExternal.Remove(nullptr); + REMOVE_INVALID_ENTRIES(AssetDataExternal) } } } -void UOpenPypePublishInstance::OnAssetRenamed(const FAssetData& AssetData, const FString& str) +void UOpenPypePublishInstance::OnAssetUpdated(const FAssetData& InAssetData) { - TArray split; + REMOVE_INVALID_ENTRIES(AssetDataInternal); + REMOVE_INVALID_ENTRIES(AssetDataExternal); +} - // get directory of current container - FString selfFullPath = UOpenPypePublishInstance::GetPathName(); - FString selfDir = FPackageName::GetLongPackagePath(*selfFullPath); +bool UOpenPypePublishInstance::IsUnderSameDir(const UObject* InAsset) const +{ + FString ThisLeft, ThisRight; + this->GetPathName().Split(this->GetName(), &ThisLeft, &ThisRight); - // get asset path and class - FString assetPath = AssetData.GetFullName(); - FString assetFName = AssetData.AssetClass.ToString(); + return InAsset->GetPathName().StartsWith(ThisLeft); +} - // split path - assetPath.ParseIntoArray(split, TEXT(" "), true); +#ifdef WITH_EDITOR - FString assetDir = FPackageName::GetLongPackagePath(*split[1]); - if (assetDir.StartsWith(*selfDir)) +void UOpenPypePublishInstance::SendNotification(const FString& Text) const +{ + FNotificationInfo Info{FText::FromString(Text)}; + + 
Info.bFireAndForget = true; + Info.bUseLargeFont = false; + Info.bUseThrobber = false; + Info.bUseSuccessFailIcons = false; + Info.ExpireDuration = 4.f; + Info.FadeOutDuration = 2.f; + + FSlateNotificationManager::Get().AddNotification(Info); + + UE_LOG(LogAssetData, Warning, + TEXT( + "Removed duplicated asset from the AssetsDataExternal in Container \"%s\", Asset is already included in the AssetDataInternal!" + ), *GetName() + ) +} + + +void UOpenPypePublishInstance::PostEditChangeProperty(FPropertyChangedEvent& PropertyChangedEvent) +{ + Super::PostEditChangeProperty(PropertyChangedEvent); + + if (PropertyChangedEvent.ChangeType == EPropertyChangeType::ValueSet && + PropertyChangedEvent.Property->GetFName() == GET_MEMBER_NAME_CHECKED( + UOpenPypePublishInstance, AssetDataExternal)) { - // exclude self - if (assetFName != "AssetContainer") + // Check for duplicated assets + for (const auto& Asset : AssetDataInternal) { + if (AssetDataExternal.Contains(Asset)) + { + AssetDataExternal.Remove(Asset); + return SendNotification( + "You are not allowed to add assets into AssetDataExternal which are already included in AssetDataInternal!"); + } + } - assets.Remove(str); - assets.Add(assetPath); - // UE_LOG(LogTemp, Warning, TEXT("%s: asset renamed %s"), *lpp, *str); + // Check if no UOpenPypePublishInstance type assets are included + for (const auto& Asset : AssetDataExternal) + { + if (Cast(Asset.Get()) != nullptr) + { + AssetDataExternal.Remove(Asset); + return SendNotification("You are not allowed to add publish instances!"); + } } } } + +#endif diff --git a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypePublishInstanceFactory.cpp b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypePublishInstanceFactory.cpp index e61964c6893..9b26da7fa48 100644 --- a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypePublishInstanceFactory.cpp +++ b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Private/OpenPypePublishInstanceFactory.cpp @@ -9,10 +9,10 @@ UOpenPypePublishInstanceFactory::UOpenPypePublishInstanceFactory(const FObjectIn bEditorImport = true; } -UObject* UOpenPypePublishInstanceFactory::FactoryCreateNew(UClass* Class, UObject* InParent, FName Name, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) +UObject* UOpenPypePublishInstanceFactory::FactoryCreateNew(UClass* InClass, UObject* InParent, FName InName, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) { - UOpenPypePublishInstance* OpenPypePublishInstance = NewObject(InParent, Class, Name, Flags); - return OpenPypePublishInstance; + check(InClass->IsChildOf(UOpenPypePublishInstance::StaticClass())); + return NewObject(InParent, InClass, InName, Flags); } bool UOpenPypePublishInstanceFactory::ShouldShowInNewMenu() const { diff --git a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypePublishInstance.h b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypePublishInstance.h index 0a27a078d7a..0e946fb039b 100644 --- a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypePublishInstance.h +++ b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypePublishInstance.h @@ -5,17 +5,99 @@ UCLASS(Blueprintable) -class OPENPYPE_API UOpenPypePublishInstance : public UObject +class OPENPYPE_API UOpenPypePublishInstance : public UPrimaryDataAsset { - GENERATED_BODY() - + GENERATED_UCLASS_BODY() + public: - UOpenPypePublishInstance(const FObjectInitializer& ObjectInitalizer); + 
+ /** + /** + * Retrieves all the assets which are monitored by the Publish Instance (Monitors assets in the directory which is + * placed in) + * + * @return - Set of UObjects. Careful! They are returning raw pointers. Seems like an issue in UE5 + */ + UFUNCTION(BlueprintCallable, BlueprintPure) + TSet GetInternalAssets() const + { + //For some reason it can only return Raw Pointers? Seems like an issue which they haven't fixed. + TSet ResultSet; + + for (const auto& Asset : AssetDataInternal) + ResultSet.Add(Asset.LoadSynchronous()); + + return ResultSet; + } + + /** + * Retrieves all the assets which have been added manually by the Publish Instance + * + * @return - TSet of assets (UObjects). Careful! They are returning raw pointers. Seems like an issue in UE5 + */ + UFUNCTION(BlueprintCallable, BlueprintPure) + TSet GetExternalAssets() const + { + //For some reason it can only return Raw Pointers? Seems like an issue which they haven't fixed. + TSet ResultSet; + + for (const auto& Asset : AssetDataExternal) + ResultSet.Add(Asset.LoadSynchronous()); + + return ResultSet; + } + + /** + * Function for returning all the assets in the container combined. + * + * @return Returns all the internal and externally added assets into one set (TSet of UObjects). Careful! They are + * returning raw pointers. Seems like an issue in UE5 + * + * @attention If the bAddExternalAssets variable is false, external assets won't be included! + */ + UFUNCTION(BlueprintCallable, BlueprintPure) + TSet GetAllAssets() const + { + const TSet>& IteratedSet = bAddExternalAssets ? AssetDataInternal.Union(AssetDataExternal) : AssetDataInternal; + + //Create a new TSet only with raw pointers. + TSet ResultSet; + + for (auto& Asset : IteratedSet) + ResultSet.Add(Asset.LoadSynchronous()); + + return ResultSet; + } + - UPROPERTY(EditAnywhere, BlueprintReadOnly) - TArray assets; private: - void OnAssetAdded(const FAssetData& AssetData); - void OnAssetRemoved(const FAssetData& AssetData); - void OnAssetRenamed(const FAssetData& AssetData, const FString& str); -}; \ No newline at end of file + + UPROPERTY(VisibleAnywhere, Category="Assets") + TSet> AssetDataInternal; + + /** + * This property allows exposing the array to include other assets from any other directory than what it's currently + * monitoring. NOTE: that these assets have to be added manually! They are not automatically registered or added! 
+ */ + UPROPERTY(EditAnywhere, Category = "Assets") + bool bAddExternalAssets = false; + + UPROPERTY(EditAnywhere, meta=(EditCondition="bAddExternalAssets"), Category="Assets") + TSet> AssetDataExternal; + + + void OnAssetCreated(const FAssetData& InAssetData); + void OnAssetRemoved(const FAssetData& InAssetData); + void OnAssetUpdated(const FAssetData& InAssetData); + + bool IsUnderSameDir(const UObject* InAsset) const; + +#ifdef WITH_EDITOR + + void SendNotification(const FString& Text) const; + virtual void PostEditChangeProperty(FPropertyChangedEvent& PropertyChangedEvent) override; + +#endif + +}; + diff --git a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypePublishInstanceFactory.h b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypePublishInstanceFactory.h index a2b3abe13ed..7d2c77fe6e9 100644 --- a/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypePublishInstanceFactory.h +++ b/openpype/hosts/unreal/integration/UE_4.7/Source/OpenPype/Public/OpenPypePublishInstanceFactory.h @@ -14,6 +14,6 @@ class OPENPYPE_API UOpenPypePublishInstanceFactory : public UFactory public: UOpenPypePublishInstanceFactory(const FObjectInitializer& ObjectInitializer); - virtual UObject* FactoryCreateNew(UClass* Class, UObject* InParent, FName Name, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) override; + virtual UObject* FactoryCreateNew(UClass* InClass, UObject* InParent, FName InName, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) override; virtual bool ShouldShowInNewMenu() const override; -}; \ No newline at end of file +}; diff --git a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp index 4f1e846c0bf..322663eeec8 100644 --- a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp +++ b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp @@ -2,107 +2,151 @@ #include "OpenPypePublishInstance.h" #include "AssetRegistryModule.h" +#include "AssetToolsModule.h" +#include "Framework/Notifications/NotificationManager.h" +#include "SNotificationList.h" +//Moves all the invalid pointers to the end to prepare them for the shrinking +#define REMOVE_INVALID_ENTRIES(VAR) VAR.CompactStable(); \ + VAR.Shrink(); UOpenPypePublishInstance::UOpenPypePublishInstance(const FObjectInitializer& ObjectInitializer) - : UObject(ObjectInitializer) + : UPrimaryDataAsset(ObjectInitializer) { - FAssetRegistryModule& AssetRegistryModule = FModuleManager::LoadModuleChecked("AssetRegistry"); - FString path = UOpenPypePublishInstance::GetPathName(); + const FAssetRegistryModule& AssetRegistryModule = FModuleManager::LoadModuleChecked< + FAssetRegistryModule>("AssetRegistry"); + + FString Left, Right; + GetPathName().Split(GetName(), &Left, &Right); + FARFilter Filter; - Filter.PackagePaths.Add(FName(*path)); + Filter.PackagePaths.Emplace(FName(Left)); + + TArray FoundAssets; + AssetRegistryModule.GetRegistry().GetAssets(Filter, FoundAssets); + + for (const FAssetData& AssetData : FoundAssets) + OnAssetCreated(AssetData); - AssetRegistryModule.Get().OnAssetAdded().AddUObject(this, &UOpenPypePublishInstance::OnAssetAdded); + REMOVE_INVALID_ENTRIES(AssetDataInternal) + REMOVE_INVALID_ENTRIES(AssetDataExternal) + + AssetRegistryModule.Get().OnAssetAdded().AddUObject(this, &UOpenPypePublishInstance::OnAssetCreated); 
AssetRegistryModule.Get().OnAssetRemoved().AddUObject(this, &UOpenPypePublishInstance::OnAssetRemoved); - AssetRegistryModule.Get().OnAssetRenamed().AddUObject(this, &UOpenPypePublishInstance::OnAssetRenamed); + AssetRegistryModule.Get().OnAssetUpdated().AddUObject(this, &UOpenPypePublishInstance::OnAssetUpdated); + + } -void UOpenPypePublishInstance::OnAssetAdded(const FAssetData& AssetData) +void UOpenPypePublishInstance::OnAssetCreated(const FAssetData& InAssetData) { TArray split; - // get directory of current container - FString selfFullPath = UOpenPypePublishInstance::GetPathName(); - FString selfDir = FPackageName::GetLongPackagePath(*selfFullPath); - - // get asset path and class - FString assetPath = AssetData.GetFullName(); - FString assetFName = AssetData.AssetClass.ToString(); + const TObjectPtr Asset = InAssetData.GetAsset(); - // split path - assetPath.ParseIntoArray(split, TEXT(" "), true); + if (!IsValid(Asset)) + { + UE_LOG(LogAssetData, Warning, TEXT("Asset \"%s\" is not valid! Skipping the addition."), + *InAssetData.ObjectPath.ToString()); + return; + } - FString assetDir = FPackageName::GetLongPackagePath(*split[1]); + const bool result = IsUnderSameDir(Asset) && Cast(Asset) == nullptr; - // take interest only in paths starting with path of current container - if (assetDir.StartsWith(*selfDir)) + if (result) { - // exclude self - if (assetFName != "OpenPypePublishInstance") + if (AssetDataInternal.Emplace(Asset).IsValidId()) { - assets.Add(assetPath); - UE_LOG(LogTemp, Log, TEXT("%s: asset added to %s"), *selfFullPath, *selfDir); + UE_LOG(LogTemp, Log, TEXT("Added an Asset to PublishInstance - Publish Instance: %s, Asset %s"), + *this->GetName(), *Asset->GetName()); } } } -void UOpenPypePublishInstance::OnAssetRemoved(const FAssetData& AssetData) +void UOpenPypePublishInstance::OnAssetRemoved(const FAssetData& InAssetData) { - TArray split; - - // get directory of current container - FString selfFullPath = UOpenPypePublishInstance::GetPathName(); - FString selfDir = FPackageName::GetLongPackagePath(*selfFullPath); - - // get asset path and class - FString assetPath = AssetData.GetFullName(); - FString assetFName = AssetData.AssetClass.ToString(); - - // split path - assetPath.ParseIntoArray(split, TEXT(" "), true); - - FString assetDir = FPackageName::GetLongPackagePath(*split[1]); - - // take interest only in paths starting with path of current container - FString path = UOpenPypePublishInstance::GetPathName(); - FString lpp = FPackageName::GetLongPackagePath(*path); - - if (assetDir.StartsWith(*selfDir)) + if (Cast(InAssetData.GetAsset()) == nullptr) { - // exclude self - if (assetFName != "OpenPypePublishInstance") + if (AssetDataInternal.Contains(nullptr)) { - // UE_LOG(LogTemp, Warning, TEXT("%s: asset removed"), *lpp); - assets.Remove(assetPath); + AssetDataInternal.Remove(nullptr); + REMOVE_INVALID_ENTRIES(AssetDataInternal) + } + else + { + AssetDataExternal.Remove(nullptr); + REMOVE_INVALID_ENTRIES(AssetDataExternal) } } } -void UOpenPypePublishInstance::OnAssetRenamed(const FAssetData& AssetData, const FString& str) +void UOpenPypePublishInstance::OnAssetUpdated(const FAssetData& InAssetData) { - TArray split; + REMOVE_INVALID_ENTRIES(AssetDataInternal); + REMOVE_INVALID_ENTRIES(AssetDataExternal); +} + +bool UOpenPypePublishInstance::IsUnderSameDir(const TObjectPtr& InAsset) const +{ + FString ThisLeft, ThisRight; + this->GetPathName().Split(this->GetName(), &ThisLeft, &ThisRight); - // get directory of current container - FString selfFullPath = 
UOpenPypePublishInstance::GetPathName(); - FString selfDir = FPackageName::GetLongPackagePath(*selfFullPath); + return InAsset->GetPathName().StartsWith(ThisLeft); +} - // get asset path and class - FString assetPath = AssetData.GetFullName(); - FString assetFName = AssetData.AssetClass.ToString(); +#ifdef WITH_EDITOR - // split path - assetPath.ParseIntoArray(split, TEXT(" "), true); +void UOpenPypePublishInstance::SendNotification(const FString& Text) const +{ + FNotificationInfo Info{FText::FromString(Text)}; + + Info.bFireAndForget = true; + Info.bUseLargeFont = false; + Info.bUseThrobber = false; + Info.bUseSuccessFailIcons = false; + Info.ExpireDuration = 4.f; + Info.FadeOutDuration = 2.f; + + FSlateNotificationManager::Get().AddNotification(Info); + + UE_LOG(LogAssetData, Warning, + TEXT( + "Removed duplicated asset from the AssetsDataExternal in Container \"%s\", Asset is already included in the AssetDataInternal!" + ), *GetName() + ) +} - FString assetDir = FPackageName::GetLongPackagePath(*split[1]); - if (assetDir.StartsWith(*selfDir)) + +void UOpenPypePublishInstance::PostEditChangeProperty(FPropertyChangedEvent& PropertyChangedEvent) +{ + Super::PostEditChangeProperty(PropertyChangedEvent); + + if (PropertyChangedEvent.ChangeType == EPropertyChangeType::ValueSet && + PropertyChangedEvent.Property->GetFName() == GET_MEMBER_NAME_CHECKED( + UOpenPypePublishInstance, AssetDataExternal)) { - // exclude self - if (assetFName != "AssetContainer") + + // Check for duplicated assets + for (const auto& Asset : AssetDataInternal) { + if (AssetDataExternal.Contains(Asset)) + { + AssetDataExternal.Remove(Asset); + return SendNotification("You are not allowed to add assets into AssetDataExternal which are already included in AssetDataInternal!"); + } + + } - assets.Remove(str); - assets.Add(assetPath); - // UE_LOG(LogTemp, Warning, TEXT("%s: asset renamed %s"), *lpp, *str); + // Check if no UOpenPypePublishInstance type assets are included + for (const auto& Asset : AssetDataExternal) + { + if (Cast(Asset.Get()) != nullptr) + { + AssetDataExternal.Remove(Asset); + return SendNotification("You are not allowed to add publish instances!"); + } } } } + +#endif diff --git a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstanceFactory.cpp b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstanceFactory.cpp index e61964c6893..9b26da7fa48 100644 --- a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstanceFactory.cpp +++ b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstanceFactory.cpp @@ -9,10 +9,10 @@ UOpenPypePublishInstanceFactory::UOpenPypePublishInstanceFactory(const FObjectIn bEditorImport = true; } -UObject* UOpenPypePublishInstanceFactory::FactoryCreateNew(UClass* Class, UObject* InParent, FName Name, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) +UObject* UOpenPypePublishInstanceFactory::FactoryCreateNew(UClass* InClass, UObject* InParent, FName InName, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) { - UOpenPypePublishInstance* OpenPypePublishInstance = NewObject(InParent, Class, Name, Flags); - return OpenPypePublishInstance; + check(InClass->IsChildOf(UOpenPypePublishInstance::StaticClass())); + return NewObject(InParent, InClass, InName, Flags); } bool UOpenPypePublishInstanceFactory::ShouldShowInNewMenu() const { diff --git a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstance.h 
b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstance.h index 0a27a078d7a..2f066bd94bb 100644 --- a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstance.h +++ b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstance.h @@ -1,21 +1,97 @@ #pragma once +#include "EditorTutorial.h" #include "Engine.h" #include "OpenPypePublishInstance.generated.h" UCLASS(Blueprintable) -class OPENPYPE_API UOpenPypePublishInstance : public UObject +class OPENPYPE_API UOpenPypePublishInstance : public UPrimaryDataAsset { - GENERATED_BODY() - + GENERATED_UCLASS_BODY() public: - UOpenPypePublishInstance(const FObjectInitializer& ObjectInitalizer); + /** + * Retrieves all the assets which are monitored by the Publish Instance (Monitors assets in the directory which is + * placed in) + * + * @return - Set of UObjects. Careful! They are returning raw pointers. Seems like an issue in UE5 + */ + UFUNCTION(BlueprintCallable, BlueprintPure) + TSet GetInternalAssets() const + { + //For some reason it can only return Raw Pointers? Seems like an issue which they haven't fixed. + TSet ResultSet; + + for (const auto& Asset : AssetDataInternal) + ResultSet.Add(Asset.LoadSynchronous()); + + return ResultSet; + } + + /** + * Retrieves all the assets which have been added manually by the Publish Instance + * + * @return - TSet of assets (UObjects). Careful! They are returning raw pointers. Seems like an issue in UE5 + */ + UFUNCTION(BlueprintCallable, BlueprintPure) + TSet GetExternalAssets() const + { + //For some reason it can only return Raw Pointers? Seems like an issue which they haven't fixed. + TSet ResultSet; + + for (const auto& Asset : AssetDataExternal) + ResultSet.Add(Asset.LoadSynchronous()); + + return ResultSet; + } + + /** + * Function for returning all the assets in the container combined. + * + * @return Returns all the internal and externally added assets into one set (TSet of UObjects). Careful! They are + * returning raw pointers. Seems like an issue in UE5 + * + * @attention If the bAddExternalAssets variable is false, external assets won't be included! + */ + UFUNCTION(BlueprintCallable, BlueprintPure) + TSet GetAllAssets() const + { + const TSet>& IteratedSet = bAddExternalAssets ? AssetDataInternal.Union(AssetDataExternal) : AssetDataInternal; + + //Create a new TSet only with raw pointers. + TSet ResultSet; + + for (auto& Asset : IteratedSet) + ResultSet.Add(Asset.LoadSynchronous()); + + return ResultSet; + } - UPROPERTY(EditAnywhere, BlueprintReadOnly) - TArray assets; private: - void OnAssetAdded(const FAssetData& AssetData); - void OnAssetRemoved(const FAssetData& AssetData); - void OnAssetRenamed(const FAssetData& AssetData, const FString& str); -}; \ No newline at end of file + UPROPERTY(VisibleAnywhere, Category="Assets") + TSet> AssetDataInternal; + + /** + * This property allows the instance to include other assets from any other directory than what it's currently + * monitoring. + * @attention assets have to be added manually! They are not automatically registered or added! 
+ */ + UPROPERTY(EditAnywhere, Category="Assets") + bool bAddExternalAssets = false; + + UPROPERTY(EditAnywhere, Category="Assets", meta=(EditCondition="bAddExternalAssets")) + TSet> AssetDataExternal; + + void OnAssetCreated(const FAssetData& InAssetData); + void OnAssetRemoved(const FAssetData& InAssetData); + void OnAssetUpdated(const FAssetData& InAssetData); + + bool IsUnderSameDir(const TObjectPtr& InAsset) const; + +#ifdef WITH_EDITOR + + void SendNotification(const FString& Text) const; + virtual void PostEditChangeProperty(FPropertyChangedEvent& PropertyChangedEvent) override; + +#endif +}; diff --git a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstanceFactory.h b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstanceFactory.h index a2b3abe13ed..7d2c77fe6e9 100644 --- a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstanceFactory.h +++ b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstanceFactory.h @@ -14,6 +14,6 @@ class OPENPYPE_API UOpenPypePublishInstanceFactory : public UFactory public: UOpenPypePublishInstanceFactory(const FObjectInitializer& ObjectInitializer); - virtual UObject* FactoryCreateNew(UClass* Class, UObject* InParent, FName Name, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) override; + virtual UObject* FactoryCreateNew(UClass* InClass, UObject* InParent, FName InName, EObjectFlags Flags, UObject* Context, FFeedbackContext* Warn) override; virtual bool ShouldShowInNewMenu() const override; -}; \ No newline at end of file +}; diff --git a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py index 2bf097de418..79ed499a20b 100644 --- a/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py +++ b/openpype/hosts/webpublisher/plugins/publish/collect_published_files.py @@ -86,6 +86,7 @@ def process(self, context): first_file = task_data["files"][0] _, extension = os.path.splitext(first_file) + extension = extension.lower() family, families, tags = self._get_family( self.task_type_to_family, task_type, @@ -180,6 +181,7 @@ def _get_subset_name(self, family, subset_template, task_name, variant): def _get_single_repre(self, task_dir, files, tags): _, ext = os.path.splitext(files[0]) + ext = ext.lower() repre_data = { "name": ext[1:], "ext": ext[1:], @@ -199,6 +201,7 @@ def _process_sequence(self, files, task_dir, tags): frame_start = list(collections[0].indexes)[0] frame_end = list(collections[0].indexes)[-1] ext = collections[0].tail + ext = ext.lower() repre_data = { "frameStart": frame_start, "frameEnd": frame_end, @@ -244,8 +247,17 @@ def _get_family(self, settings, task_type, is_sequence, extension): for config in families_config: if is_sequence != config["is_sequence"]: continue - if (extension in config["extensions"] or - '' in config["extensions"]): # all extensions setting + extensions = config.get("extensions") or [] + lower_extensions = set() + for ext in extensions: + if ext: + ext = ext.lower() + if ext.startswith("."): + ext = ext[1:] + lower_extensions.add(ext) + + # all extensions setting + if not lower_extensions or extension in lower_extensions: found_family = config["result_family"] break diff --git a/openpype/lib/attribute_definitions.py b/openpype/lib/attribute_definitions.py index 6baeaec045f..0df7b16e642 100644 --- a/openpype/lib/attribute_definitions.py +++ 
b/openpype/lib/attribute_definitions.py @@ -105,11 +105,14 @@ class AbtractAttrDef(object): How to force to set `key` attribute? Args: - key(str): Under which key will be attribute value stored. - label(str): Attribute label. - tooltip(str): Attribute tooltip. - is_label_horizontal(bool): UI specific argument. Specify if label is + key (str): Under which key will be attribute value stored. + default (Any): Default value of an attribute. + label (str): Attribute label. + tooltip (str): Attribute tooltip. + is_label_horizontal (bool): UI specific argument. Specify if label is next to value input or ahead. + hidden (bool): Will be item hidden (for UI purposes). + disabled (bool): Item will be visible but disabled (for UI purposes). """ type_attributes = [] @@ -117,16 +120,29 @@ class AbtractAttrDef(object): is_value_def = True def __init__( - self, key, default, label=None, tooltip=None, is_label_horizontal=None + self, + key, + default, + label=None, + tooltip=None, + is_label_horizontal=None, + hidden=False, + disabled=False ): if is_label_horizontal is None: is_label_horizontal = True + + if hidden is None: + hidden = False + self.key = key self.label = label self.tooltip = tooltip self.default = default self.is_label_horizontal = is_label_horizontal - self._id = uuid.uuid4() + self.hidden = hidden + self.disabled = disabled + self._id = uuid.uuid4().hex self.__init__class__ = AbtractAttrDef @@ -173,7 +189,9 @@ def serialize(self): "label": self.label, "tooltip": self.tooltip, "default": self.default, - "is_label_horizontal": self.is_label_horizontal + "is_label_horizontal": self.is_label_horizontal, + "hidden": self.hidden, + "disabled": self.disabled } for attr in self.type_attributes: data[attr] = getattr(self, attr) @@ -235,6 +253,26 @@ def convert_value(self, value): return value +class HiddenDef(AbtractAttrDef): + """Hidden value of Any type. + + This attribute can be used for UI purposes to pass values related + to other attributes (e.g. in multi-page UIs). + + Keep in mind the value should be possible to parse by json parser. + """ + + type = "hidden" + + def __init__(self, key, default=None, **kwargs): + kwargs["default"] = default + kwargs["hidden"] = True + super(UnknownDef, self).__init__(key, **kwargs) + + def convert_value(self, value): + return value + + class NumberDef(AbtractAttrDef): """Number definition. diff --git a/openpype/lib/file_transaction.py b/openpype/lib/file_transaction.py index 1626bec6b66..f265b8815cc 100644 --- a/openpype/lib/file_transaction.py +++ b/openpype/lib/file_transaction.py @@ -14,9 +14,9 @@ class FileTransaction(object): - """ + """File transaction with rollback options. - The file transaction is a three step process. + The file transaction is a three-step process. 1) Rename any existing files to a "temporary backup" during `process()` 2) Copy the files to final destination during `process()` @@ -39,14 +39,12 @@ class FileTransaction(object): Warning: Any folders created during the transfer will not be removed. - """ MODE_COPY = 0 MODE_HARDLINK = 1 def __init__(self, log=None): - if log is None: log = logging.getLogger("FileTransaction") @@ -63,49 +61,64 @@ def __init__(self, log=None): self._backup_to_original = {} def add(self, src, dst, mode=MODE_COPY): - """Add a new file to transfer queue""" + """Add a new file to transfer queue. + + Args: + src (str): Source path. + dst (str): Destination path. + mode (MODE_COPY, MODE_HARDLINK): Transfer mode. 
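A minimal usage sketch of the three-step flow described in the FileTransaction docstring above. The import path simply follows the file path in this patch; the source and destination paths are made up for illustration:

    import logging

    from openpype.lib.file_transaction import FileTransaction

    transaction = FileTransaction(log=logging.getLogger("publish"))

    # Queueing the same source/destination pair twice is detected by add()
    # and the duplicate is ignored.
    transaction.add("/tmp/render.0001.exr", "/publish/render.0001.exr")
    transaction.add("/tmp/render.0001.exr", "/publish/render.0001.exr")

    try:
        # Steps 1 and 2: back up existing destinations, then copy/hardlink.
        transaction.process()
    except Exception:
        # Restore the backups and remove anything already transferred.
        transaction.rollback()
        raise
    else:
        # Step 3: drop the temporary ".bak" backups.
        transaction.finalize()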
+ """ + opts = {"mode": mode} - src = os.path.abspath(src) - dst = os.path.abspath(dst) + src = os.path.normpath(os.path.abspath(src)) + dst = os.path.normpath(os.path.abspath(dst)) if dst in self._transfers: queued_src = self._transfers[dst][0] if src == queued_src: - self.log.debug("File transfer was already " - "in queue: {} -> {}".format(src, dst)) + self.log.debug( + "File transfer was already in queue: {} -> {}".format( + src, dst)) return else: self.log.warning("File transfer in queue replaced..") - self.log.debug("Removed from queue: " - "{} -> {}".format(queued_src, dst)) - self.log.debug("Added to queue: {} -> {}".format(src, dst)) + self.log.debug( + "Removed from queue: {} -> {} replaced by {} -> {}".format( + queued_src, dst, src, dst)) self._transfers[dst] = (src, opts) def process(self): - # Backup any existing files - for dst in self._transfers.keys(): - if os.path.exists(dst): - # Backup original file - # todo: add timestamp or uuid to ensure unique - backup = dst + ".bak" - self._backup_to_original[backup] = dst - self.log.debug("Backup existing file: " - "{} -> {}".format(dst, backup)) - os.rename(dst, backup) + for dst, (src, _) in self._transfers.items(): + if dst == src or not os.path.exists(dst): + continue + + # Backup original file + # todo: add timestamp or uuid to ensure unique + backup = dst + ".bak" + self._backup_to_original[backup] = dst + self.log.debug( + "Backup existing file: {} -> {}".format(dst, backup)) + os.rename(dst, backup) # Copy the files to transfer for dst, (src, opts) in self._transfers.items(): + if dst == src: + self.log.debug( + "Source and destionation are same files {} -> {}".format( + src, dst)) + continue + self._create_folder_for_file(dst) if opts["mode"] == self.MODE_COPY: self.log.debug("Copying file ... {} -> {}".format(src, dst)) copyfile(src, dst) elif opts["mode"] == self.MODE_HARDLINK: - self.log.debug("Hardlinking file ... {} -> {}".format(src, - dst)) + self.log.debug("Hardlinking file ... 
{} -> {}".format( + src, dst)) create_hard_link(src, dst) self._transferred.append(dst) @@ -116,23 +129,21 @@ def finalize(self): try: os.remove(backup) except OSError: - self.log.error("Failed to remove backup file: " - "{}".format(backup), - exc_info=True) + self.log.error( + "Failed to remove backup file: {}".format(backup), + exc_info=True) def rollback(self): - errors = 0 - # Rollback any transferred files for path in self._transferred: try: os.remove(path) except OSError: errors += 1 - self.log.error("Failed to rollback created file: " - "{}".format(path), - exc_info=True) + self.log.error( + "Failed to rollback created file: {}".format(path), + exc_info=True) # Rollback the backups for backup, original in self._backup_to_original.items(): @@ -140,13 +151,15 @@ def rollback(self): os.rename(backup, original) except OSError: errors += 1 - self.log.error("Failed to restore original file: " - "{} -> {}".format(backup, original), - exc_info=True) + self.log.error( + "Failed to restore original file: {} -> {}".format( + backup, original), + exc_info=True) if errors: - self.log.error("{} errors occurred during " - "rollback.".format(errors), exc_info=True) + self.log.error( + "{} errors occurred during rollback.".format(errors), + exc_info=True) six.reraise(*sys.exc_info()) @property diff --git a/openpype/lib/path_templates.py b/openpype/lib/path_templates.py index b160054e380..0f99efb430a 100644 --- a/openpype/lib/path_templates.py +++ b/openpype/lib/path_templates.py @@ -422,7 +422,7 @@ def normalized(self): cls = self.__class__ return cls( - os.path.normpath(self), + os.path.normpath(self.replace("\\", "/")), self.template, self.solved, self.used_values, diff --git a/openpype/lib/transcoding.py b/openpype/lib/transcoding.py index 0bfccd34437..57279d0380d 100644 --- a/openpype/lib/transcoding.py +++ b/openpype/lib/transcoding.py @@ -77,26 +77,38 @@ def get_transcode_temp_directory(): ) -def get_oiio_info_for_input(filepath, logger=None): +def get_oiio_info_for_input(filepath, logger=None, subimages=False): """Call oiiotool to get information about input and return stdout. Stdout should contain xml format string. 
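The signature above gains a "subimages" flag, and the hunks that follow split oiiotool's per-subimage XML output accordingly. A hedged usage sketch, with a made-up EXR path and the module path taken from this patch:

    from openpype.lib.transcoding import get_oiio_info_for_input

    # Default behaviour: a single dictionary describing the first subimage.
    info = get_oiio_info_for_input("/path/to/multipart.exr")
    print(info["attribs"].get("compression"))

    # With subimages=True a list is returned, one entry per subimage/part.
    parts = get_oiio_info_for_input("/path/to/multipart.exr", subimages=True)
    print(len(parts))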
""" args = [ - get_oiio_tools_path(), "--info", "-v", "-i:infoformat=xml", filepath + get_oiio_tools_path(), + "--info", + "-v" ] + if subimages: + args.append("-a") + + args.extend(["-i:infoformat=xml", filepath]) + output = run_subprocess(args, logger=logger) output = output.replace("\r\n", "\n") xml_started = False + subimages_lines = [] lines = [] for line in output.split("\n"): if not xml_started: if not line.startswith("<"): continue xml_started = True + if xml_started: lines.append(line) + if line == "": + subimages_lines.append(lines) + lines = [] if not xml_started: raise ValueError( @@ -105,12 +117,19 @@ def get_oiio_info_for_input(filepath, logger=None): ) ) - xml_text = "\n".join(lines) - return parse_oiio_xml_output(xml_text, logger=logger) + output = [] + for subimage_lines in subimages_lines: + xml_text = "\n".join(subimage_lines) + output.append(parse_oiio_xml_output(xml_text, logger=logger)) + + if subimages: + return output + return output[0] class RationalToInt: """Rational value stored as division of 2 integers using string.""" + def __init__(self, string_value): parts = string_value.split("/") top = float(parts[0]) @@ -157,16 +176,16 @@ def convert_value_by_type_name(value_type, value, logger=None): if value_type == "int": return int(value) - if value_type == "float": + if value_type in ("float", "double"): return float(value) # Vectors will probably have more types - if value_type in ("vec2f", "float2"): + if value_type in ("vec2f", "float2", "float2d"): return [float(item) for item in value.split(",")] # Matrix should be always have square size of element 3x3, 4x4 # - are returned as list of lists - if value_type == "matrix": + if value_type in ("matrix", "matrixd"): output = [] current_index = -1 parts = value.split(",") @@ -198,7 +217,7 @@ def convert_value_by_type_name(value_type, value, logger=None): if value_type == "rational2i": return RationalToInt(value) - if value_type == "vector": + if value_type in ("vector", "vectord"): parts = [part.strip() for part in value.split(",")] output = [] for part in parts: @@ -380,6 +399,10 @@ def should_convert_for_ffmpeg(src_filepath): if not input_info: return None + subimages = input_info.get("subimages") + if subimages is not None and subimages > 1: + return True + # Check compression compression = input_info["attribs"].get("compression") if compression in ("dwaa", "dwab"): @@ -453,7 +476,7 @@ def convert_for_ffmpeg( if input_frame_start is not None and input_frame_end is not None: is_sequence = int(input_frame_end) != int(input_frame_start) - input_info = get_oiio_info_for_input(first_input_path) + input_info = get_oiio_info_for_input(first_input_path, logger=logger) # Change compression only if source compression is "dwaa" or "dwab" # - they're not supported in ffmpeg @@ -488,13 +511,21 @@ def convert_for_ffmpeg( input_channels.append(alpha) input_channels_str = ",".join(input_channels) - oiio_cmd.extend([ + subimages = input_info.get("subimages") + input_arg = "-i" + if subimages is None or subimages == 1: # Tell oiiotool which channels should be loaded # - other channels are not loaded to memory so helps to avoid memory # leak issues - "-i:ch={}".format(input_channels_str), first_input_path, + # - this option is crashing if used on multipart/subimages exrs + input_arg += ":ch={}".format(input_channels_str) + + oiio_cmd.extend([ + input_arg, first_input_path, # Tell oiiotool which channels should be put to top stack (and output) - "--ch", channels_arg + "--ch", channels_arg, + # Use first subimage + "--subimage", "0" 
]) # Add frame definitions to arguments @@ -588,7 +619,7 @@ def convert_input_paths_for_ffmpeg( " \".exr\" extension. Got \"{}\"." ).format(ext)) - input_info = get_oiio_info_for_input(first_input_path) + input_info = get_oiio_info_for_input(first_input_path, logger=logger) # Change compression only if source compression is "dwaa" or "dwab" # - they're not supported in ffmpeg @@ -606,12 +637,22 @@ def convert_input_paths_for_ffmpeg( red, green, blue, alpha = review_channels input_channels = [red, green, blue] + # TODO find subimage inder where rgba is available for multipart exrs channels_arg = "R={},G={},B={}".format(red, green, blue) if alpha is not None: channels_arg += ",A={}".format(alpha) input_channels.append(alpha) input_channels_str = ",".join(input_channels) + subimages = input_info.get("subimages") + input_arg = "-i" + if subimages is None or subimages == 1: + # Tell oiiotool which channels should be loaded + # - other channels are not loaded to memory so helps to avoid memory + # leak issues + # - this option is crashing if used on multipart exrs + input_arg += ":ch={}".format(input_channels_str) + for input_path in input_paths: # Prepare subprocess arguments oiio_cmd = [ @@ -625,13 +666,12 @@ def convert_input_paths_for_ffmpeg( oiio_cmd.extend(["--compression", compression]) oiio_cmd.extend([ - # Tell oiiotool which channels should be loaded - # - other channels are not loaded to memory so helps to - # avoid memory leak issues - "-i:ch={}".format(input_channels_str), input_path, + input_arg, input_path, # Tell oiiotool which channels should be put to top stack # (and output) - "--ch", channels_arg + "--ch", channels_arg, + # Use first subimage + "--subimage", "0" ]) for attr_name, attr_value in input_info["attribs"].items(): diff --git a/openpype/lib/vendor_bin_utils.py b/openpype/lib/vendor_bin_utils.py index 099f9a34ba0..b6797dbba0c 100644 --- a/openpype/lib/vendor_bin_utils.py +++ b/openpype/lib/vendor_bin_utils.py @@ -60,9 +60,10 @@ def find_executable(executable): path to file. Returns: - str: Full path to executable with extension (is file). - None: When the executable was not found. + Union[str, None]: Full path to executable with extension which was + found otherwise None. """ + # Skip if passed path is file if is_file_executable(executable): return executable @@ -70,24 +71,36 @@ def find_executable(executable): low_platform = platform.system().lower() _, ext = os.path.splitext(executable) - # Prepare variants for which it will be looked - variants = [executable] - # Add other extension variants only if passed executable does not have one - if not ext: + # Prepare extensions to check + exts = set() + if ext: + exts.add(ext.lower()) + + else: + # Add other possible extension variants only if passed executable + # does not have any if low_platform == "windows": - exts = [".exe", ".ps1", ".bat"] + exts |= {".exe", ".ps1", ".bat"} for ext in os.getenv("PATHEXT", "").split(os.pathsep): - ext = ext.lower() - if ext and ext not in exts: - exts.append(ext) - else: - exts = [".sh"] + exts.add(ext.lower()) - for ext in exts: - variant = executable + ext - if is_file_executable(variant): - return variant - variants.append(variant) + else: + exts |= {".sh"} + + # Executable is a path but there may be missing extension + # - this can happen primarily on windows where + # e.g. 
"ffmpeg" should be "ffmpeg.exe" + exe_dir, exe_filename = os.path.split(executable) + if exe_dir and os.path.isdir(exe_dir): + for filename in os.listdir(exe_dir): + filepath = os.path.join(exe_dir, filename) + basename, ext = os.path.splitext(filename) + if ( + basename == exe_filename + and ext.lower() in exts + and is_file_executable(filepath) + ): + return filepath # Get paths where to look for executable path_str = os.environ.get("PATH", None) @@ -97,13 +110,27 @@ def find_executable(executable): elif hasattr(os, "defpath"): path_str = os.defpath - if path_str: - paths = path_str.split(os.pathsep) - for path in paths: - for variant in variants: - filepath = os.path.abspath(os.path.join(path, variant)) - if is_file_executable(filepath): - return filepath + if not path_str: + return None + + paths = path_str.split(os.pathsep) + for path in paths: + if not os.path.isdir(path): + continue + for filename in os.listdir(path): + filepath = os.path.abspath(os.path.join(path, filename)) + # Filename matches executable exactly + if filename == executable and is_file_executable(filepath): + return filepath + + basename, ext = os.path.splitext(filename) + if ( + basename == executable + and ext.lower() in exts + and is_file_executable(filepath) + ): + return filepath + return None @@ -272,8 +299,8 @@ def get_oiio_tools_path(tool="oiiotool"): oiio_dir = get_vendor_bin_path("oiio") if platform.system().lower() == "linux": oiio_dir = os.path.join(oiio_dir, "bin") - default_path = os.path.join(oiio_dir, tool) - if _oiio_executable_validation(default_path): + default_path = find_executable(os.path.join(oiio_dir, tool)) + if default_path and _oiio_executable_validation(default_path): tool_executable_path = default_path # Look to PATH for the tool diff --git a/openpype/hosts/celaction/plugins/publish/submit_celaction_deadline.py b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py similarity index 73% rename from openpype/hosts/celaction/plugins/publish/submit_celaction_deadline.py rename to openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py index ea109e9445c..038ee4fc034 100644 --- a/openpype/hosts/celaction/plugins/publish/submit_celaction_deadline.py +++ b/openpype/modules/deadline/plugins/publish/submit_celaction_deadline.py @@ -2,16 +2,14 @@ import re import json import getpass - import requests import pyblish.api -class ExtractCelactionDeadline(pyblish.api.InstancePlugin): +class CelactionSubmitDeadline(pyblish.api.InstancePlugin): """Submit CelAction2D scene to Deadline - Renders are submitted to a Deadline Web Service as - supplied via settings key "DEADLINE_REST_URL". + Renders are submitted to a Deadline Web Service. 
""" @@ -26,27 +24,21 @@ class ExtractCelactionDeadline(pyblish.api.InstancePlugin): deadline_pool_secondary = "" deadline_group = "" deadline_chunk_size = 1 - - enviro_filter = [ - "FTRACK_API_USER", - "FTRACK_API_KEY", - "FTRACK_SERVER" - ] + deadline_job_delay = "00:00:08:00" def process(self, instance): instance.data["toBeRenderedOn"] = "deadline" context = instance.context - deadline_url = ( - context.data["system_settings"] - ["modules"] - ["deadline"] - ["DEADLINE_REST_URL"] - ) - assert deadline_url, "Requires DEADLINE_REST_URL" + # get default deadline webservice url from deadline module + deadline_url = instance.context.data["defaultDeadline"] + # if custom one is set in instance, use that + if instance.data.get("deadlineUrl"): + deadline_url = instance.data.get("deadlineUrl") + assert deadline_url, "Requires Deadline Webservice URL" self.deadline_url = "{}/api/jobs".format(deadline_url) - self._comment = context.data.get("comment", "") + self._comment = instance.data["comment"] self._deadline_user = context.data.get( "deadlineUser", getpass.getuser()) self._frame_start = int(instance.data["frameStart"]) @@ -82,6 +74,26 @@ def payload_submit(self, render_dir = os.path.normpath(os.path.dirname(render_path)) render_path = os.path.normpath(render_path) script_name = os.path.basename(script_path) + + for item in instance.context: + if "workfile" in item.data["family"]: + msg = "Workfile (scene) must be published along" + assert item.data["publish"] is True, msg + + template_data = item.data.get("anatomyData") + rep = item.data.get("representations")[0].get("name") + template_data["representation"] = rep + template_data["ext"] = rep + template_data["comment"] = None + anatomy_filled = instance.context.data["anatomy"].format( + template_data) + template_filled = anatomy_filled["publish"]["path"] + script_path = os.path.normpath(template_filled) + + self.log.info( + "Using published scene for render {}".format(script_path) + ) + jobname = "%s - %s" % (script_name, instance.name) output_filename_0 = self.preview_fname(render_path) @@ -98,7 +110,7 @@ def payload_submit(self, chunk_size = self.deadline_chunk_size # search for %02d pattern in name, and padding number - search_results = re.search(r"(.%0)(\d)(d)[._]", render_path).groups() + search_results = re.search(r"(%0)(\d)(d)[._]", render_path).groups() split_patern = "".join(search_results) padding_number = int(search_results[1]) @@ -145,10 +157,11 @@ def payload_submit(self, # frames from Deadline Monitor "OutputFilename0": output_filename_0.replace("\\", "/"), - # # Asset dependency to wait for at least the scene file to sync. + # # Asset dependency to wait for at least + # the scene file to sync. 
# "AssetDependency0": script_path "ScheduledType": "Once", - "JobDelay": "00:00:08:00" + "JobDelay": self.deadline_job_delay }, "PluginInfo": { # Input @@ -173,19 +186,6 @@ def payload_submit(self, plugin = payload["JobInfo"]["Plugin"] self.log.info("using render plugin : {}".format(plugin)) - i = 0 - for key, values in dict(os.environ).items(): - if key.upper() in self.enviro_filter: - payload["JobInfo"].update( - { - "EnvironmentKeyValue%d" - % i: "{key}={value}".format( - key=key, value=values - ) - } - ) - i += 1 - self.log.info("Submitting..") self.log.info(json.dumps(payload, indent=4, sort_keys=True)) @@ -193,10 +193,15 @@ def payload_submit(self, self.expected_files(instance, render_path) self.log.debug("__ expectedFiles: `{}`".format( instance.data["expectedFiles"])) + response = requests.post(self.deadline_url, json=payload) if not response.ok: - raise Exception(response.text) + self.log.error( + "Submission failed! [{}] {}".format( + response.status_code, response.content)) + self.log.debug(payload) + raise SystemExit(response.text) return response @@ -234,32 +239,29 @@ def preview_fname(self, path): split_path = path.split(split_patern) hashes = "#" * int(search_results[1]) return "".join([split_path[0], hashes, split_path[-1]]) - if "#" in path: - self.log.debug("_ path: `{}`".format(path)) - return path - else: - return path - def expected_files(self, - instance, - path): + self.log.debug("_ path: `{}`".format(path)) + return path + + def expected_files(self, instance, filepath): """ Create expected files in instance data """ if not instance.data.get("expectedFiles"): - instance.data["expectedFiles"] = list() + instance.data["expectedFiles"] = [] - dir = os.path.dirname(path) - file = os.path.basename(path) + dirpath = os.path.dirname(filepath) + filename = os.path.basename(filepath) - if "#" in file: - pparts = file.split("#") + if "#" in filename: + pparts = filename.split("#") padding = "%0{}d".format(len(pparts) - 1) - file = pparts[0] + padding + pparts[-1] + filename = pparts[0] + padding + pparts[-1] - if "%" not in file: - instance.data["expectedFiles"].append(path) + if "%" not in filename: + instance.data["expectedFiles"].append(filepath) return for i in range(self._frame_start, (self._frame_end + 1)): instance.data["expectedFiles"].append( - os.path.join(dir, (file % i)).replace("\\", "/")) + os.path.join(dirpath, (filename % i)).replace("\\", "/") + ) diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index 35f2532c16a..45688e85849 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -241,6 +241,7 @@ def _submit_deadline_post_job(self, instance, job, instances): environment["AVALON_ASSET"] = legacy_io.Session["AVALON_ASSET"] environment["AVALON_TASK"] = legacy_io.Session["AVALON_TASK"] environment["AVALON_APP_NAME"] = os.environ.get("AVALON_APP_NAME") + environment["OPENPYPE_VERSION"] = os.environ.get("OPENPYPE_VERSION") environment["OPENPYPE_LOG_NO_COLORS"] = "1" environment["OPENPYPE_USERNAME"] = instance.context.data["user"] environment["OPENPYPE_PUBLISH_JOB"] = "1" @@ -494,12 +495,13 @@ def _create_instances_for_aov(self, instance_data, exp_files): else: render_file_name = os.path.basename(col) aov_patterns = self.aov_filter - preview = match_aov_pattern(app, aov_patterns, render_file_name) + preview = match_aov_pattern(app, aov_patterns, render_file_name) # toggle preview 
on if multipart is on + if instance_data.get("multipartExr"): preview = True - + self.log.debug("preview:{}".format(preview)) new_instance = deepcopy(instance_data) new_instance["subset"] = subset_name new_instance["subsetGroup"] = group_name @@ -542,7 +544,7 @@ def _create_instances_for_aov(self, instance_data, exp_files): if new_instance.get("extendFrames", False): self._copy_extend_frames(new_instance, rep) instances.append(new_instance) - + self.log.debug("instances:{}".format(instances)) return instances def _get_representations(self, instance, exp_files): @@ -775,6 +777,7 @@ def process(self, instance): "handleEnd": handle_end, "frameStartHandle": start - handle_start, "frameEndHandle": end + handle_end, + "comment": instance.data["comment"], "fps": fps, "source": source, "extendFrames": data.get("extendFrames"), diff --git a/openpype/modules/deadline/repository/custom/plugins/CelAction/CelAction.ico b/openpype/modules/deadline/repository/custom/plugins/CelAction/CelAction.ico new file mode 100644 index 0000000000000000000000000000000000000000..39d61592fe1addb07ed3ef93de362370485a23b9 GIT binary patch literal 103192 zcmeHQ2V4_L7oQ+17Eb(ZC>9jYh9aJ*CI!Yvtch-C}%GyqF4^>y<&9SCuIvvd)< zyn}(Z{5E{Be(h$pp=tdg_5&PnE+#$Pwd3M5(5L>QAp?FM?UI3!p_ z9*8`qYq`0$MK^tmffk0xHhFINd+5)x4>?bta1u{GnrxB#e4a(@6_1Wb=1x7gZK{30 z4oB?j%rLKBZ~mJ$-ri`WK@-vR>td5!SF1CNKeFsoyjQHrad{on_@E2l@k07({*I8r zZGeF?9Co)mCRH~+_Q~SvPrD~HFB?@a#(9pO(Ph+?NzK;wM@JHp2Mv!q>)T;Z z#><(HHePt##ChLlCj;b?y!?EJ9ZTk>@(n{fI#2RBzwOtg57Vr5QCRv(;fPt++K;%g zvY&B^9%{~x7q!>>>xBt2pTItBtUGNxvcIv&m(p!*FmXq=j?@FEe@vh{l4@JZ@*+$18?8#QTLrLpi{|1*jAsSr?~a%;lS=?HIvnAP4M_@ zu9=2A8Zj-5;zWBs1UOD?mc7F6gHf{k@oq+eBU+@e%$Na*bEpZ;g`<6Zmn4Z&jE1K8 zHg!%ozio%;Q`DvWK{gE=n(8tw`nc_-a*jQn*r2ajbStY@)%N#XVbuHYAUhz8}v zi9X+{?c`gx+57tKHm;9o%rNgV%gxgR?XO{BJ1C;BM{v@d_p{wRx1!D0vd4~e$cTC6 z>)!K``|ro8VeAn#ZS)$Bt5##>`!pZXn?F1D{>g1cr`*Gr>@%j06LSM%wrv-WPR!#6 zj%anu7`>fk&rUEqxY%{C`~IxqsQ2HsXJu7!ePb zpKmyboztS(L)YQ#_VrCYf(==nso?0t;Vaty8q{&hy?}`Q|BJQ_)$_2_Te$Q7`Heko zkB6RW$uL-EVcTy28qhS_Yv<>lP7RaicYY8us{YAK!OksD3w!+#;L~#XEVs>i41>O{ zALkfZ#!vC%x+`+We>=F3*+M*A;_75%#j?O)Fr zwZ?5#yNL7V$8GmKdvqf^e(=)IW_B+&da@EjT!x0Zoci)<&zpmKy1f{8B1AR=n(sqX zU(UX`sYiI%{GcWlwmP?J8`s79Xwv-b?XkLbijh@GmZg*>T>xXo= zqz!(wu%2;;);*~CyVC}{xEL*TywA=*ckJmOLD5IIvyKVtqp|3nOYiWQMb6^5q_eCv ze(1kxU>4g}qvO-I);on5=CyrWjavH)3hy^h>^Eh_Y`0+zO_A-Vo{M)49dmbNdU8*% zcu_*Dr6(3MMK;44Gg6;@W<9pu;?Q>mH!f3fHte^JE1F$vojo5|G1Nm{FX-pY*je-T z-M$`EFZ_^mLRjByELvwCUT@TnMs~JI7u!U8%^r+`dpCXGEh8-*?=* z+n`g8l}w$n$p32ZklksUs$1*3OtWME`RAUG?*|#B2(#;WzjnLD8a|)_V`0Cui^G4- zYT}Ul_`(CLYvRVkR&N?~DJ6&9Ib1KwZTw)gdGov;x$EW&8a)<Q+u<%9EzvHw^!@3A-{!ht1JCb$ed+iFS_b>s(HV4?Bdz1qwA~_lRHOyb$Gkm zV}1=!!DRbZ;ifzL9zXH2{#m!ve^9@N+s5BDt~bDl5gX%Sz*?U+xbDJQu`!uzMtX*I zi<>*!Ekf7SW2(re`2e=bAV4u>BZ|p2nW=~TH*P@DUWRX5dW@U<_tsfU#(2)=&g-7m z4j6BPMC1MZhu-w-`-6^a|9~*F7^@~TmW4GGTnTKpa&G!R$(<%x1xB-n8=El(EweTc zXglrdtsKYD_CrH|Ocags>AnBsmQZV+$;g?nBd=^1j*SrAVMlq6cDb<1LYLWL-gf=| z?t^k7=d@dJ(=+(Z|Cs((+dD_M`Z@BS&Q=$fCH)bh&qz%ec5cQ^UC!~Lfu@^{beO$% zUz^0p+s`rNcVyaMMjx6uIIPO|dEdlg*N}SQp>ZD#AHH7QY4xU*G=0a(_CvDc{**&O3LfEm-DxIww5PXK&?8ZfZp?t?%lpF z#o$cy<`19c$5{8eb@4;|U&+ardfW_a#_DC;yOAI6)u*LRy%Wz*m>?a8HLPLtKZd&q z`%md@xhtz)2(>e1f=!pZCrADD)WP+YcfBUdu8^qcF?4n*ruyzI7^on->2 zK+5qy2X%UFfTp%a{YI}69567vlX7(L?JuXn){{;SyBLtIb81q?nD9p|p2=TYm0`>B(nMtog^zK%MAQ#|K!NOuEbMeX;^{$K2-Cw_sy;FHS# z`mlkXFE3B3;l|v*Hzf8)$(d)q^_lRPp=*E7*)Pqix0%Md6!LG{`+eK;1Ap0KH2)W` zx#zuuCiOekV(26o*PQp^*(iND8MV@_?cih6kOP;JaTo8{{I=w@_nv?m&WHD&p75rr zS2eG!;U=ct&n_F8>6`5^IWBK{{j=PQV@?{-um7LpIN#oe=!bAQGg0>zT{c=<_BRnbm 
zDC}}G*=+Oi`AIf+53!xIbWGPiF+j%?XOC;cok&HnA8$CcX;_nNvBdhat?)}UuQJrusf ze_c$H*_~&*=VxE1!jc|rpLBkAC!SlkW23yApPq4ZeQ;|ea@Z9#X|1VqORi(J(+6+A z$nS9}ePWR7jY~dD?k=yjs6P_STpVb+@Z_Vgs3uMBrT8b+zHJ}R%V=Y$%Un9~@+5R@ zJ7WF%%z0dDWbl{36%!i?lKrFfZ3kW+R!4`~b$VvtFAG1M*^@G1ot|lskz1#^`Ad_3 z`J;326kU%gBbHjcuF;^k+orc;=Ca2;NqaIr|LKia>>drC7#$sSPHH7E)9yAid3n=I zJ`EkiCNx_KLpRTQbilI*gL9pKbLP32F>Q668qWQDaa+oAvqQrB;U@X3F0!9_&v};S z9kA?Zde)9>^YblM8^-TCK7Z=RFKrDN=I5RXr?u_xbgv`-EcaBpYjgKNn*{bhYcCG6 z*%iZBdSj)y!-H8ahO->+AHF`tYxMM4OP+DPQ>I>@!?yQ1`(Vz@&_9#1@2x%X>ob#U zBHOdqFGgAGTJ+H|?y#cS@4fd#ZrRGLxxv%ok1nHlj_18yGcs?i2^gP~@B5DRAjzgj zw>Y0mm-H_*qOG>Hq0kqbKv z1wfm@@OwSHbNm9s+=Uh^FudoiD|`!WeH+Qk$TwS-32O_9c}qMSd1;lN{e!09`X}c<2DKCi=s{!nXR@f`OIp&_*UREZ5_!TjGEmjT;r2>F_t|DPfEfEd-X9iR(y(8W&B*^zRR z@TY6M`-=0#P}KbAwav?Ny% zf#TNx1&Q-0(Wekfl*9byDDTZ?igT;3(y>QTJ`z`rfHM6HUe2Pv^&{`YexiTXs#Pmj znf~+Bk9_Our^NhJqJOnCthx}Z)^sgG9 zs-8>qU)43NT0WwG)%aBPT%!M~u3^>k5&f&ir>f@?{a1AjtCo-GUo}2eJ(uXes%u!a zd_@1M@u}*$ME_M?!>Z*Y`d5ulRnH~*uj(3BEuXgP{|oOG@&8b*TD5YO>7O6>+c)q3 zBYf>a^sknNRTomGf9yxgkF}?;j~(^}`__p6+)=6SiT|%^T`S8iPXF5K6Ru1~l2$wd zME}KO^F8Dc{eKTyE?!2W|KhRv9&%*$&%I|%3E|xu?xUVX=2x6swIz8a%>{*Twm@A* zk@~!ternAld9j0v)Rpx8%B4vCymvb(UX+Dg`R_qznv{{&KYpJ|EKHZ&_u-HmX)cE= zo)2;#bb$9RUYB@pn4fW)Vu$M%sV_)cQes_7#HM)BRz>P^A`Gc7+=Qa_#rYKXVdui~ zn#AUNXp7cAFUC$D+tTVmzBZMgd0x2aNepsAfF#Us!*@={U{!%M65eQErZdy|cq_IG*uE^S{`xxD1O4?_vE<2~3w;kR3=vh3fx zY%oq5gWlzIPul^E2^Yvd%2n4w;2iG&x=yF{&AHp0;=;W@8}9$;G-3l)Q~!#(=Or$r zgs{#cgnM;?;2p(*_1!`nfH`5#Kd5fiMB@9C3Xkca+rsa0aigszddIpiq`WMO1L-n0 z5J7C9YS;?QoAEWjP`5l-S1HuHtPQ|=8nJ<@p>stkVEixq9tQ`W?+JzfQUZ9MSA;n6 z6z5T|LR$g5Nr3x>c8cwhxOO2>wKbtpRy$X*lF_??93 zkW~|z??);;rvp7ksG)CVV?sKy0qwUHsSP~&&juJ2=Wa8K1J6>G(tQ(ITS$jBg3j8H z!uKtK#0EmpW!eVt{l5@=zm(n=z&!%KXQ0bMh`-}3`HtK7jiPo-kJkn8dLOOVl5Btv zK9_V^VguT(dzyQR4TPdIv|jO=umF55iyH?NZ2;z!ztL@xPmAbdnG5F(+Cu?f!s&Mj{}((6m4KU-3|%07Xh#V7y4Sj20uFiE24NC_!vOp zGIV*UyHI;2C!Es=U_H`0#rowXEuy#yPD+WR@V*e#-AuPbLhVHW^f(V}AivNKWS$?D zltyeod-X2MIiA;*!v=^ynJk!E`dexPqLR*eW%)1;_z-_GEmgb>+h7CZK)-BS$6R=y zm!E#P7&!v2l{P^9$+T35uZ}* zXUa>W@O^`V4G_Q5id9jCw~lQ9t_P0_^%|}ttK*V>XX0x~g)>Pys89rmPho|^s}+rx z*9Kr5vCs-Z36sSlkQ0RTU_6)cD}{Y!RQZ+4?=wey3X4Tndoq;y6yoDnVH;j}meq&|fQ=sBS(e=;8U70~5y2-lH@#kEV)wF?2b7O3_q%+H~)ZP5CM z^1<3AA>prTAL3x@Si`K5xzMI3IlP<6inSJSMlF|eLf&f8)AV3fx z2oMAa0t5kq06~BtKoB4Z5CjMU1Ob8oL4Y7Y5FiK;1PB5I0fGQQfFM8+AP5iy2m%BF zf&f9_>mvZKx{#x^2q0t#n?$(N)x-!H=pkfF$3+O4>&nNCOBt7|uh>3(OdR)5cUSHZ z+Q)}1|0GWED6>5X+;gbc@JAo4!j=kBQtQz<1tGJheB7g&d>k_R3y&;3&}GeK=KTT6c~W>MXo(ckpZRx1E~rOuoM^&C@>&WV4z5cGJ}tV zldmpu=^2n$fg%DT1%$*3aY_ME%EMh=1x)!U^-dv9qRJ7#8(i1{c>pnxG5m+a4xt8h z3P=(s{|EvE0fGQQfFM8+AP5iy%8dZLU)U=*3^}Q!P}CN_kF5)3e*qc+@1^O%UPMMJ z>)tJR)By0#RD=J=tPoTO%6*n8n+NY` z;=MfsWzv-EKMW;S5c}nB7WCQ%`1c2jQAKdRcY(5fp$x_=0)CHxa{|jBrK%p}1p!W` zX5I^1lzI#`# za6HrqvlY+?$ex4c=fR%4nm+i?Cu@!TeAstd(}y5wA(3BtZdHKkYNScp{)zsH{gd-w zvj~v!kBtBH+)5*Zbp=AYt0%zw8norPHV3Jk~!cbOA22HH9@?`wuQ3ewktq& z8flW&KjyI_Nh8cwKqC_QnR*mAvTLFI-1}{d*Z}u_o5Fma5w$-S@SJYd$K@-xrNib#p`vMAo8UQ!I2 zQC~Q5a``yUxznfs11KlXXHog-$G&y>-?GDXDPD9RiU)g)OU5nwz8$Aa(r|hKaie~c z$b&KB+-XplUYIX)o^bCOQ*1c5pe#=##;V~)U|q60tj%o1>oc&Hy-)d(X^9BsViROti}lXWyjbwP0dXG1eKrwezyfS~m8)?rs0=b%fvj9^tUbkpH7ZQp zL}!Y9`v;5x7xJt8o|_<}4c0dhmRXT_kTzTkR9P+-0R#N}GK&p!H{P2q6z`pvgt#we zVLXJ8KdxLIXhCHd^SXen8QdqoP(qlq(cd58z1fU05CH~GS|CQbSOgi{f~@cGclU&% z+(Hca@Am=*3^C--DOU$tP#I)w2^b`CVBX6A7y#cN@Rtzx=@@!!aMJ=Y%EcmJU<4S% z0nsumh8xS(ffiJTXZnpme#y6~IB=hfzeh?F8}19iU&i}e_UF+A#PI#@dVs+_4y*&< z?{U&&qTo{~#lBSsW8eqh->IgFzP{Bq;9Mx2BMZ0*|6u_4`E0n)hy0UZD>c4V{x!iD z$X65W;0nH{$KPQQ!gqKu@pHlf1Ic>N6)00%6McQFZD1XPu1~{tP;o4z<6+oVzE%D; 
z!52Q)3owA|0;~(MZyWqax9VaX?%**)6McQFZQwB<#=YJ68i3~q^q9z(uI~W`?%yi! zn&b=Q+>C92|L8>TI(uyo;MY;j8G)_tL zDX)SKH1Z$s6JZYI1XQ?BL|d{?1i(QKf&f8)AW*3Yz`BSD&}E?eO6VM{nY4oR^SBJG z9km5NsY6Q2;kwtM>{nk~f__f`(d%1^;SR{dM5iBv->x!oI-L%$eQjj`FKblr18ezc z7LZt42AAC<{Y~>M8}9hp{DW^=HQ;9`*oC7K8VB!U<7W~BAa4lkhQx=@K}k7BSg#%q z&o8QfZT_{rKfr!IP?Cn|tNRTA-`9XBLE?N$^!a|7NSNYEp9A4vTc0%lqE9!=JYW98 zi6{J*QE>!+3Vhp%{glys@bfXr_@nLTZ+_P0GLJtZ*4;`UfABjwyl5*R>rxZvPQyy) zpB=7Kz(Y9r9N_mTXrg^Fm^J@P!@nhSQt;D|FIOqoue`TjRNjYul06{s`%+@TdrB-~ z1LabJq(u^5@%w>d@TZZa=YOK`{u5XReC@?)(sySng@61`5auoKy$|rDBYE#fEO=YM zFYqczoF|oGEW8)=u^=7Zk>bS;hIEERhP)4d<1&@OKi<>iDNwwW@b8y0aXS4uq$Max z$7SjIu4Nqu%Tf>e#Xi~TKu5u+rqI6(_S!14hZxN55m1qIWo664OKIx>&vxJ&$L6OW zrMLy}zFq?UYL&K~$|?{1`(U3>0p}?tV8>G2nEu#)gW+CFD}0-PPrOIlKR}XmQzpEn zEizzxf%{f}&V3W`F**hB=$wc5QOjGnwF-m$|?(dRtFh;xv@hi0qY*cXWgN=&nIEKdI5Q@D~qqv^b5A61N_Ra zRng}~Dy08dnzkydEcjkxzr#)JJB`HH*N_kQ#QYEY@f;@&&Ihg7SMGB#$N)ZboXD{|UU}063;I%+FUd!&aM%hs!6Ej>f>40HtsoR( z?<)w!*o_N93j2ZK5Pzu%hfLdo5I2D10%$-ILIXGyOG8SUP8}{tr{KW(>V?=(%NWQG zsL)T#fcR+v9ONhi0x(x;2lpE0GSnK<8Ui&2Y9*6y4)G4~&YH#!nB$EHlD%iZzn22x z{gd&&@C$(y^QUWy0cg87&}$&PHcHbn_vwhUM+m>q3QmY;^&6KS1X_o9ZHzI8lDdxrUWnkfOWJQlo0ms!EwCrJm=OA65k(? zUn9@1z#k2-qk6#@yB5e2$VwXG-^Ir@^xOpc-o=M!soa;d^I+{~tfs~kb=!gdC&Jog zKHeJ!_VoV(agANq)aD@+jp0Q1$rFHI0WW2Z7;tb_x2~qjf!ss5&nNQ)+!qx0#h=)| zS81wkb=v{Ce};ZJ92f^UFb=T8t8dWcJVzZqfCo!>?>z_a7sdbg1n^LE=ht3C0u0PK zk8Lt|$u1Np$^z@+5X||$>YgmnpDEz~p7&}QJfoBNG<@~%Jb~P8I8i?(bK^(iv0;=t z`qOk7z@aJd(L>YilJ>q21d7b5n6Pf)Atjti|5g;GAm=gnjq-0sSE*{2Qm0_$&y@`~b8V2;avJ zlM?=o(=a3|4=7{7e>$F>mv&f=_|F{KkY}l~GGsy->%8~7<&^=kyfi;Np4+Ka|J|;l z^*{KkU7!1~6U-S$O8f))Psd8`oACbKYWDv#|LK@ZaIbej@N%vn?4f*spD^7I$DzJS zN&ItT2HfC8cEi3{CF)99TQJ@?;=Y{wnD_dxL};U>vN9#5K^q2~du{IVUVFTO>v)-b z0-*1@oM`JXZrpHu&t0bdP@tY3=TYx1Sf-MAuB`lmwNHdt;0;wAeTMZ>7LdRlA(1UY JVj#NS{{bY%R3rcZ literal 0 HcmV?d00001 diff --git a/openpype/modules/deadline/repository/custom/plugins/CelAction/CelAction.param b/openpype/modules/deadline/repository/custom/plugins/CelAction/CelAction.param new file mode 100644 index 00000000000..24c59d20054 --- /dev/null +++ b/openpype/modules/deadline/repository/custom/plugins/CelAction/CelAction.param @@ -0,0 +1,38 @@ +[About] +Type=label +Label=About +Category=About Plugin +CategoryOrder=-1 +Index=0 +Default=Celaction Plugin for Deadline +Description=Not configurable + +[ConcurrentTasks] +Type=label +Label=ConcurrentTasks +Category=About Plugin +CategoryOrder=-1 +Index=0 +Default=True +Description=Not configurable + +[Executable] +Type=filename +Label=Executable +Category=Config +CategoryOrder=0 +CategoryIndex=0 +Description=The command executable to run +Required=false +DisableIfBlank=true + +[RenderNameSeparator] +Type=string +Label=RenderNameSeparator +Category=Config +CategoryOrder=0 +CategoryIndex=1 +Description=The separator to use for naming +Required=false +DisableIfBlank=true +Default=. diff --git a/openpype/modules/deadline/repository/custom/plugins/CelAction/CelAction.py b/openpype/modules/deadline/repository/custom/plugins/CelAction/CelAction.py new file mode 100644 index 00000000000..2d0edd3dca3 --- /dev/null +++ b/openpype/modules/deadline/repository/custom/plugins/CelAction/CelAction.py @@ -0,0 +1,122 @@ +from System.Text.RegularExpressions import * + +from Deadline.Plugins import * +from Deadline.Scripting import * + +import _winreg + +###################################################################### +# This is the function that Deadline calls to get an instance of the +# main DeadlinePlugin class. 
+###################################################################### + + +def GetDeadlinePlugin(): + return CelActionPlugin() + + +def CleanupDeadlinePlugin(deadlinePlugin): + deadlinePlugin.Cleanup() + +###################################################################### +# This is the main DeadlinePlugin class for the CelAction plugin. +###################################################################### + + +class CelActionPlugin(DeadlinePlugin): + + def __init__(self): + self.InitializeProcessCallback += self.InitializeProcess + self.RenderExecutableCallback += self.RenderExecutable + self.RenderArgumentCallback += self.RenderArgument + self.StartupDirectoryCallback += self.StartupDirectory + + def Cleanup(self): + for stdoutHandler in self.StdoutHandlers: + del stdoutHandler.HandleCallback + + del self.InitializeProcessCallback + del self.RenderExecutableCallback + del self.RenderArgumentCallback + del self.StartupDirectoryCallback + + def GetCelActionRegistryKey(self): + # Modify registry for frame separation + path = r'Software\CelAction\CelAction2D\User Settings' + _winreg.CreateKey(_winreg.HKEY_CURRENT_USER, path) + regKey = _winreg.OpenKey(_winreg.HKEY_CURRENT_USER, path, 0, + _winreg.KEY_ALL_ACCESS) + return regKey + + def GetSeparatorValue(self, regKey): + useSeparator, _ = _winreg.QueryValueEx( + regKey, 'RenderNameUseSeparator') + separator, _ = _winreg.QueryValueEx(regKey, 'RenderNameSeparator') + + return useSeparator, separator + + def SetSeparatorValue(self, regKey, useSeparator, separator): + _winreg.SetValueEx(regKey, 'RenderNameUseSeparator', + 0, _winreg.REG_DWORD, useSeparator) + _winreg.SetValueEx(regKey, 'RenderNameSeparator', + 0, _winreg.REG_SZ, separator) + + def InitializeProcess(self): + # Set the plugin specific settings. + self.SingleFramesOnly = False + + # Set the process specific settings. 
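A standalone, Windows-only sketch of reading back the values that GetCelActionRegistryKey and SetSeparatorValue manage above. It uses the same Python 2 _winreg module as this Deadline plugin; the printed values are only examples:

    import _winreg

    path = r"Software\CelAction\CelAction2D\User Settings"
    key = _winreg.OpenKey(
        _winreg.HKEY_CURRENT_USER, path, 0, _winreg.KEY_READ)

    use_separator, _ = _winreg.QueryValueEx(key, "RenderNameUseSeparator")
    separator, _ = _winreg.QueryValueEx(key, "RenderNameSeparator")
    print("{} {}".format(use_separator, separator))  # e.g. "1 ."
    _winreg.CloseKey(key)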
+ self.StdoutHandling = True + self.PopupHandling = True + + # Ignore 'celaction' Pop-up dialog + self.AddPopupIgnorer(".*Rendering.*") + self.AddPopupIgnorer(".*AutoRender.*") + + # Ignore 'celaction' Pop-up dialog + self.AddPopupIgnorer(".*Wait.*") + + # Ignore 'celaction' Pop-up dialog + self.AddPopupIgnorer(".*Timeline Scrub.*") + + celActionRegKey = self.GetCelActionRegistryKey() + + self.SetSeparatorValue(celActionRegKey, 1, self.GetConfigEntryWithDefault( + "RenderNameSeparator", ".").strip()) + + def RenderExecutable(self): + return RepositoryUtils.CheckPathMapping(self.GetConfigEntry("Executable").strip()) + + def RenderArgument(self): + arguments = RepositoryUtils.CheckPathMapping( + self.GetPluginInfoEntry("Arguments").strip()) + arguments = arguments.replace( + "", str(self.GetStartFrame())) + arguments = arguments.replace("", str(self.GetEndFrame())) + arguments = self.ReplacePaddedFrame( + arguments, "", self.GetStartFrame()) + arguments = self.ReplacePaddedFrame( + arguments, "", self.GetEndFrame()) + arguments = arguments.replace("", "\"") + return arguments + + def StartupDirectory(self): + return self.GetPluginInfoEntryWithDefault("StartupDirectory", "").strip() + + def ReplacePaddedFrame(self, arguments, pattern, frame): + frameRegex = Regex(pattern) + while True: + frameMatch = frameRegex.Match(arguments) + if frameMatch.Success: + paddingSize = int(frameMatch.Groups[1].Value) + if paddingSize > 0: + padding = StringUtils.ToZeroPaddedString( + frame, paddingSize, False) + else: + padding = str(frame) + arguments = arguments.replace( + frameMatch.Groups[0].Value, padding) + else: + break + + return arguments diff --git a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py index 9b35c9502dc..40193bac71b 100644 --- a/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py +++ b/openpype/modules/deadline/repository/custom/plugins/GlobalJobPreLoad.py @@ -14,6 +14,137 @@ ProcessUtils, ) +VERSION_REGEX = re.compile( + r"(?P0|[1-9]\d*)" + r"\.(?P0|[1-9]\d*)" + r"\.(?P0|[1-9]\d*)" + r"(?:-(?P[a-zA-Z\d\-.]*))?" + r"(?:\+(?P[a-zA-Z\d\-.]*))?" +) + + +class OpenPypeVersion: + """Fake semver version class for OpenPype version purposes. + + The version + """ + def __init__(self, major, minor, patch, prerelease, origin=None): + self.major = major + self.minor = minor + self.patch = patch + self.prerelease = prerelease + + is_valid = True + if not major or not minor or not patch: + is_valid = False + self.is_valid = is_valid + + if origin is None: + base = "{}.{}.{}".format(str(major), str(minor), str(patch)) + if not prerelease: + origin = base + else: + origin = "{}-{}".format(base, str(prerelease)) + + self.origin = origin + + @classmethod + def from_string(cls, version): + """Create an object of version from string. + + Args: + version (str): Version as a string. + + Returns: + Union[OpenPypeVersion, None]: Version object if input is nonempty + string otherwise None. 
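A quick look at what the version expression above is meant to capture. The angle-bracketed group names appear to have been stripped in this hunk, so the usual semver-style names (major, minor, patch, prerelease, buildmetadata) are assumed here; findall() returns one tuple per match in that order:

    print(VERSION_REGEX.findall("3.14.2"))
    # [('3', '14', '2', '', '')]

    print(VERSION_REGEX.findall("3.15.0-nightly.2+build7"))
    # [('3', '15', '0', 'nightly.2', 'build7')]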
+ """ + + if not version: + return None + valid_parts = VERSION_REGEX.findall(version) + if len(valid_parts) != 1: + # Return invalid version with filled 'origin' attribute + return cls(None, None, None, None, origin=str(version)) + + # Unpack found version + major, minor, patch, pre, post = valid_parts[0] + prerelease = pre + # Post release is not important anymore and should be considered as + # part of prerelease + # - comparison is implemented to find suitable build and builds should + # never contain prerelease part so "not proper" parsing is + # acceptable for this use case. + if post: + prerelease = "{}+{}".format(pre, post) + + return cls( + int(major), int(minor), int(patch), prerelease, origin=version + ) + + def has_compatible_release(self, other): + """Version has compatible release as other version. + + Both major and minor versions must be exactly the same. In that case + a build can be considered as release compatible with any version. + + Args: + other (OpenPypeVersion): Other version. + + Returns: + bool: Version is release compatible with other version. + """ + + if self.is_valid and other.is_valid: + return self.major == other.major and self.minor == other.minor + return False + + def __bool__(self): + return self.is_valid + + def __repr__(self): + return "<{} {}>".format(self.__class__.__name__, self.origin) + + def __eq__(self, other): + if not isinstance(other, self.__class__): + return self.origin == other + return self.origin == other.origin + + def __lt__(self, other): + if not isinstance(other, self.__class__): + return None + + if not self.is_valid: + return True + + if not other.is_valid: + return False + + if self.origin == other.origin: + return None + + same_major = self.major == other.major + if not same_major: + return self.major < other.major + + same_minor = self.minor == other.minor + if not same_minor: + return self.minor < other.minor + + same_patch = self.patch == other.patch + if not same_patch: + return self.patch < other.patch + + if not self.prerelease: + return False + + if not other.prerelease: + return True + + pres = [self.prerelease, other.prerelease] + pres.sort() + return pres[0] == self.prerelease + def get_openpype_version_from_path(path, build=True): """Get OpenPype version from provided path. @@ -21,9 +152,9 @@ def get_openpype_version_from_path(path, build=True): build (bool, optional): Get only builds, not sources Returns: - str or None: version of OpenPype if found. - + Union[OpenPypeVersion, None]: version of OpenPype if found. 
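A few hedged examples of how the fake-semver class above behaves. The version strings are made up, and only the behaviour needed by the build lookup (ordering plus major/minor compatibility) is exercised:

    requested = OpenPypeVersion.from_string("3.14.2")
    found = OpenPypeVersion.from_string("3.14.5-nightly.1")

    print(requested.has_compatible_release(found))  # True, same major.minor
    print(found < requested)                        # False, 3.14.5 sorts higher
    print(requested < found)                        # True

    broken = OpenPypeVersion.from_string("not-a-version")
    print(broken.is_valid)  # False, the raw string is kept in .origin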
""" + # fix path for application bundle on macos if platform.system().lower() == "darwin": path = os.path.join(path, "Contents", "MacOS", "lib", "Python") @@ -46,8 +177,10 @@ def get_openpype_version_from_path(path, build=True): with open(version_file, "r") as vf: exec(vf.read(), version) - version_match = re.search(r"(\d+\.\d+.\d+).*", version["__version__"]) - return version_match[1] + version_str = version.get("__version__") + if version_str: + return OpenPypeVersion.from_string(version_str) + return None def get_openpype_executable(): @@ -59,6 +192,91 @@ def get_openpype_executable(): return exe_list, dir_list +def get_openpype_versions(dir_list): + print(">>> Getting OpenPype executable ...") + openpype_versions = [] + + install_dir = DirectoryUtils.SearchDirectoryList(dir_list) + if install_dir: + print("--- Looking for OpenPype at: {}".format(install_dir)) + sub_dirs = [ + f.path for f in os.scandir(install_dir) + if f.is_dir() + ] + for subdir in sub_dirs: + version = get_openpype_version_from_path(subdir) + if not version: + continue + print(" - found: {} - {}".format(version, subdir)) + openpype_versions.append((version, subdir)) + return openpype_versions + + +def get_requested_openpype_executable( + exe, dir_list, requested_version +): + requested_version_obj = OpenPypeVersion.from_string(requested_version) + if not requested_version_obj: + print(( + ">>> Requested version does not match version regex \"{}\"" + ).format(VERSION_REGEX)) + return None + + print(( + ">>> Scanning for compatible requested version {}" + ).format(requested_version)) + openpype_versions = get_openpype_versions(dir_list) + if not openpype_versions: + return None + + # if looking for requested compatible version, + # add the implicitly specified to the list too. + if exe: + exe_dir = os.path.dirname(exe) + print("Looking for OpenPype at: {}".format(exe_dir)) + version = get_openpype_version_from_path(exe_dir) + if version: + print(" - found: {} - {}".format(version, exe_dir)) + openpype_versions.append((version, exe_dir)) + + matching_item = None + compatible_versions = [] + for version_item in openpype_versions: + version, version_dir = version_item + if requested_version_obj.has_compatible_release(version): + compatible_versions.append(version_item) + if version == requested_version_obj: + # Store version item if version match exactly + # - break if is found matching version + matching_item = version_item + break + + if not compatible_versions: + return None + + compatible_versions.sort(key=lambda item: item[0]) + if matching_item: + version, version_dir = matching_item + print(( + "*** Found exact match build version {} in {}" + ).format(version_dir, version)) + + else: + version, version_dir = compatible_versions[-1] + + print(( + "*** Latest compatible version found is {} in {}" + ).format(version_dir, version)) + + # create list of executables for different platform and let + # Deadline decide. + exe_list = [ + os.path.join(version_dir, "openpype_console.exe"), + os.path.join(version_dir, "openpype_console") + ] + return FileUtils.SearchFileList(";".join(exe_list)) + + def inject_openpype_environment(deadlinePlugin): """ Pull env vars from OpenPype and push them to rendering process. 
@@ -68,93 +286,29 @@ def inject_openpype_environment(deadlinePlugin): print(">>> Injecting OpenPype environments ...") try: - print(">>> Getting OpenPype executable ...") exe_list, dir_list = get_openpype_executable() - openpype_versions = [] - # if the job requires specific OpenPype version, - # lets go over all available and find compatible build. - requested_version = job.GetJobEnvironmentKeyValue("OPENPYPE_VERSION") - if requested_version: - print(( - ">>> Scanning for compatible requested version {}" - ).format(requested_version)) - install_dir = DirectoryUtils.SearchDirectoryList(dir_list) - if install_dir: - print("--- Looking for OpenPype at: {}".format(install_dir)) - sub_dirs = [ - f.path for f in os.scandir(install_dir) - if f.is_dir() - ] - for subdir in sub_dirs: - version = get_openpype_version_from_path(subdir) - if not version: - continue - print(" - found: {} - {}".format(version, subdir)) - openpype_versions.append((version, subdir)) - exe = FileUtils.SearchFileList(exe_list) - if openpype_versions: - # if looking for requested compatible version, - # add the implicitly specified to the list too. - print("Looking for OpenPype at: {}".format(os.path.dirname(exe))) - version = get_openpype_version_from_path( - os.path.dirname(exe)) - if version: - print(" - found: {} - {}".format( - version, os.path.dirname(exe) - )) - openpype_versions.append((version, os.path.dirname(exe))) + requested_version = job.GetJobEnvironmentKeyValue("OPENPYPE_VERSION") if requested_version: - # sort detected versions - if openpype_versions: - # use natural sorting - openpype_versions.sort( - key=lambda ver: [ - int(t) if t.isdigit() else t.lower() - for t in re.split(r"(\d+)", ver[0]) - ]) - print(( - "*** Latest available version found is {}" - ).format(openpype_versions[-1][0])) - requested_major, requested_minor, _ = requested_version.split(".")[:3] # noqa: E501 - compatible_versions = [] - for version in openpype_versions: - v = version[0].split(".")[:3] - if v[0] == requested_major and v[1] == requested_minor: - compatible_versions.append(version) - if not compatible_versions: - raise RuntimeError( - ("Cannot find compatible version available " - "for version {} requested by the job. " - "Please add it through plugin configuration " - "in Deadline or install it to configured " - "directory.").format(requested_version)) - # sort compatible versions nad pick the last one - compatible_versions.sort( - key=lambda ver: [ - int(t) if t.isdigit() else t.lower() - for t in re.split(r"(\d+)", ver[0]) - ]) - print(( - "*** Latest compatible version found is {}" - ).format(compatible_versions[-1][0])) - # create list of executables for different platform and let - # Deadline decide. - exe_list = [ - os.path.join( - compatible_versions[-1][1], "openpype_console.exe"), - os.path.join( - compatible_versions[-1][1], "openpype_console") - ] - exe = FileUtils.SearchFileList(";".join(exe_list)) - if exe == "": - raise RuntimeError( - "OpenPype executable was not found " + - "in the semicolon separated list " + - "\"" + ";".join(exe_list) + "\". " + - "The path to the render executable can be configured " + - "from the Plugin Configuration in the Deadline Monitor.") + exe = get_requested_openpype_executable( + exe, dir_list, requested_version + ) + if exe is None: + raise RuntimeError(( + "Cannot find compatible version available for version {}" + " requested by the job. Please add it through plugin" + " configuration in Deadline or install it to configured" + " directory." 
+ ).format(requested_version)) + + if not exe: + raise RuntimeError(( + "OpenPype executable was not found in the semicolon " + "separated list \"{}\"." + "The path to the render executable can be configured" + " from the Plugin Configuration in the Deadline Monitor." + ).format(";".join(exe_list))) print("--- OpenPype executable: {}".format(exe)) @@ -172,22 +326,22 @@ def inject_openpype_environment(deadlinePlugin): export_url ] - add_args = {} - add_args['project'] = \ - job.GetJobEnvironmentKeyValue('AVALON_PROJECT') - add_args['asset'] = job.GetJobEnvironmentKeyValue('AVALON_ASSET') - add_args['task'] = job.GetJobEnvironmentKeyValue('AVALON_TASK') - add_args['app'] = job.GetJobEnvironmentKeyValue('AVALON_APP_NAME') - add_args["envgroup"] = "farm" - - if all(add_args.values()): - for key, value in add_args.items(): - args.append("--{}".format(key)) - args.append(value) + add_kwargs = { + "project": job.GetJobEnvironmentKeyValue("AVALON_PROJECT"), + "asset": job.GetJobEnvironmentKeyValue("AVALON_ASSET"), + "task": job.GetJobEnvironmentKeyValue("AVALON_TASK"), + "app": job.GetJobEnvironmentKeyValue("AVALON_APP_NAME"), + "envgroup": "farm" + } + if all(add_kwargs.values()): + for key, value in add_kwargs.items(): + args.extend(["--{}".format(key), value]) + else: - msg = "Required env vars: AVALON_PROJECT, AVALON_ASSET, " + \ - "AVALON_TASK, AVALON_APP_NAME" - raise RuntimeError(msg) + raise RuntimeError(( + "Missing required env vars: AVALON_PROJECT, AVALON_ASSET," + " AVALON_TASK, AVALON_APP_NAME" + )) if not os.environ.get("OPENPYPE_MONGO"): print(">>> Missing OPENPYPE_MONGO env var, process won't work") @@ -208,12 +362,12 @@ def inject_openpype_environment(deadlinePlugin): print(">>> Loading file ...") with open(export_url) as fp: contents = json.load(fp) - for key, value in contents.items(): - deadlinePlugin.SetProcessEnvironmentVariable(key, value) + + for key, value in contents.items(): + deadlinePlugin.SetProcessEnvironmentVariable(key, value) script_url = job.GetJobPluginInfoKeyValue("ScriptFilename") if script_url: - script_url = script_url.format(**contents).replace("\\", "/") print(">>> Setting script path {}".format(script_url)) job.SetJobPluginInfoKeyValue("ScriptFilename", script_url) diff --git a/openpype/modules/ftrack/lib/avalon_sync.py b/openpype/modules/ftrack/lib/avalon_sync.py index 935d1e85c9d..0341c257178 100644 --- a/openpype/modules/ftrack/lib/avalon_sync.py +++ b/openpype/modules/ftrack/lib/avalon_sync.py @@ -1556,7 +1556,7 @@ def prepare_avalon_entities(self, ft_project_name): deleted_entities.append(mongo_id) av_ent = self.avalon_ents_by_id[mongo_id] - av_ent_path_items = [p for p in av_ent["data"]["parents"]] + av_ent_path_items = list(av_ent["data"]["parents"]) av_ent_path_items.append(av_ent["name"]) self.log.debug("Deleted <{}>".format("/".join(av_ent_path_items))) @@ -1855,7 +1855,7 @@ def prepare_changes(self): _vis_par = _avalon_ent["data"]["visualParent"] _name = _avalon_ent["name"] if _name in self.all_ftrack_names: - av_ent_path_items = _avalon_ent["data"]["parents"] + av_ent_path_items = list(_avalon_ent["data"]["parents"]) av_ent_path_items.append(_name) av_ent_path = "/".join(av_ent_path_items) # TODO report @@ -1997,7 +1997,7 @@ def synchronize(self): {"_id": mongo_id}, item )) - av_ent_path_items = item["data"]["parents"] + av_ent_path_items = list(item["data"]["parents"]) av_ent_path_items.append(item["name"]) av_ent_path = "/".join(av_ent_path_items) self.log.debug( @@ -2110,6 +2110,7 @@ def check_unarchivation(self, ftrack_id, mongo_id, name): 
entity_dict = self.entities_dict[ftrack_id] + final_parents = entity_dict["final_entity"]["data"]["parents"] if archived_by_id: # if is changeable then unarchive (nothing to check here) if self.changeability_by_mongo_id[mongo_id]: @@ -2123,10 +2124,8 @@ def check_unarchivation(self, ftrack_id, mongo_id, name): archived_name = archived_by_id["name"] if ( - archived_name != entity_dict["name"] or - archived_parents != entity_dict["final_entity"]["data"][ - "parents" - ] + archived_name != entity_dict["name"] + or archived_parents != final_parents ): return None @@ -2136,11 +2135,7 @@ def check_unarchivation(self, ftrack_id, mongo_id, name): for archived in archived_by_name: mongo_id = str(archived["_id"]) archived_parents = archived.get("data", {}).get("parents") - if ( - archived_parents == entity_dict["final_entity"]["data"][ - "parents" - ] - ): + if archived_parents == final_parents: return mongo_id # Secondly try to find more close to current ftrack entity @@ -2350,8 +2345,7 @@ def reload_parents(self, hierarchy_changing_ids): continue changed = True - parents = [par for par in _parents] - hierarchy = "/".join(parents) + parents = list(_parents) self.entities_dict[ftrack_id][ "final_entity"]["data"]["parents"] = parents diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py index 159e60024d7..0e8209866f8 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_api.py @@ -36,10 +36,35 @@ def process(self, instance): return context = instance.context + task_entity, parent_entity = self.get_instance_entities( + instance, context) + if parent_entity is None: + self.log.info(( + "Skipping ftrack integration. Instance \"{}\" does not" + " have specified ftrack entities." + ).format(str(instance))) + return + session = context.data["ftrackSession"] + # Reset session operations and reconfigure locations + session.recorded_operations.clear() + session._configure_locations() + + try: + self.integrate_to_ftrack( + session, + instance, + task_entity, + parent_entity, + component_list + ) + except Exception: + session.reset() + raise + + def get_instance_entities(self, instance, context): parent_entity = None - default_asset_name = None # If instance has set "ftrackEntity" or "ftrackTask" then use them from # instance. Even if they are set to None. If they are set to None it # has a reason. (like has different context) @@ -52,15 +77,21 @@ def process(self, instance): parent_entity = context.data.get("ftrackEntity") if task_entity: - default_asset_name = task_entity["name"] parent_entity = task_entity["parent"] - if parent_entity is None: - self.log.info(( - "Skipping ftrack integration. Instance \"{}\" does not" - " have specified ftrack entities." 
- ).format(str(instance))) - return + return task_entity, parent_entity + + def integrate_to_ftrack( + self, + session, + instance, + task_entity, + parent_entity, + component_list + ): + default_asset_name = None + if task_entity: + default_asset_name = task_entity["name"] if not default_asset_name: default_asset_name = parent_entity["name"] @@ -186,13 +217,7 @@ def _set_task_status(self, instance, project_entity, task_entity, session): self.log.info("Setting task status to \"{}\"".format(status_name)) task_entity["status"] = status - try: - session.commit() - except Exception: - tp, value, tb = sys.exc_info() - session.rollback() - session._configure_locations() - six.reraise(tp, value, tb) + session.commit() def _fill_component_locations(self, session, component_list): components_by_location_name = collections.defaultdict(list) @@ -495,13 +520,7 @@ def create_component(self, session, asset_version_entity, data): session.delete(member) del(member) - try: - session.commit() - except Exception: - tp, value, tb = sys.exc_info() - session.rollback() - session._configure_locations() - six.reraise(tp, value, tb) + session.commit() # Reset members in memory if "members" in component_entity.keys(): @@ -617,13 +636,7 @@ def create_component(self, session, asset_version_entity, data): ) else: # Commit changes. - try: - session.commit() - except Exception: - tp, value, tb = sys.exc_info() - session.rollback() - session._configure_locations() - six.reraise(tp, value, tb) + session.commit() def _create_components(self, session, asset_versions_data_by_id): for item in asset_versions_data_by_id.values(): diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_description.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_description.py index e7c265988ee..6ed02bc8b65 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_description.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_description.py @@ -38,7 +38,7 @@ def process(self, instance): self.log.info("There are any integrated AssetVersions") return - comment = (instance.context.data.get("comment") or "").strip() + comment = instance.data["comment"] if not comment: self.log.info("Comment is not set.") else: diff --git a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py index ac3fa874e00..6776509ddae 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_ftrack_note.py @@ -45,7 +45,7 @@ def process(self, instance): host_name = context.data["hostName"] app_name = context.data["appName"] app_label = context.data["appLabel"] - comment = (context.data.get("comment") or "").strip() + comment = instance.data["comment"] if not comment: self.log.info("Comment is not set.") else: diff --git a/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py b/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py index fa7a89050c5..046dfd9ad8f 100644 --- a/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py +++ b/openpype/modules/ftrack/plugins/publish/integrate_hierarchy_ftrack.py @@ -7,10 +7,8 @@ from openpype.client import get_asset_by_id from openpype.lib import filter_profiles +from openpype.pipeline import KnownPublishError - -# Copy of constant `openpype_modules.ftrack.lib.avalon_sync.CUST_ATTR_AUTO_SYNC` -CUST_ATTR_AUTO_SYNC = "avalon_auto_sync" CUST_ATTR_GROUP = "openpype" @@ -19,7 +17,6 @@ def 
get_pype_attr(session, split_hierarchical=True): custom_attributes = [] hier_custom_attributes = [] - # TODO remove deprecated "avalon" group from query cust_attrs_query = ( "select id, entity_type, object_type_id, is_hierarchical, default" " from CustomAttributeConfiguration" @@ -79,120 +76,284 @@ class IntegrateHierarchyToFtrack(pyblish.api.ContextPlugin): create_task_status_profiles = [] def process(self, context): - self.context = context - if "hierarchyContext" not in self.context.data: + if "hierarchyContext" not in context.data: return hierarchy_context = self._get_active_assets(context) self.log.debug("__ hierarchy_context: {}".format(hierarchy_context)) - session = self.context.data["ftrackSession"] - project_name = self.context.data["projectEntity"]["name"] - query = 'Project where full_name is "{}"'.format(project_name) - project = session.query(query).one() - auto_sync_state = project["custom_attributes"][CUST_ATTR_AUTO_SYNC] + session = context.data["ftrackSession"] + project_name = context.data["projectName"] + project = session.query( + 'select id, full_name from Project where full_name is "{}"'.format( + project_name + ) + ).first() + if not project: + raise KnownPublishError( + "Project \"{}\" was not found on ftrack.".format(project_name) + ) + self.context = context self.session = session self.ft_project = project self.task_types = self.get_all_task_types(project) self.task_statuses = self.get_task_statuses(project) - # disable termporarily ftrack project's autosyncing - if auto_sync_state: - self.auto_sync_off(project) + # import ftrack hierarchy + self.import_to_ftrack(project_name, hierarchy_context) + + def query_ftrack_entitites(self, session, ft_project): + project_id = ft_project["id"] + entities = session.query(( + "select id, name, parent_id" + " from TypedContext where project_id is \"{}\"" + ).format(project_id)).all() + + entities_by_id = {} + entities_by_parent_id = collections.defaultdict(list) + for entity in entities: + entities_by_id[entity["id"]] = entity + parent_id = entity["parent_id"] + entities_by_parent_id[parent_id].append(entity) + + ftrack_hierarchy = [] + ftrack_id_queue = collections.deque() + ftrack_id_queue.append((project_id, ftrack_hierarchy)) + while ftrack_id_queue: + item = ftrack_id_queue.popleft() + ftrack_id, parent_list = item + if ftrack_id == project_id: + entity = ft_project + name = entity["full_name"] + else: + entity = entities_by_id[ftrack_id] + name = entity["name"] + + children = [] + parent_list.append({ + "name": name, + "low_name": name.lower(), + "entity": entity, + "children": children, + }) + for child in entities_by_parent_id[ftrack_id]: + ftrack_id_queue.append((child["id"], children)) + return ftrack_hierarchy + + def find_matching_ftrack_entities( + self, hierarchy_context, ftrack_hierarchy + ): + walk_queue = collections.deque() + for entity_name, entity_data in hierarchy_context.items(): + walk_queue.append( + (entity_name, entity_data, ftrack_hierarchy) + ) + + matching_ftrack_entities = [] + while walk_queue: + item = walk_queue.popleft() + entity_name, entity_data, ft_children = item + matching_ft_child = None + for ft_child in ft_children: + if ft_child["low_name"] == entity_name.lower(): + matching_ft_child = ft_child + break - try: - # import ftrack hierarchy - self.import_to_ftrack(project_name, hierarchy_context) - except Exception: - raise - finally: - if auto_sync_state: - self.auto_sync_on(project) + if matching_ft_child is None: + continue + + entity = matching_ft_child["entity"] + 
entity_data["ft_entity"] = entity + matching_ftrack_entities.append(entity) + + hierarchy_children = entity_data.get("childs") + if not hierarchy_children: + continue + + for child_name, child_data in hierarchy_children.items(): + walk_queue.append( + (child_name, child_data, matching_ft_child["children"]) + ) + return matching_ftrack_entities + + def query_custom_attribute_values(self, session, entities, hier_attrs): + attr_ids = { + attr["id"] + for attr in hier_attrs + } + entity_ids = { + entity["id"] + for entity in entities + } + output = { + entity_id: {} + for entity_id in entity_ids + } + if not attr_ids or not entity_ids: + return {} + + joined_attr_ids = ",".join( + ['"{}"'.format(attr_id) for attr_id in attr_ids] + ) + + # Query values in chunks + chunk_size = int(5000 / len(attr_ids)) + # Make sure entity_ids is `list` for chunk selection + entity_ids = list(entity_ids) + results = [] + for idx in range(0, len(entity_ids), chunk_size): + joined_entity_ids = ",".join([ + '"{}"'.format(entity_id) + for entity_id in entity_ids[idx:idx + chunk_size] + ]) + results.extend( + session.query( + ( + "select value, entity_id, configuration_id" + " from CustomAttributeValue" + " where entity_id in ({}) and configuration_id in ({})" + ).format( + joined_entity_ids, + joined_attr_ids + ) + ).all() + ) + + for result in results: + attr_id = result["configuration_id"] + entity_id = result["entity_id"] + output[entity_id][attr_id] = result["value"] - def import_to_ftrack(self, project_name, input_data, parent=None): + return output + + def import_to_ftrack(self, project_name, hierarchy_context): # Prequery hiearchical custom attributes - hier_custom_attributes = get_pype_attr(self.session)[1] + hier_attrs = get_pype_attr(self.session)[1] hier_attr_by_key = { attr["key"]: attr - for attr in hier_custom_attributes + for attr in hier_attrs } + # Query user entity (for comments) + user = self.session.query( + "User where username is \"{}\"".format(self.session.api_user) + ).first() + if not user: + self.log.warning( + "Was not able to query current User {}".format( + self.session.api_user + ) + ) + + # Query ftrack hierarchy with parenting + ftrack_hierarchy = self.query_ftrack_entitites( + self.session, self.ft_project) + + # Fill ftrack entities to hierarchy context + # - there is no need to query entities again + matching_entities = self.find_matching_ftrack_entities( + hierarchy_context, ftrack_hierarchy) + # Query custom attribute values of each entity + custom_attr_values_by_id = self.query_custom_attribute_values( + self.session, matching_entities, hier_attrs) + # Get ftrack api module (as they are different per python version) ftrack_api = self.context.data["ftrackPythonModule"] - for entity_name in input_data: - entity_data = input_data[entity_name] + # Use queue of hierarchy items to process + import_queue = collections.deque() + for entity_name, entity_data in hierarchy_context.items(): + import_queue.append( + (entity_name, entity_data, None) + ) + + while import_queue: + item = import_queue.popleft() + entity_name, entity_data, parent = item + entity_type = entity_data['entity_type'] self.log.debug(entity_data) - self.log.debug(entity_type) - - if entity_type.lower() == 'project': - entity = self.ft_project - elif self.ft_project is None or parent is None: + entity = entity_data.get("ft_entity") + if entity is None and entity_type.lower() == "project": raise AssertionError( "Collected items are not in right order!" 
) - # try to find if entity already exists - else: - query = ( - 'TypedContext where name is "{0}" and ' - 'project_id is "{1}"' - ).format(entity_name, self.ft_project["id"]) - try: - entity = self.session.query(query).one() - except Exception: - entity = None - # Create entity if not exists if entity is None: - entity = self.create_entity( - name=entity_name, - type=entity_type, - parent=parent - ) + entity = self.session.create(entity_type, { + "name": entity_name, + "parent": parent + }) + entity_data["ft_entity"] = entity + # self.log.info('entity: {}'.format(dict(entity))) # CUSTOM ATTRIBUTES - custom_attributes = entity_data.get('custom_attributes', []) - instances = [ - instance - for instance in self.context - if instance.data.get("asset") == entity["name"] - ] + custom_attributes = entity_data.get('custom_attributes', {}) + instances = [] + for instance in self.context: + instance_asset_name = instance.data.get("asset") + if ( + instance_asset_name + and instance_asset_name.lower() == entity["name"].lower() + ): + instances.append(instance) for instance in instances: instance.data["ftrackEntity"] = entity - for key in custom_attributes: + for key, cust_attr_value in custom_attributes.items(): + if cust_attr_value is None: + continue + hier_attr = hier_attr_by_key.get(key) # Use simple method if key is not hierarchical if not hier_attr: - assert (key in entity['custom_attributes']), ( - 'Missing custom attribute key: `{0}` in attrs: ' - '`{1}`'.format(key, entity['custom_attributes'].keys()) + if key not in entity["custom_attributes"]: + raise KnownPublishError(( + "Missing custom attribute in ftrack with name '{}'" + ).format(key)) + + entity['custom_attributes'][key] = cust_attr_value + continue + + attr_id = hier_attr["id"] + entity_values = custom_attr_values_by_id.get(entity["id"], {}) + # New value is defined by having id in values + # - it can be set to 'None' (ftrack allows that using API) + is_new_value = attr_id not in entity_values + attr_value = entity_values.get(attr_id) + + # Use ftrack operations method to set hiearchical + # attribute value. + # - this is because there may be non hiearchical custom + # attributes with different properties + entity_key = collections.OrderedDict(( + ("configuration_id", hier_attr["id"]), + ("entity_id", entity["id"]) + )) + op = None + if is_new_value: + op = ftrack_api.operation.CreateEntityOperation( + "CustomAttributeValue", + entity_key, + {"value": cust_attr_value} ) - entity['custom_attributes'][key] = custom_attributes[key] - - else: - # Use ftrack operations method to set hiearchical - # attribute value. 
- # - this is because there may be non hiearchical custom - # attributes with different properties - entity_key = collections.OrderedDict() - entity_key["configuration_id"] = hier_attr["id"] - entity_key["entity_id"] = entity["id"] - self.session.recorded_operations.push( - ftrack_api.operation.UpdateEntityOperation( - "ContextCustomAttributeValue", - entity_key, - "value", - ftrack_api.symbol.NOT_SET, - custom_attributes[key] - ) + elif attr_value != cust_attr_value: + op = ftrack_api.operation.UpdateEntityOperation( + "CustomAttributeValue", + entity_key, + "value", + attr_value, + cust_attr_value ) + if op is not None: + self.session.recorded_operations.push(op) + + if self.session.recorded_operations: try: self.session.commit() except Exception: @@ -206,7 +367,7 @@ def import_to_ftrack(self, project_name, input_data, parent=None): for instance in instances: task_name = instance.data.get("task") if task_name: - instances_by_task_name[task_name].append(instance) + instances_by_task_name[task_name.lower()].append(instance) tasks = entity_data.get('tasks', []) existing_tasks = [] @@ -247,30 +408,28 @@ def import_to_ftrack(self, project_name, input_data, parent=None): six.reraise(tp, value, tb) # Create notes. - user = self.session.query( - "User where username is \"{}\"".format(self.session.api_user) - ).first() - if user: - for comment in entity_data.get("comments", []): + entity_comments = entity_data.get("comments") + if user and entity_comments: + for comment in entity_comments: entity.create_note(comment, user) - else: - self.log.warning( - "Was not able to query current User {}".format( - self.session.api_user - ) - ) - try: - self.session.commit() - except Exception: - tp, value, tb = sys.exc_info() - self.session.rollback() - self.session._configure_locations() - six.reraise(tp, value, tb) + + try: + self.session.commit() + except Exception: + tp, value, tb = sys.exc_info() + self.session.rollback() + self.session._configure_locations() + six.reraise(tp, value, tb) # Import children. - if 'childs' in entity_data: - self.import_to_ftrack( - project_name, entity_data['childs'], entity) + children = entity_data.get("childs") + if not children: + continue + + for entity_name, entity_data in children.items(): + import_queue.append( + (entity_name, entity_data, entity) + ) def create_links(self, project_name, entity_data, entity): # Clear existing links. @@ -366,48 +525,6 @@ def create_task(self, name, task_type, parent): return task - def create_entity(self, name, type, parent): - entity = self.session.create(type, { - 'name': name, - 'parent': parent - }) - try: - self.session.commit() - except Exception: - tp, value, tb = sys.exc_info() - self.session.rollback() - self.session._configure_locations() - six.reraise(tp, value, tb) - - return entity - - def auto_sync_off(self, project): - project["custom_attributes"][CUST_ATTR_AUTO_SYNC] = False - - self.log.info("Ftrack autosync swithed off") - - try: - self.session.commit() - except Exception: - tp, value, tb = sys.exc_info() - self.session.rollback() - self.session._configure_locations() - six.reraise(tp, value, tb) - - def auto_sync_on(self, project): - - project["custom_attributes"][CUST_ATTR_AUTO_SYNC] = True - - self.log.info("Ftrack autosync swithed on") - - try: - self.session.commit() - except Exception: - tp, value, tb = sys.exc_info() - self.session.rollback() - self.session._configure_locations() - six.reraise(tp, value, tb) - def _get_active_assets(self, context): """ Returns only asset dictionary. 
Usually the last part of deep dictionary which @@ -429,19 +546,17 @@ def get_pure_hierarchy_data(input_dict): hierarchy_context = context.data["hierarchyContext"] - active_assets = [] + active_assets = set() # filter only the active publishing insatnces for instance in context: if instance.data.get("publish") is False: continue - if not instance.data.get("asset"): - continue - - active_assets.append(instance.data["asset"]) + asset_name = instance.data.get("asset") + if asset_name: + active_assets.add(asset_name) # remove duplicity in list - active_assets = list(set(active_assets)) - self.log.debug("__ active_assets: {}".format(active_assets)) + self.log.debug("__ active_assets: {}".format(list(active_assets))) return get_pure_hierarchy_data(hierarchy_context) diff --git a/openpype/modules/ftrack/scripts/sub_event_status.py b/openpype/modules/ftrack/scripts/sub_event_status.py index 6c7ecb8351d..eb3f63c04b7 100644 --- a/openpype/modules/ftrack/scripts/sub_event_status.py +++ b/openpype/modules/ftrack/scripts/sub_event_status.py @@ -7,6 +7,8 @@ import socket import datetime +import appdirs + import ftrack_api from openpype_modules.ftrack.ftrack_server.ftrack_server import FtrackServer from openpype_modules.ftrack.ftrack_server.lib import ( @@ -253,6 +255,15 @@ def openpype_version_items(self): ) }) + items.append({ + "type": "label", + "value": ( + "Local versions dir: {}
Version repository path: {}"
+            ).format(
+                appdirs.user_data_dir("openpype", "pypeclub"),
+                os.environ.get("OPENPYPE_PATH")
+            )
+        })
         items.append({"type": "label", "value": "---"})

         return items
diff --git a/openpype/modules/kitsu/plugins/publish/integrate_kitsu_review.py b/openpype/modules/kitsu/plugins/publish/integrate_kitsu_review.py
index bf80095225e..e5e64394398 100644
--- a/openpype/modules/kitsu/plugins/publish/integrate_kitsu_review.py
+++ b/openpype/modules/kitsu/plugins/publish/integrate_kitsu_review.py
@@ -31,7 +31,6 @@ def process(self, instance):
                 continue

             review_path = representation.get("published_path")
-
             self.log.debug("Found review at: {}".format(review_path))

             gazu.task.add_preview(
diff --git a/openpype/modules/slack/plugins/publish/collect_slack_family.py b/openpype/modules/slack/plugins/publish/collect_slack_family.py
index 39b05937dce..27e899d59ab 100644
--- a/openpype/modules/slack/plugins/publish/collect_slack_family.py
+++ b/openpype/modules/slack/plugins/publish/collect_slack_family.py
@@ -18,15 +18,15 @@ class CollectSlackFamilies(pyblish.api.InstancePlugin):
     profiles = None

     def process(self, instance):
-        task_name = legacy_io.Session.get("AVALON_TASK")
+        task_data = instance.data["anatomyData"].get("task", {})

         family = self.main_family_from_instance(instance)
         key_values = {
             "families": family,
-            "tasks": task_name,
+            "tasks": task_data.get("name"),
+            "task_types": task_data.get("type"),
             "hosts": instance.data["anatomyData"]["app"],
             "subsets": instance.data["subset"]
         }
-
         profile = filter_profiles(self.profiles, key_values, logger=self.log)
diff --git a/openpype/modules/slack/plugins/publish/integrate_slack_api.py b/openpype/modules/slack/plugins/publish/integrate_slack_api.py
index 643e55915b0..0cd5ec9de89 100644
--- a/openpype/modules/slack/plugins/publish/integrate_slack_api.py
+++ b/openpype/modules/slack/plugins/publish/integrate_slack_api.py
@@ -112,7 +112,13 @@ def _get_filled_message(self, message_templ, instance, review_path=None):
         if review_path:
             fill_pairs.append(("review_filepath", review_path))

-        task_data = fill_data.get("task")
+        task_data = (
+            copy.deepcopy(instance.data.get("anatomyData", {})).get("task")
+            or fill_data.get("task")
+        )
+        if not isinstance(task_data, dict):
+            # fallback for legacy - if task_data is only the task name
+            task_data = {"name": task_data}
         if task_data:
             if (
                 "{task}" in message_templ
@@ -142,13 +148,17 @@ def _get_filled_message(self, message_templ, instance, review_path=None):

     def _get_thumbnail_path(self, instance):
         """Returns abs url for thumbnail if present in instance repres"""
-        published_path = None
+        thumbnail_path = None
         for repre in instance.data.get("representations", []):
             if repre.get('thumbnail') or "thumbnail" in repre.get('tags', []):
-                if os.path.exists(repre["published_path"]):
-                    published_path = repre["published_path"]
+                repre_thumbnail_path = (
+                    repre.get("published_path") or
+                    os.path.join(repre["stagingDir"], repre["files"])
+                )
+                if os.path.exists(repre_thumbnail_path):
+                    thumbnail_path = repre_thumbnail_path
                     break
-        return published_path
+        return thumbnail_path

     def _get_review_path(self, instance):
         """Returns abs url for review if present in instance repres"""
@@ -178,10 +188,17 @@ def _python2_call(self, token, channel, message, publish_files):
                 channel=channel,
                 title=os.path.basename(p_file)
             )
-            attachment_str += "\n<{}|{}>".format(
-                response["file"]["permalink"],
-                os.path.basename(p_file))
-            file_ids.append(response["file"]["id"])
+            if response.get("error"):
+                error_str = self._enrich_error(
str(response.get("error")), + channel) + self.log.warning( + "Error happened: {}".format(error_str)) + else: + attachment_str += "\n<{}|{}>".format( + response["file"]["permalink"], + os.path.basename(p_file)) + file_ids.append(response["file"]["id"]) if publish_files: message += attachment_str diff --git a/openpype/pipeline/create/context.py b/openpype/pipeline/create/context.py index 4fd460ffea1..9c468ae8fc3 100644 --- a/openpype/pipeline/create/context.py +++ b/openpype/pipeline/create/context.py @@ -199,7 +199,7 @@ def add_action(self, label, callback): }) -class AttributeValues: +class AttributeValues(object): """Container which keep values of Attribute definitions. Goal is to have one object which hold values of attribute definitions for @@ -584,6 +584,7 @@ def __init__( if key in data: data.pop(key) + self._data["variant"] = self._data.get("variant") or "" # Stored creator specific attribute values # {key: value} creator_values = copy.deepcopy(orig_creator_attributes) diff --git a/openpype/pipeline/create/creator_plugins.py b/openpype/pipeline/create/creator_plugins.py index 782534d5891..bb5ce00452a 100644 --- a/openpype/pipeline/create/creator_plugins.py +++ b/openpype/pipeline/create/creator_plugins.py @@ -393,8 +393,9 @@ def get_subset_name( asset_doc(dict): Asset document for which subset is created. project_name(str): Project name. host_name(str): Which host creates subset. - instance(str|None): Object of 'CreatedInstance' for which is - subset name updated. Passed only on subset name update. + instance(CreatedInstance|None): Object of 'CreatedInstance' for + which is subset name updated. Passed only on subset name + update. """ dynamic_data = self.get_dynamic_data( diff --git a/openpype/pipeline/publish/publish_plugins.py b/openpype/pipeline/publish/publish_plugins.py index 6e2be1ce2c3..47dfaf6b981 100644 --- a/openpype/pipeline/publish/publish_plugins.py +++ b/openpype/pipeline/publish/publish_plugins.py @@ -1,3 +1,4 @@ +import inspect from abc import ABCMeta import pyblish.api @@ -132,19 +133,34 @@ def convert_attribute_values(cls, attribute_values): ) return attribute_values - def get_attr_values_from_data(self, data): + @staticmethod + def get_attr_values_from_data_for_plugin(plugin, data): """Get attribute values for attribute definitions from data. Args: + plugin (Union[publish.api.Plugin, Type[publish.api.Plugin]]): The + plugin for which attributes are extracted. data(dict): Data from instance or context. """ + if not inspect.isclass(plugin): + plugin = plugin.__class__ + return ( data .get("publish_attributes", {}) - .get(self.__class__.__name__, {}) + .get(plugin.__name__, {}) ) + def get_attr_values_from_data(self, data): + """Get attribute values for attribute definitions from data. + + Args: + data(dict): Data from instance or context. + """ + + return self.get_attr_values_from_data_for_plugin(self.__class__, data) + class OptionalPyblishPluginMixin(OpenPypePyblishPluginMixin): """Prepare mixin for optional plugins. 
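To illustrate the lookup the new static method performs, here is a small usage sketch. The data layout (values grouped under "publish_attributes" by plugin class name) follows the code above, while the plugin class and values are made up for the example.

import inspect


class CollectExampleAttributes(object):
    """Hypothetical pyblish plugin class, used only as the lookup key."""
    pass


def get_attr_values_from_data_for_plugin(plugin, data):
    # Same lookup as the mixin: values are grouped per plugin class name
    # under the "publish_attributes" key of instance or context data.
    if not inspect.isclass(plugin):
        plugin = plugin.__class__
    return data.get("publish_attributes", {}).get(plugin.__name__, {})


instance_data = {
    "publish_attributes": {
        "CollectExampleAttributes": {"comment": "Fixed flicker in sh010"}
    }
}
print(get_attr_values_from_data_for_plugin(
    CollectExampleAttributes, instance_data
))
# -> {'comment': 'Fixed flicker in sh010'}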
diff --git a/openpype/plugins/publish/collect_anatomy_instance_data.py b/openpype/plugins/publish/collect_anatomy_instance_data.py
index f67d3373d9b..909b49a07d4 100644
--- a/openpype/plugins/publish/collect_anatomy_instance_data.py
+++ b/openpype/plugins/publish/collect_anatomy_instance_data.py
@@ -188,7 +188,7 @@ def fill_latest_versions(self, context, project_name):
         for subset_doc in subset_docs:
             subset_id = subset_doc["_id"]
             last_version_doc = last_version_docs_by_subset_id.get(subset_id)
-            if last_version_docs_by_subset_id is None:
+            if last_version_doc is None:
                 continue

             asset_id = subset_doc["parent"]
diff --git a/openpype/plugins/publish/collect_audio.py b/openpype/plugins/publish/collect_audio.py
index 7d53b24e546..3a0ddb32818 100644
--- a/openpype/plugins/publish/collect_audio.py
+++ b/openpype/plugins/publish/collect_audio.py
@@ -1,21 +1,27 @@
+import collections
+
 import pyblish.api

 from openpype.client import (
-    get_last_version_by_subset_name,
+    get_assets,
+    get_subsets,
+    get_last_versions,
     get_representations,
 )
-from openpype.pipeline import (
-    legacy_io,
-    get_representation_path,
-)
+from openpype.pipeline.load import get_representation_path_with_anatomy


-class CollectAudio(pyblish.api.InstancePlugin):
+class CollectAudio(pyblish.api.ContextPlugin):
     """Collect asset's last published audio.

     The audio subset name searched for is defined in:
         project settings > Collect Audio
+
+    Note:
+        The plugin used to be an instance plugin, but the queries were
+        slowing down the whole collection phase a lot, so it was converted
+        to a context plugin which requires only 4 queries in total.
     """
+
+    label = "Collect Asset Audio"
     order = pyblish.api.CollectorOrder + 0.1
     families = ["review"]
@@ -39,67 +45,134 @@ class CollectAudio(pyblish.api.InstancePlugin):

     audio_subset_name = "audioMain"

-    def process(self, instance):
-        if instance.data.get("audio"):
-            self.log.info(
-                "Skipping Audio collecion. It is already collected"
-            )
+    def process(self, context):
+        # Fake filtering by family inside context plugin
+        filtered_instances = []
+        for instance in pyblish.api.instances_by_plugin(
+            context, self.__class__
+        ):
+            # Skip instances that already have audio filled
+            if instance.data.get("audio"):
+                self.log.info(
+                    "Skipping Audio collection. It is already collected"
+                )
+                continue
+            filtered_instances.append(instance)
+
+        # Skip if none of instances remained
+        if not filtered_instances:
             return

         # Add audio to instance if exists.
+ instances_by_asset_name = collections.defaultdict(list) + for instance in filtered_instances: + asset_name = instance.data["asset"] + instances_by_asset_name[asset_name].append(instance) + + asset_names = set(instances_by_asset_name.keys()) self.log.info(( - "Searching for audio subset '{subset}'" - " in asset '{asset}'" + "Searching for audio subset '{subset}' in assets {assets}" ).format( subset=self.audio_subset_name, - asset=instance.data["asset"] + assets=", ".join([ + '"{}"'.format(asset_name) + for asset_name in asset_names + ]) )) - repre_doc = self._get_repre_doc(instance) - - # Add audio to instance if representation was found - if repre_doc: - instance.data["audio"] = [{ - "offset": 0, - "filename": get_representation_path(repre_doc) - }] - self.log.info("Audio Data added to instance ...") - - def _get_repre_doc(self, instance): - cache = instance.context.data.get("__cache_asset_audio") - if cache is None: - cache = {} - instance.context.data["__cache_asset_audio"] = cache - asset_name = instance.data["asset"] - - # first try to get it from cache - if asset_name in cache: - return cache[asset_name] + # Query all required documents + project_name = context.data["projectName"] + anatomy = context.data["anatomy"] + repre_docs_by_asset_names = self.query_representations( + project_name, asset_names) - project_name = legacy_io.active_project() + for asset_name, instances in instances_by_asset_name.items(): + repre_docs = repre_docs_by_asset_names[asset_name] + if not repre_docs: + continue - # Find latest versions document - last_version_doc = get_last_version_by_subset_name( + repre_doc = repre_docs[0] + repre_path = get_representation_path_with_anatomy( + repre_doc, anatomy + ) + for instance in instances: + instance.data["audio"] = [{ + "offset": 0, + "filename": repre_path + }] + self.log.info("Audio Data added to instance ...") + + def query_representations(self, project_name, asset_names): + """Query representations related to audio subsets for passed assets. + + Args: + project_name (str): Project in which we're looking for all + entities. + asset_names (Iterable[str]): Asset names where to look for audio + subsets and their representations. + + Returns: + collections.defaultdict[str, List[Dict[Str, Any]]]: Representations + related to audio subsets by asset name. 
+ """ + + output = collections.defaultdict(list) + # Query asset documents + asset_docs = get_assets( project_name, - self.audio_subset_name, - asset_name=asset_name, - fields=["_id"] + asset_names=asset_names, + fields=["_id", "name"] ) - repre_doc = None - if last_version_doc: - # Try to find it's representation (Expected there is only one) - repre_docs = list(get_representations( - project_name, version_ids=[last_version_doc["_id"]] - )) - if not repre_docs: - self.log.warning( - "Version document does not contain any representations" - ) - else: - repre_doc = repre_docs[0] - - # update cache - cache[asset_name] = repre_doc + asset_id_by_name = {} + for asset_doc in asset_docs: + asset_id_by_name[asset_doc["name"]] = asset_doc["_id"] + asset_ids = set(asset_id_by_name.values()) - return repre_doc + # Query subsets with name define by 'audio_subset_name' attr + # - one or none subsets with the name should be available on an asset + subset_docs = get_subsets( + project_name, + subset_names=[self.audio_subset_name], + asset_ids=asset_ids, + fields=["_id", "parent"] + ) + subset_id_by_asset_id = {} + for subset_doc in subset_docs: + asset_id = subset_doc["parent"] + subset_id_by_asset_id[asset_id] = subset_doc["_id"] + + subset_ids = set(subset_id_by_asset_id.values()) + if not subset_ids: + return output + + # Find all latest versions for the subsets + version_docs_by_subset_id = get_last_versions( + project_name, subset_ids=subset_ids, fields=["_id", "parent"] + ) + version_id_by_subset_id = { + subset_id: version_doc["_id"] + for subset_id, version_doc in version_docs_by_subset_id.items() + } + version_ids = set(version_id_by_subset_id.values()) + if not version_ids: + return output + + # Find representations under latest versions of audio subsets + repre_docs = get_representations( + project_name, version_ids=version_ids + ) + repre_docs_by_version_id = collections.defaultdict(list) + for repre_doc in repre_docs: + version_id = repre_doc["parent"] + repre_docs_by_version_id[version_id].append(repre_doc) + + if not repre_docs_by_version_id: + return output + + for asset_name in asset_names: + asset_id = asset_id_by_name.get(asset_name) + subset_id = subset_id_by_asset_id.get(asset_id) + version_id = version_id_by_subset_id.get(subset_id) + output[asset_name] = repre_docs_by_version_id[version_id] + return output diff --git a/openpype/plugins/publish/collect_comment.py b/openpype/plugins/publish/collect_comment.py index 062142ace9c..12579cd9572 100644 --- a/openpype/plugins/publish/collect_comment.py +++ b/openpype/plugins/publish/collect_comment.py @@ -1,19 +1,123 @@ -""" -Requires: - None -Provides: - context -> comment (str) +"""Collect comment and add option to enter comment per instance. + +Combination of plugins. One define optional input for instances in Publisher +UI (CollectInstanceCommentDef) and second cares that each instance during +collection has available "comment" key in data (CollectComment). + +Plugin 'CollectInstanceCommentDef' define "comment" attribute which won't be +filled with any value if instance does not match families filter or when +plugin is disabled. + +Plugin 'CollectComment' makes sure that each instance in context has +available "comment" key in data which can be set to 'str' or 'None' if is not +set. 
+- In case instance already has filled comment the plugin's logic is skipped +- The comment is always set and value should be always 'str' even if is empty + +Why are separated: +- 'CollectInstanceCommentDef' can have specific settings to show comment + attribute only to defined families in publisher UI +- 'CollectComment' will run all the time + +Todos: + The comment per instance is not sent via farm. """ import pyblish.api +from openpype.lib.attribute_definitions import TextDef +from openpype.pipeline.publish import OpenPypePyblishPluginMixin + + +class CollectInstanceCommentDef( + pyblish.api.ContextPlugin, + OpenPypePyblishPluginMixin +): + label = "Comment per instance" + targets = ["local"] + # Disable plugin by default + families = [] + enabled = False + + def process(self, instance): + pass + + @classmethod + def apply_settings(cls, project_setting, _): + plugin_settings = project_setting["global"]["publish"].get( + "collect_comment_per_instance" + ) + if not plugin_settings: + return + + if plugin_settings.get("enabled") is not None: + cls.enabled = plugin_settings["enabled"] + if plugin_settings.get("families") is not None: + cls.families = plugin_settings["families"] -class CollectComment(pyblish.api.ContextPlugin): - """This plug-ins displays the comment dialog box per default""" + @classmethod + def get_attribute_defs(cls): + return [ + TextDef("comment", label="Comment") + ] - label = "Collect Comment" - order = pyblish.api.CollectorOrder + +class CollectComment( + pyblish.api.ContextPlugin, + OpenPypePyblishPluginMixin +): + """Collect comment per each instance. + + Plugin makes sure each instance to publish has set "comment" in data so any + further plugin can use it directly. + """ + + label = "Collect Instance Comment" + order = pyblish.api.CollectorOrder + 0.49 def process(self, context): - comment = (context.data.get("comment") or "").strip() - context.data["comment"] = comment + context_comment = self.cleanup_comment(context.data.get("comment")) + # Set it back + context.data["comment"] = context_comment + for instance in context: + instance_label = str(instance) + # Check if comment is already set + instance_comment = self.cleanup_comment( + instance.data.get("comment")) + + # If comment on instance is not set then look for attributes + if not instance_comment: + attr_values = self.get_attr_values_from_data_for_plugin( + CollectInstanceCommentDef, instance.data + ) + instance_comment = self.cleanup_comment( + attr_values.get("comment") + ) + + # Use context comment if instance has all options of comment + # empty + if not instance_comment: + instance_comment = context_comment + + instance.data["comment"] = instance_comment + if instance_comment: + msg_end = " has comment set to: \"{}\"".format( + instance_comment) + else: + msg_end = " does not have set comment" + self.log.debug("Instance {} {}".format(instance_label, msg_end)) + + def cleanup_comment(self, comment): + """Cleanup comment value. + + Args: + comment (Union[str, None]): Comment value from data. + + Returns: + str: Cleaned comment which is stripped or empty string if input + was 'None'. 
+ """ + + if comment: + return comment.strip() + return "" diff --git a/openpype/plugins/publish/collect_resources_path.py b/openpype/plugins/publish/collect_resources_path.py index 90aa0f44bb0..a2d5b95ab21 100644 --- a/openpype/plugins/publish/collect_resources_path.py +++ b/openpype/plugins/publish/collect_resources_path.py @@ -51,6 +51,7 @@ class CollectResourcesPath(pyblish.api.InstancePlugin): "source", "assembly", "fbx", + "gltf", "textures", "action", "background", diff --git a/openpype/plugins/publish/extract_burnin.py b/openpype/plugins/publish/extract_burnin.py index 4179199317e..fd8dfdece97 100644 --- a/openpype/plugins/publish/extract_burnin.py +++ b/openpype/plugins/publish/extract_burnin.py @@ -468,7 +468,7 @@ def prepare_basic_data(self, instance): burnin_data.update({ "version": int(version), - "comment": context.data.get("comment") or "" + "comment": instance.data["comment"] }) intent_label = context.data.get("intent") or "" diff --git a/openpype/plugins/publish/extract_hierarchy_avalon.py b/openpype/plugins/publish/extract_hierarchy_avalon.py index 6b4e5f48c59..b2a6adc210d 100644 --- a/openpype/plugins/publish/extract_hierarchy_avalon.py +++ b/openpype/plugins/publish/extract_hierarchy_avalon.py @@ -1,9 +1,8 @@ +import collections from copy import deepcopy import pyblish.api from openpype.client import ( - get_project, - get_asset_by_id, - get_asset_by_name, + get_assets, get_archived_assets ) from openpype.pipeline import legacy_io @@ -17,7 +16,6 @@ class ExtractHierarchyToAvalon(pyblish.api.ContextPlugin): families = ["clip", "shot"] def process(self, context): - # processing starts here if "hierarchyContext" not in context.data: self.log.info("skipping IntegrateHierarchyToAvalon") return @@ -25,161 +23,236 @@ def process(self, context): if not legacy_io.Session: legacy_io.install() - project_name = legacy_io.active_project() hierarchy_context = self._get_active_assets(context) self.log.debug("__ hierarchy_context: {}".format(hierarchy_context)) - self.project = None - self.import_to_avalon(context, project_name, hierarchy_context) + project_name = context.data["projectName"] + asset_names = self.extract_asset_names(hierarchy_context) - def import_to_avalon( - self, - context, - project_name, - input_data, - parent=None, - ): - for name in input_data: - self.log.info("input_data[name]: {}".format(input_data[name])) - entity_data = input_data[name] - entity_type = entity_data["entity_type"] + asset_docs_by_name = {} + for asset_doc in get_assets(project_name, asset_names=asset_names): + name = asset_doc["name"] + asset_docs_by_name[name] = asset_doc + + archived_asset_docs_by_name = collections.defaultdict(list) + for asset_doc in get_archived_assets( + project_name, asset_names=asset_names + ): + name = asset_doc["name"] + archived_asset_docs_by_name[name].append(asset_doc) + + project_doc = None + hierarchy_queue = collections.deque() + for name, data in hierarchy_context.items(): + hierarchy_queue.append((name, data, None)) + + while hierarchy_queue: + item = hierarchy_queue.popleft() + name, entity_data, parent = item - data = {} - data["entityType"] = entity_type - - # Custom attributes. - for k, val in entity_data.get("custom_attributes", {}).items(): - data[k] = val - - if entity_type.lower() != "project": - data["inputs"] = entity_data.get("inputs", []) - - # Tasks. 
- tasks = entity_data.get("tasks", {}) - if tasks is not None or len(tasks) > 0: - data["tasks"] = tasks - parents = [] - visualParent = None - # do not store project"s id as visualParent - if self.project is not None: - if self.project["_id"] != parent["_id"]: - visualParent = parent["_id"] - parents.extend( - parent.get("data", {}).get("parents", []) - ) - parents.append(parent["name"]) - data["visualParent"] = visualParent - data["parents"] = parents - - update_data = True - # Process project + entity_type = entity_data["entity_type"] if entity_type.lower() == "project": - entity = get_project(project_name) - # TODO: should be in validator? - assert (entity is not None), "Did not find project in DB" - - # get data from already existing project - cur_entity_data = entity.get("data") or {} - cur_entity_data.update(data) - data = cur_entity_data - - self.project = entity - # Raise error if project or parent are not set - elif self.project is None or parent is None: - raise AssertionError( - "Collected items are not in right order!" + new_parent = project_doc = self.sync_project( + context, + entity_data ) - # Else process assset - else: - entity = get_asset_by_name(project_name, name) - if entity: - # Do not override data, only update - cur_entity_data = entity.get("data") or {} - entity_tasks = cur_entity_data["tasks"] or {} - - # create tasks as dict by default - if not entity_tasks: - cur_entity_data["tasks"] = entity_tasks - - new_tasks = data.pop("tasks", {}) - if "tasks" not in cur_entity_data and not new_tasks: - continue - for task_name in new_tasks: - if task_name in entity_tasks.keys(): - continue - cur_entity_data["tasks"][task_name] = new_tasks[ - task_name] - cur_entity_data.update(data) - data = cur_entity_data - else: - # Skip updating data - update_data = False - - archived_entities = get_archived_assets( - project_name, - asset_names=[name] - ) - unarchive_entity = None - for archived_entity in archived_entities: - archived_parents = ( - archived_entity - .get("data", {}) - .get("parents") - ) - if data["parents"] == archived_parents: - unarchive_entity = archived_entity - break - - if unarchive_entity is None: - # Create entity if doesn"t exist - entity = self.create_avalon_asset( - name, data - ) - else: - # Unarchive if entity was archived - entity = self.unarchive_entity(unarchive_entity, data) + else: + new_parent = self.sync_asset( + name, + entity_data, + parent, + project_doc, + asset_docs_by_name, + archived_asset_docs_by_name + ) # make sure all relative instances have correct avalon data self._set_avalon_data_to_relative_instances( context, project_name, - entity + new_parent ) - if update_data: - # Update entity data with input data - legacy_io.update_many( - {"_id": entity["_id"]}, - {"$set": {"data": data}} - ) + children = entity_data.get("childs") + if not children: + continue + + for child_name, child_data in children.items(): + hierarchy_queue.append((child_name, child_data, new_parent)) - if "childs" in entity_data: - self.import_to_avalon( - context, project_name, entity_data["childs"], entity + def extract_asset_names(self, hierarchy_context): + """Extract all possible asset names from hierarchy context. + + Args: + hierarchy_context (Dict[str, Any]): Nested hierarchy structure. + + Returns: + Set[str]: All asset names from the hierarchy structure. 
+ """ + + hierarchy_queue = collections.deque() + for name, data in hierarchy_context.items(): + hierarchy_queue.append((name, data)) + + asset_names = set() + while hierarchy_queue: + item = hierarchy_queue.popleft() + name, data = item + if data["entity_type"].lower() != "project": + asset_names.add(name) + + children = data.get("childs") + if children: + for child_name, child_data in children.items(): + hierarchy_queue.append((child_name, child_data)) + return asset_names + + def sync_project(self, context, entity_data): + project_doc = context.data["projectEntity"] + + if "data" not in project_doc: + project_doc["data"] = {} + current_data = project_doc["data"] + + changes = {} + entity_type = entity_data["entity_type"] + if current_data.get("entityType") != entity_type: + changes["entityType"] = entity_type + + # Custom attributes. + attributes = entity_data.get("custom_attributes") or {} + for key, value in attributes.items(): + if key not in current_data or current_data[key] != value: + update_key = "data.{}".format(key) + changes[update_key] = value + current_data[key] = value + + if changes: + # Update entity data with input data + legacy_io.update_one( + {"_id": project_doc["_id"]}, + {"$set": changes} + ) + return project_doc + + def sync_asset( + self, + asset_name, + entity_data, + parent, + project, + asset_docs_by_name, + archived_asset_docs_by_name + ): + # Prepare data for new asset or for update comparison + data = { + "entityType": entity_data["entity_type"] + } + + # Custom attributes. + attributes = entity_data.get("custom_attributes") or {} + for key, value in attributes.items(): + data[key] = value + + data["inputs"] = entity_data.get("inputs") or [] + + # Parents and visual parent are empty if parent is project + parents = [] + parent_id = None + if project["_id"] != parent["_id"]: + parent_id = parent["_id"] + # Use parent's parents as source value + parents.extend(parent["data"]["parents"]) + # Add parent's name to parents + parents.append(parent["name"]) + + data["visualParent"] = parent_id + data["parents"] = parents + + asset_doc = asset_docs_by_name.get(asset_name) + # --- Create/Unarchive asset and end --- + if not asset_doc: + # Just use tasks from entity data as they are + # - this is different from the case when tasks are updated + data["tasks"] = entity_data.get("tasks") or {} + archived_asset_doc = None + for archived_entity in archived_asset_docs_by_name[asset_name]: + archived_parents = ( + archived_entity + .get("data", {}) + .get("parents") ) + if data["parents"] == archived_parents: + archived_asset_doc = archived_entity + break + + # Create entity if doesn't exist + if archived_asset_doc is None: + return self.create_avalon_asset( + asset_name, data, project + ) + + return self.unarchive_entity( + archived_asset_doc, data, project + ) + + # --- Update existing asset --- + # Make sure current entity has "data" key + if "data" not in asset_doc: + asset_doc["data"] = {} + cur_entity_data = asset_doc["data"] + cur_entity_tasks = cur_entity_data.get("tasks") or {} + + # Tasks + data["tasks"] = {} + new_tasks = entity_data.get("tasks") or {} + for task_name, task_info in new_tasks.items(): + task_info = deepcopy(task_info) + if task_name in cur_entity_tasks: + src_task_info = deepcopy(cur_entity_tasks[task_name]) + src_task_info.update(task_info) + task_info = src_task_info + + data["tasks"][task_name] = task_info + + changes = {} + for key, value in data.items(): + if key not in cur_entity_data or value != cur_entity_data[key]: + update_key = 
"data.{}".format(key) + changes[update_key] = value + cur_entity_data[key] = value + + # Update asset in database if necessary + if changes: + # Update entity data with input data + legacy_io.update_one( + {"_id": asset_doc["_id"]}, + {"$set": changes} + ) + return asset_doc - def unarchive_entity(self, entity, data): + def unarchive_entity(self, archived_doc, data, project): # Unarchived asset should not use same data - new_entity = { - "_id": entity["_id"], + asset_doc = { + "_id": archived_doc["_id"], "schema": "openpype:asset-3.0", - "name": entity["name"], - "parent": self.project["_id"], + "name": archived_doc["name"], + "parent": project["_id"], "type": "asset", "data": data } legacy_io.replace_one( - {"_id": entity["_id"]}, - new_entity + {"_id": archived_doc["_id"]}, + asset_doc ) - return new_entity + return asset_doc - def create_avalon_asset(self, name, data): + def create_avalon_asset(self, name, data, project): asset_doc = { "schema": "openpype:asset-3.0", "name": name, - "parent": self.project["_id"], + "parent": project["_id"], "type": "asset", "data": data } @@ -194,27 +267,27 @@ def _set_avalon_data_to_relative_instances( project_name, asset_doc ): + asset_name = asset_doc["name"] + new_parents = asset_doc["data"]["parents"] + hierarchy = "/".join(new_parents) + parent_name = project_name + if new_parents: + parent_name = new_parents[-1] + for instance in context: - # Skip instance if has filled asset entity - if instance.data.get("assetEntity"): + # Skip if instance asset does not match + instance_asset_name = instance.data.get("asset") + if asset_name != instance_asset_name: continue - asset_name = asset_doc["name"] - inst_asset_name = instance.data["asset"] - - if asset_name == inst_asset_name: - instance.data["assetEntity"] = asset_doc - - # get parenting data - parents = asset_doc["data"].get("parents") or list() - # equire only relative parent - parent_name = project_name - if parents: - parent_name = parents[-1] + instance_asset_doc = instance.data.get("assetEntity") + # Update asset entity with new possible changes of asset document + instance.data["assetEntity"] = asset_doc - # update avalon data on instance + # Update anatomy data if asset was not set on instance + if not instance_asset_doc: instance.data["anatomyData"].update({ - "hierarchy": "/".join(parents), + "hierarchy": hierarchy, "task": {}, "parent": parent_name }) @@ -241,7 +314,7 @@ def get_pure_hierarchy_data(input_dict): hierarchy_context = context.data["hierarchyContext"] active_assets = [] - # filter only the active publishing insatnces + # filter only the active publishing instances for instance in context: if instance.data.get("publish") is False: continue diff --git a/openpype/plugins/publish/extract_review.py b/openpype/plugins/publish/extract_review.py index 1f9b30fba36..9310923a9f7 100644 --- a/openpype/plugins/publish/extract_review.py +++ b/openpype/plugins/publish/extract_review.py @@ -179,7 +179,7 @@ def _single_frame_filter(self, input_filepaths, output_defs): single_frame_image = False if len(input_filepaths) == 1: ext = os.path.splitext(input_filepaths[0])[-1] - single_frame_image = ext in IMAGE_EXTENSIONS + single_frame_image = ext.lower() in IMAGE_EXTENSIONS filtered_defs = [] for output_def in output_defs: @@ -501,7 +501,7 @@ def prepare_temp_data(self, instance, repre, output_def): first_sequence_frame += handle_start ext = os.path.splitext(repre["files"][0])[1].replace(".", "") - if ext in self.alpha_exts: + if ext.lower() in self.alpha_exts: input_allow_bg = True return { @@ 
-598,8 +598,12 @@ def _ffmpeg_arguments( if temp_data["input_is_sequence"]: # Set start frame of input sequence (just frame in filename) # - definition of input filepath + # - add handle start if output should be without handles + start_number = temp_data["first_sequence_frame"] + if temp_data["without_handles"] and temp_data["handles_are_set"]: + start_number += temp_data["handle_start"] ffmpeg_input_args.extend([ - "-start_number", str(temp_data["first_sequence_frame"]) + "-start_number", str(start_number) ]) # TODO add fps mapping `{fps: fraction}` ? @@ -609,49 +613,50 @@ def _ffmpeg_arguments( # "23.976": "24000/1001" # } # Add framerate to input when input is sequence - ffmpeg_input_args.append( - "-framerate {}".format(temp_data["fps"]) - ) + ffmpeg_input_args.extend([ + "-framerate", str(temp_data["fps"]) + ]) + # Add duration of an input sequence if output is video + if not temp_data["output_is_sequence"]: + ffmpeg_input_args.extend([ + "-to", "{:0.10f}".format(duration_seconds) + ]) if temp_data["output_is_sequence"]: # Set start frame of output sequence (just frame in filename) # - this is definition of an output - ffmpeg_output_args.append( - "-start_number {}".format(temp_data["output_frame_start"]) - ) + ffmpeg_output_args.extend([ + "-start_number", str(temp_data["output_frame_start"]) + ]) # Change output's duration and start point if should not contain # handles - start_sec = 0 if temp_data["without_handles"] and temp_data["handles_are_set"]: - # Set start time without handles - # - check if handle_start is bigger than 0 to avoid zero division - if temp_data["handle_start"] > 0: - start_sec = float(temp_data["handle_start"]) / temp_data["fps"] - ffmpeg_input_args.append("-ss {:0.10f}".format(start_sec)) + # Set output duration in seconds + ffmpeg_output_args.extend([ + "-t", "{:0.10}".format(duration_seconds) + ]) - # Set output duration inn seconds - ffmpeg_output_args.append("-t {:0.10}".format(duration_seconds)) + # Add -ss (start offset in seconds) if input is not sequence + if not temp_data["input_is_sequence"]: + start_sec = float(temp_data["handle_start"]) / temp_data["fps"] + # Set start time without handles + # - Skip if start sec is 0.0 + if start_sec > 0.0: + ffmpeg_input_args.extend([ + "-ss", "{:0.10f}".format(start_sec) + ]) # Set frame range of output when input or output is sequence elif temp_data["output_is_sequence"]: - ffmpeg_output_args.append("-frames:v {}".format(output_frames_len)) - - # Add duration of an input sequence if output is video - if ( - temp_data["input_is_sequence"] - and not temp_data["output_is_sequence"] - ): - ffmpeg_input_args.append("-to {:0.10f}".format( - duration_seconds + start_sec - )) + ffmpeg_output_args.extend([ + "-frames:v", str(output_frames_len) + ]) # Add video/image input path - ffmpeg_input_args.append( - "-i {}".format( - path_to_subprocess_arg(temp_data["full_input_path"]) - ) - ) + ffmpeg_input_args.extend([ + "-i", path_to_subprocess_arg(temp_data["full_input_path"]) + ]) # Add audio arguments if there are any. Skipped when output are images. 
if not temp_data["output_ext_is_image"] and temp_data["with_audio"]: @@ -934,6 +939,8 @@ def input_output_paths(self, new_repre, output_def, temp_data): if output_ext.startswith("."): output_ext = output_ext[1:] + output_ext = output_ext.lower() + # Store extension to representation new_repre["ext"] = output_ext diff --git a/openpype/plugins/publish/extract_thumbnail_from_source.py b/openpype/plugins/publish/extract_thumbnail_from_source.py index 8da12138071..03df1455e23 100644 --- a/openpype/plugins/publish/extract_thumbnail_from_source.py +++ b/openpype/plugins/publish/extract_thumbnail_from_source.py @@ -73,6 +73,7 @@ def process(self, instance): "Adding thumbnail representation: {}".format(new_repre) ) instance.data["representations"].append(new_repre) + instance.data["thumbnailPath"] = dst_filepath def _create_thumbnail(self, context, thumbnail_source): if not thumbnail_source: diff --git a/openpype/plugins/publish/integrate.py b/openpype/plugins/publish/integrate.py index 66f9a7aa598..2ce8037f5f8 100644 --- a/openpype/plugins/publish/integrate.py +++ b/openpype/plugins/publish/integrate.py @@ -112,6 +112,7 @@ class IntegrateAsset(pyblish.api.InstancePlugin): "image", "assembly", "fbx", + "gltf", "textures", "action", "harmony.template", @@ -130,7 +131,8 @@ class IntegrateAsset(pyblish.api.InstancePlugin): "mvUsd", "mvUsdComposition", "mvUsdOverride", - "simpleUnrealTexture" + "simpleUnrealTexture", + "online" ] default_template_name = "publish" @@ -769,7 +771,7 @@ def create_version_data(self, instance): "time": context.data["time"], "author": context.data["user"], "source": source, - "comment": context.data.get("comment"), + "comment": instance.data["comment"], "machine": context.data.get("machine"), "fps": instance.data.get("fps", context.data.get("fps")) } diff --git a/openpype/plugins/publish/integrate_legacy.py b/openpype/plugins/publish/integrate_legacy.py index d05aea1e2ff..8f3b0d42204 100644 --- a/openpype/plugins/publish/integrate_legacy.py +++ b/openpype/plugins/publish/integrate_legacy.py @@ -107,6 +107,7 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin): "image", "assembly", "fbx", + "gltf", "textures", "action", "harmony.template", @@ -969,7 +970,7 @@ def create_version_data(self, context, instance): "time": context.data["time"], "author": context.data["user"], "source": source, - "comment": context.data.get("comment"), + "comment": instance.data["comment"], "machine": context.data.get("machine"), "fps": context.data.get( "fps", instance.data.get("fps") diff --git a/openpype/plugins/publish/integrate_thumbnail.py b/openpype/plugins/publish/integrate_thumbnail.py index f74c3d96096..809a1782e00 100644 --- a/openpype/plugins/publish/integrate_thumbnail.py +++ b/openpype/plugins/publish/integrate_thumbnail.py @@ -102,8 +102,31 @@ def process(self, context): thumbnail_root ) + def _get_thumbnail_from_instance(self, instance): + # 1. Look for thumbnail in published representations + published_repres = instance.data.get("published_representations") + path = self._get_thumbnail_path_from_published(published_repres) + if path and os.path.exists(path): + return path + + if path: + self.log.warning( + "Could not find published thumbnail path {}".format(path) + ) + + # 2. Look for thumbnail in "not published" representations + thumbnail_path = self._get_thumbnail_path_from_unpublished(instance) + if thumbnail_path and os.path.exists(thumbnail_path): + return thumbnail_path + + # 3. 
Look for thumbnail path on instance in 'thumbnailPath'
+        thumbnail_path = instance.data.get("thumbnailPath")
+        if thumbnail_path and os.path.exists(thumbnail_path):
+            return thumbnail_path
+        return None
+
     def _prepare_instances(self, context):
-        context_thumbnail_path = context.get("thumbnailPath")
+        context_thumbnail_path = context.data.get("thumbnailPath")
         valid_context_thumbnail = False
         if context_thumbnail_path and os.path.exists(context_thumbnail_path):
             valid_context_thumbnail = True
@@ -122,8 +145,7 @@ def _prepare_instances(self, context):
                 continue

             # Find thumbnail path on instance
-            thumbnail_path = self._get_instance_thumbnail_path(
-                published_repres)
+            thumbnail_path = self._get_thumbnail_from_instance(instance)
             if thumbnail_path:
                 self.log.debug((
                     "Found thumbnail path for instance \"{}\"."
@@ -157,7 +179,10 @@ def _get_version_id(self, published_representations):
         for repre_info in published_representations.values():
             return repre_info["representation"]["parent"]

-    def _get_instance_thumbnail_path(self, published_representations):
+    def _get_thumbnail_path_from_published(self, published_representations):
+        if not published_representations:
+            return None
+
         thumb_repre_doc = None
         for repre_info in published_representations.values():
             repre_doc = repre_info["representation"]
@@ -179,6 +204,38 @@ def _get_instance_thumbnail_path(self, published_representations):
             return None
         return os.path.normpath(path)

+    def _get_thumbnail_path_from_unpublished(self, instance):
+        repres = instance.data.get("representations")
+        if not repres:
+            return None
+
+        thumbnail_repre = next(
+            (
+                repre
+                for repre in repres
+                if repre["name"] == "thumbnail"
+            ),
+            None
+        )
+        if not thumbnail_repre:
+            return None
+
+        staging_dir = thumbnail_repre.get("stagingDir")
+        if not staging_dir:
+            staging_dir = instance.data.get("stagingDir")
+
+        filename = thumbnail_repre.get("files")
+        if not staging_dir or not filename:
+            return None
+
+        if isinstance(filename, (list, tuple, set)):
+            filename = filename[0]
+
+        thumbnail_path = os.path.join(staging_dir, filename)
+        if os.path.exists(thumbnail_path):
+            return thumbnail_path
+        return None
+
     def _integrate_thumbnails(
         self,
         filtered_instance_items,
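Taken together, the changes to extract_thumbnail_from_source.py and integrate_thumbnail.py above mean an extractor can hand a thumbnail to the integrator either as an unpublished "thumbnail" representation or simply by filling "thumbnailPath" on the instance. A minimal sketch under those assumptions follows; the plugin class, its order offset and the "thumbnail.jpg" file name are illustrative and not part of this patch.

    import os

    import pyblish.api


    class ExtractCustomThumbnail(pyblish.api.InstancePlugin):
        """Illustrative extractor; not part of this patch."""

        order = pyblish.api.ExtractorOrder + 0.49
        label = "Extract Custom Thumbnail (sketch)"

        def process(self, instance):
            # Assumes a staging dir that already contains a rendered image.
            staging_dir = instance.data["stagingDir"]
            thumbnail_name = "thumbnail.jpg"

            # Option A: register an unpublished "thumbnail" representation;
            # _get_thumbnail_path_from_unpublished() resolves it from the
            # staging dir and the file name.
            instance.data.setdefault("representations", []).append({
                "name": "thumbnail",
                "ext": "jpg",
                "files": thumbnail_name,
                "stagingDir": staging_dir,
            })

            # Option B: store the absolute path directly; it is used as the
            # last fallback in _get_thumbnail_from_instance().
            instance.data["thumbnailPath"] = os.path.join(
                staging_dir, thumbnail_name
            )

Both routes end up in the same lookup, with published and unpublished thumbnail representations taking priority over the plain path.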
[base85 binary data omitted]
diff --git a/openpype/resources/app_icons/3dsmax.png b/openpype/resources/app_icons/3dsmax.png
new file mode 100644
index 0000000000000000000000000000000000000000..9ebdf6099f6ac279ebaeccdf21a658a34da5e7b6
GIT binary patch
literal 12804
[base85 binary data omitted]

diff --git a/openpype/resources/app_icons/celaction.png b/openpype/resources/app_icons/celaction.png
new file mode 100644
index 0000000000000000000000000000000000000000..86ac092365d009052c4e0351732ad1dd067329de
GIT binary patch
literal 4012
[base85 binary data omitted]

diff --git a/openpype/resources/app_icons/celaction_remotel.png b/openpype/resources/app_icons/celaction_remotel.png
deleted file mode 100644
index 320e8173eb90df65e0363f8784e4f034da0c76b8..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 36400
[base85 binary data omitted]
zDkwwcPkcyS{AEx{0F`Z3n0pWX^zE*2c^U2e)WLtY4u(6La;PB8{SA~+i-ZbxQ{3n;Zts0*=B4FZ`-s;JWGGC@{?GOfcQ(I`+)Sc z0Iq0MbjjGGAtN{8Y{u1B)6~UH`@b+L1EslYGa`jen_=t&!yMbZ?~-K7$&A`ljoY(E z;>{iWY3+u8mDvoCW?q`-%UX3Jq7wJU$XSt~-uw1H#2B0|MCprEt}KXpcf|9?JF?t! z--fD(<^TJZsKx?oxyH&VFnEq1TPTM!*?A03QJ$k zD+kY60u@)YOlfmqKtfB@CM3(vZPI+yXZvz)fY2;;;RI6~26k&*1O zX_yxnDc~s|dN_c>c8<6bNyJw9Pt`K7R7{M7<|9|#gk&0rxdSh7e^ns7m!V!q3ng4; zWc>9nZU4$k0RPhBsy5Ul>LQeX;_iLa@iL)yF@_@^%>LFI7&;O(DEIOQ?is_vbZt9+?JbyWx}O zPD{>PS!b$d);?QbK0O^dDuy)^>6Zmpl_rKX`j2J5Juj0~;=_N`Ws!%B&*je&7D3C} zY5O*^$u$-o{~k3QQo!jcB)CLPpB9hMF0h3;I_Mh9pB`-6-jPvM7W=0Ps~%4p6K%N0 z5#kktIlHr)3Ks9m-wmH1yUHVf?*5zWWZT3F)^BDkj0rJXtHnTw__Ate<@aTdd@HD= z`JJR-l5-Ur10K=?C49u}ja>6m>Qv-)%7GN7k!SFpaLRD0cRep#(rIj)S{@q0_guxF zy51^sEO%;4H>~KOMCy&N*}RTCB96pdkL{b%voI&p1}xae5GRRFuc(&#qoi+cpors{ z0q_rHkL%E|^+f~IT>KsVSNUDyXcP}fGnp~6NP!JVKNrG5;p+A1H6IIRGa_HTL$fq) zslpII^aa_>CY>E#eg5mp4d7Kfws!Az+2l(D*DABh9@-v_WN zx>9;(6ZZ+@X#89%6Mv|n4KIseDn*s)8P9x{xg+} z3)aH_fxEh!!kCj6iI^5D-yWgkM^TI;0`1-YCV-A@{FaGRPz!OGSno=n7{TEEWWa#{ zJ@Q)AHKP2nXm3JO-8sgjXK{T_gSg7C6<&d$y*D64P=OcmzKSL-%NDiWx^{zGxnc3e z*?Bv!e^Oc%TJkI2Bc3r28oy8^NI@Ve2a}CCY698@+Vt@{TLEpJEvoR-nMI;~rYRAu zgN0a5M>GM>4tHPEvxx$sSPO3kRwx8TLw&VHT#Kgguwu-6Bnti5z#4*W+Bc7SWH1!t zlMp8cZS`#uE2LTwjRLl>JUw;W-t2RnJ%Z8<&&rm(gWIpm{9C~Va-9^k#5$^1v2EkG z%v|S;h+PPBHXoIAwGL}f5!J{%bk)uMY+lZyULK)BRi{!f8ye zBI}myoRx|jfhi6#4@{@W1S>>);5h`|Ak0r*L@Y`JB2L;5o=MY z?7qR$LE!DHEZ=XOuVqoS2D`B^&(F$RM+=gWqAoeFGWI zUc(90(`Zt}=msQNKI#t3qNv0qL6jm^v~$7!LosBI>S!BxhrP>u+6~fN62|sXloY#R zzswl9Ysw*L1MXjjbk-X;3AzWDLGXy(2(}95w`6DTe@SV(5*wku!G*!A!A^Q4E$)(j z8Ao7`;}}F=>N2ayMLd3E?v1^K*lMd8+T|tZ)7*m<+TaMx$qF>TQcSiJPgTSCR2vmv0+w*^Hq{njxHLsz5*#B zeT{1>_r0wAtuO_;9|uRuID7YdzlSeONZdsXGdt~X#!{jePlMnRUMDGq%@#Y4%DF{; zY`8n8>vhl=KFdNwICOi{-qi4^_PHGjJ98vg9t|H`&6t7ZzvUDfvzP#XK! zSUMKx2SXdg6fY9BI4)|A!`r54Jxf|c-Ho@7m6QF zo0ld!+{{SgzB`|(Lha1%SOaho#r4> z6y`Nh;{WpRh4684FdlKm3h=|@X9h!Ce?Mj5nMTlTGRBt?Xxzn`W7d(bwEu_bd{lYLhX(T_)_3lsy! 
zC&tR*C;mfm>c8J~31Jk3A1k;%zPK+9(5DQ7%u%QPdBrUWvG4fZP=byVx+KaV8BkI?AK!*!>`(uxVXt_H;o{) zB~Du~-6YLd>yi&?fn|F9NMvr*o$3MjS#_pgYMABkA*;GAwG5ioBxrwM`S*I&r;(a_ zr8^hZq=)^cT-BE*S0lSYu_cS{gMCdg&$qgN6`RoS1|F$@5_cVwSP|YV@W`S^`z(Hk z(DRD7n8z;u3rPQ|GO4RVQGZ`KI%T@{GxsmEy9BP5Ltpyrp@`>1@lYZS;C}KY-WNAt_iRS=?~#QL#Oj_mXeK=I*POpiA`onPB(tRg&9@Zln|MBiP+)=*6{_4Ec-|c4 z5^b~5jHrB0;2}^^cf%_@8$IfnLgL^(RQ3-^oR+%Hd3DQd9rUj$Z^^ zSSKdU&<|&t5s3)6YiC6fJ>g8BuX=nm@We64O%QMq z<9}}9Dl?cvB?rl!!qv=6m9zQ7;njp<+sXUvP9O1-yK_TRY}3LW`+^TkSVc?0;rjHQ zoscgeh(L6ek9No=nQxbkF@C~X_;#RC@O1~vabHjb%j;$h!NIbBmH4(a`Ps#(jD#5j zNv-b_Z&D4RGfKA~_a>CCSTUtq)Ht&y*j{&HF+8MO%5p472!X)WAm|*CW_y*l{I5T3 zZ@9TY;dk}X2G-i>9B9hK@g&>41GBSR?n|Y$YOKu86OK?P2(YD z{GJysnho zuxJ&kE|gbmK`0dV8(+4iF}VwK18U?p=D{_cI{gSjb2MhZssjbN?Ke;It)FyS?4TDO zFKp;RVRAyXTttgiKfzwBRJ69s$&Ji&JhdPkTUhc({8(?(V2Kt4YP!4e`BOYiV{_K$ zx2o{yxlwJ#1Sv-}%rr^Hh|OQYBiB`YNvz-e zTv+vD%A}npsg(V(@M-$nWGYRC0JL-pcr(pg;JTMT#{>h(Yac6A=j0|_83Ho4NTq%y?83TPO zYzk~R=P?^v-^W~k?-|+dMjd1E5!vzSf}w5Aa%Cj(7mlv+1wt)H8DZ1Mf7jn(b4uIw zp__h(FAqmgNoGnt$gU+)2Xxe6DpE`Cx#8LOmED%*APX@^5}mZ|$@H*^QzYrW(RF}0*khh<%s}0MKTZS~|cHuokLH^Gb+KolDX4dwLO5I5<`Q?zeGM&vb5|=#Fohjv5 zRtu=L$#GygU-4qEwb-Sr^M2fK2H3Wo?1xv!XV2V6PK!#p4JW*G{(T3F5`l$!wVY$8 zzaZUSBP5RVK4M9WV7x)eZ%!n%=1cMzXKRt4s@K4Q)GnWI&ih*vwWwRw{vcVRBmO9U zz!h5_)%<8yGaQJpagAttw+LNB9(T91bVTn{Of#rHn-Mr4X_EL7`%YtLoS#6m_IJ2~ z7pj8E@ii~V7MV`tnnM{v$Z=*Yh4%BJ-92U@FPW-7NnfM%H*Zj?29BAWo9@ z-vM~S`YZYV)b}`6jb8|H?_@y5rgswdNhxhyhMzRx!U(qYV%|TQ@LhWk^u-Qwdw7OMzn*$a)-JFaGBX>ue>J&?q<34CAY7Mo7zzV~UapB52% zc-j~JCvwH&ph=4vIwW9|45suCrV1#^=uYI;!szE$;JYH7>nB^p0OLUMINDQ3D+OkH z1%PJh7FP?a3%wuvWBYDNVa z|F@s={+~N&#}NU-0?*jhIj=|IUbJ;CM-DVyFIL#BQC7-%FJsWCzjjtQ;#H@&NdkvB zTZGyn-pm=H$GaK>gY8ccSc>y|Ge5c1W1g(W3rCN5JaCfEh1nf)Gbc#nI=7yf2)4TB z49sf7z^uB-lD+^h*VG`w8z~eNjjOCzVVv$|tRpiy+ZJj&&aafTp%1WzMZlQv0t?+q z1I!_4kL77X5F|F1)=qL9=g(q=Fi`fWBZo&8A%Xn)z|$r1QRPYSPy%RY9T##86!W+k zMonNRA(lE1$0WI)z(;9fDsC2+6#Zs~zfitJr) zlQs%xmoX2Lc3(c;01MZg7As3+K7(Y|nx*{C(X@#m_bjJIgwAV2Z>ul!L6!yr2m=L+ zfMEJ-qJZvETucod&JGPa6y*ESQzVKYwLF(}-#gS-H?XY_OyRE;{q8vB3+(NA&;#U; z9DMC2CCuZncUE!^g1|jC{=mH5mGqmJ^<*}3JePe9a z)|f=JH_#50Gfj=4SBk~AAs#G?Dfu{*HCu_C#Vq`e>AT!S-~MXk?nqY}&wv5KTM%R+ z9zizXrNcg=5ITmy`R}KW;pBy~z6QuecE!Fe=)NUK^u;};@UpiU4FM76A0EsWtp!PN zheY)?u9S_H0R?i1^g%Zu2CA4bYhH@;GxrHtS{97f=Rd)`n<;qj$r<)hJ@dJH&-6C_ zuEz_vXNT78Sx@Jl?BPK;RtWmae4yJt246NPy|6bvSoI%c!F8z~ndxxlJ%Lm3SwvtE z@zVM2Y%~Cv#duN+I>|Cmhyn|O$M9be9dlYRowWN$ay|+%%V{tu==@64J9^a+u6t+w z!gcq(wI{G+@_fa1$I>&j{em$YpMSlYUg;N1sSLj973Rs~>9)?fjk_sWd!W_r92kch zJplrGt|B$Mm&YYOy6Wg{oT8=2A_wHa?j!D+v+Sw-6%&Rs7N+G%<05qygF|G7_0N^! 
zM(0x&NA?iYrMXuU=h(|nPbZNG7kv5O0&I0kmSTzfyS3T}^$9{0_jB;pOh z&@fK{yvkYMqz&$sX8&!h^^yacQx%PZ?)kwl!TE$Mw{(Ed8mhj}ZX| z#j>8v;tLrd_Zx>rsJ~g&tMeh>3Wx0ElCUt4Zd37VUr?N54|@V;a5z4X;3$2UVbE3Y zrJj9dqtUr$9rm~$jSIFB$z#=sH9bo~j>(bCTD^pCw4{-a@Nnfcil#X=v%a8aHC5jw zN#+^@rX`L&-89`fO$oeF>v&Lh!u3^Ls6;+QGv`)Hj_@MekSpkvMsmY-N&LO*o)VK5 z87+-JvFLpds2W;~fX^$yPMJdPYA9xZ8~kfg%y=LEtH0Xgckm%z6@z3xe9 z#klN^U{=-rp3UnPmKTo9a-5SrdWdjj^WG6Lb03C?VygNTZPIci2!=~#kZKyHlhxdP z-@rebXw^0n^UJGt7}_p*hD{DQYZzPY=f@~m5fyz7WQQ3^55rQk_Iq4tuuDFP0b5>m z)8~6-e?(|+x%dZ*-aY|6VL?TgPrY>ful!!=HAVox>7e@wa zVL*4E0@Ge8+y<8Qg<5qOPQI4FkQrHq%{BX7vq)K#p-@KG(#9%1ViV+VHx^D$eq1>r4%YqvT^u_j}XDUu^zdeR;v zH3FT&&bgr;<6W|ID8`1|8J1`Q)g0%Ka@q*ce#b5;Rvg=jEy5UUISL|ag9?;0!d|6E zH3>PUI?DqqZ~Dgm4*VB_iwUBOM;dpt+GFJ(=@WW!RMLJ{z9%0RF?t zC@@ZV+uJW|;A}cVN0g4VoU_wip8&ytMYyY5x>eZkHh&rxup4G}$7=0f72>rgCWs0; zSDAH({|!ohrFD)Sl=GqZ9GlGFGDzO)L+8q3b4a5EU0U>Yk#+AcJ26+w4}o$qQyzn< zk;)^twQWmCVbGau7C$p<-=Aw1cVN@>4ZJ}VD|D~0B!upK-wzHELUa4Qtio&n9o(*{ z7kN16{tz&48z{Cv)Q>RI^?156_mQgHV|N2BfHqof6D7Zr&bRn0;xglOi9iriY59y` z%n>bK$SRxrNL>QtkFbtgHv@kl1>rhEenbf#1%lcDof%L{&mSrL=v+hwb^+l#q9B1l zV6LbS30haI=v_nQsKOBx&eT?*(Xx79Y_1-XK7sQ{hL{;t^xV0_b~k)bq2(7VEk2mh za2_C>j%xc-3OE`a?UW)!#<$c(G#W-z^zTHUL3+iYX?-qeAZ%)b`cI*I)`cGXq{90q z1y~?zuW|2&Iea&(kIvx=S5$W>C%EhA7qRY6v`9!$5I+(a8l<+_*>I(_LZF?p={n^a zjLrYK8lhi}IQ>G~BYta~YxE%*)OsGL1%)D0_+#GGPA7{crBee7{fL>MwAomsguBAL zc&5Ft!0^A{G!Tc;{2i2}{o%F4Y$P{?dxkvYTJK{qhn699QZ%fX*L}>u(yadt+P-@g z{Yl$c%13m(EVjeZPg1;HN3j-nr)uh!j8_;lGriY>n1Sm65Im>wKxz3UwcA)0W0>p- zK}?FY_~vG$+%M0gxV0z7J6;Wg!K`mJR$*Mx$VR49yesC1v%c~E^ljX{I3*%zwysqJ zfW?9YOz!T`Y&;s5fkAU(hsK{e9LCuC@q(U*q7UCQ3fRdBq@HUbLh@e58fzjnMeCrr z1mJ0=>vl;L^;8f}J@#XG`!%PCPD$vt%sh8wKrGqcl)J+K=SzSBRj!I$e0=K*g)FAU z1>Y6@EbSA{c3eJTau_cALwfQMSkV=i0B!bh9G-c?v-`S#rEKt{Ot4B)pXQquHnm%8 z4OI8=;$>p(FjN1ODRx!g7;U_z@dajWGm8xQ6Bt2MNOR0k&Ygf)VdbI-JU(En(Jl#-ZD(vQw&;ki&P;tXaWGQp6dNRefbWNrkYLM!KLgb^YzbkW$ucu(pPEf<-NwtfZ?K=X`zLD&vxv% zkXdW{6W?hAlsWftL6^XLZ+erML1skdHZ2MrK7j z+m19B9kJ3f>;yO>0~LSbi2wE^fYT^7py^^)tQQuX1x3{$AWJ58Mq-^R1D{RrFHlA5 zZXKDrW)0J)1rEE`Ao8^S+6^yMU`eFF%1IRw__}n5aBtP39u!R<=G5F)c`+F*Lqi-j z3|8sc@*SZsr!9Xh37klr-}&Rl-H?D0t)ZnOosSQN82+*db^XXt+vOOWAzdpjUFDzP zyS>5_yw?D?7Hw*5rpF7j{;-_qVbQv4K zyAgU}Gk=qM!LipK`_nZwyqa_T`Ms)t0Y&=x%*u_!2JW*4%xHN(ZvVGuFD4q$Emx=H zp^@1IEqHii%gNzt1@MDJwIaStDZ5=qAw~$oCIQcGxw+{h3x81(LjER;rki*ls#$1d;y}yQUQ|CnaZH~_JxIsY-;$yDjQ$X@Yami@L@a2d_ z=*|*_`OE)A3JSGIM9710NV&tjI0Jnr5auBKzL{b{RsdMT%GFBLwGo%1-|6Vckb7G4 z74tw|j-SL=Fd0PtmNX}UX1-lxVK{Lut7yx(<6Zn9?zirn1;g1v64S42yG?SG43n&* zhM#9&u1-(Uh;^?@YZY{R7*MPDRaI3j4?iHtN&K6lS=EEdEyuDWGU)eScihxrNyWsV zkO)h-Fth!cWmzOLp?DEP(V-WU`3LF82A+USC+*aAZ@s7NkeZrm<#o4NAexN=@ZB=O z*bH9m@hWXq35l@qBmTD)*{;0s6|n1nHD;K@xV-Q6x5|p*qJR};-t8J%fNOz2mqUCw zmdp40FyB`VrVPh-z%icf{dSOX_%X*0kaB>;o!NlD-xOO4jYeDUo=nd6d}D2TuzlVW z&zQ#LF|JFjhwAJV&@ z7|8`3zfNi+4<|$s(IPKY>ue!L|MPJ(8U_+n8jg>~*HGfC+H2-xg#`Rt&H!XO^3R&b zMm|~QACtsRTHD%qxN_J7sf>(rJ|MV>2=0G%U6q@A8_U`xw&OK|ydH}kJoA!ZF`I#_ zpE)v7^&2Bu6dOUUy&-IcSACs9mM_KFf*bX=JteR+05@WUt-T6MB{jIYqQC%QHiU2YG zp{FQy;y`DNvv^EoEonpnF#(j*pZ5YLzF>tZ-ob`}@=I$+l^Q8-R3o zcD1Vtq#79jfS0j?9d%(;y{V+b3uCmgPYQ^og{P^THsMt65i;@AR2)FUfNO`U(2OjN zA8!`FLl%xs+ZB+Fky;46O;?_;I#*iC-|x4M&_%a=xxw{651s|C)#{}HX9suZAYXT7 zKmdRgP6#iS?{(Ak(p$QHsZgUExTp~^*B=?TPl#2{9>&A&?9K~YwAi6)1IP5*!duY zB`=9Hr<8L+lUm$2y|0c}-*TZycma>l*cX#JZ6CJFK8%jn`5B4uZtcZD{SGtyMyEFX zbc^9>V^#Yz&Nb5O3(qO7Nk)k3x{9UwFB>UIr*q8g z)w_;ffrslmGxApg(77mmvpmzCQfn zCCHH&8e5Eq-Mx=G{HyDQ=LV878Vg^P=bvsr6*3+rOjkAiu^5!c&&M(DY9Zqb-tiIf z5!t8$=|}q9Qf%-ViR;98UxpW&KTPCy1z2+W^`+k-TRB9vdE7k%Ay^FLLhkT(Cz~HB 
zqoZIFa}WR&IyV5QvSnNExoo&OmY#^9d*O8G29X-F&5OI4AB_po2LiPAT1?l=<4&GnFOtMaGP&gq%VYQFM%Hj+9{0<;?3V~oP7aQnA!wp`Y zn)`;I2T2C8gczRb=B+QQUFgV}J#BnGgH4VM(!z_Sh)NBK2mbLoA35^=o`;KeoV`b? z8G!IQ;|v7p7`y!}xCJeu6TE8i!c^D!UCeAX*^RUQ<{$9fRAju`U~{a152^Xr4yeol zlEEqa(oPE#BhL(|I5ztX4ryqDLqCvb5S|bel9dxhSOzXaAwkT_LekN=z*5cyZ$1kG zw;80>#W3(OQFwd?s(qX{LVyy9o2$w!$!clEP!)EIegr4Qxsq9>zkbwHI?-s}>t5`k zpR{5u`VkVNc~gbs`0#{th9~l$=_}n#_tsv9aXR_vaLd4m#zDX7T>^Y52_R7ptxFV$ zo0~s-fed?Z^MJIq)%{RRW+{MXbvpNhhcrx-Cm?SV^zm8xZH|p+(V#W!9FVqQNa`Og zJPA+<8=UYgrpbO=H&4Bfa^sIw`bMcMK&Pc)8 z{RRynGrJbu8HhZ-d&2)`yUz!NxlLcR1AMo{o09*APi*wLcj6z3{5J<_?Z0WNFF}cR_iMcA`roRWb@#d{94i{0sn_HdLldmzTa!HS z$rj(tYl86K^d?{Q17SP@KqS%%G^u&heD*ioA34qubFWmLTy2J>hJ81(^S`Hb#CYD# z17c5Te+o~>N4j6$VKUdyWFyM#Z|S19tFFnwH>9@Mdrq-&C$pmTO?erfb42SAW@^fI zeeciaH#Y{>_uLBIxTXH<#^OAO?ja+vzcmR&A)0npb}T<9DRM6T@oLQaXY%#9V{@bgjOWzkKR+KkJRNBZ@r%q)+*~ryTo5j_?GKk?qIESOn`4b1oXl!$U8&C zkZzUBc7_24xLmDlsW&}$E|Q>4v)eUOdpxhHsyrD`jVXsEHOEq% zNq=5;C^d?rjK+aimU5LK5sucNljTbQj4)QFtY1_Uk>D@BV8o~a(c~t!gkXq!<@ggw zp}ONzq3Ztj*hIZ06{lNv$te@thgV;zKiG5`X+h>^t$)15D4}UBs)WWC4K{)32GWN8;Q9zVS=M zEL(d;WN8&lI(hNmA1>?P%?S5Q*h z`nj0MQORB`wduGMD}vXGzTl0iD0V@%>|1e z=S9^8*6YS8Y_L;J9VYR2Vcis>QpLE(SN0$Oevd8L??$IUf3C`}`?J%k<2zSZQ`{%T zRE=nlvy?IuHv;!h=zQMn!w+j^^?#dATnP0`Ev+-Xd{?}+^L=lH)uMtX68feuF{)V_ z`KS~qMQAQ6tOoKf(Jf-wQ2O6(wHoXdl7?bX+r4CxvSkMt>$loER`3eA>U0!Q+*CH) z1b&i`VXQ0XvXK%+ueclfO=eGLKfJtP(4ajSQjIV5|L<0S@mf)3J9iq|C#W5(_Gq7KJ@5i`Y!qm3$NRLZ9zr++Iz4ZtejS?EM8Ke1u*GYf$RTCn*uGo4ThqBazQQ(!e)ky_Zxff%rsXcGYMvIobg@ z)m}~q&T9C^-$T+qaLt)&!aHq?Dhzh%<{gp`F%s2gleG43!$2DA{>GwZGk&^AY z>Ra8M&pP8^L-CJ4d?649Uw%B6;s?4!Cz)cl=qYEf6TyJLit@+_g}*x&Z=OhNJYQCy5fMT zSeLw$<$QckVk65;v+&%i&e}u+zQh8En)Uc-&cD-%{^2XjC{Sfvfa|ZgNPhonpleq) zH9uxRnlLp#M}f~cQzgIOkcHKtYjVoT$X3@G%>QFX4_%UnJt%qP%)`Gf1|*eY~CO z3?^40GgLMev~u(KOd$gGRR}DQEfXKW=B#IH#!4V`wkL8>vr3Cl7Y&k4l!#CU<62_W zA(I8epgW$*{iZSAqyun`_j(5}kf9h@i{9bCL7O2>f&sE}ZNW8;T>E`D-BC06TMl6Gi zwW|C$aW5sRLnKRr4I4&&A|pMA$zYLQxme&v^L@3bl>w?39e6fZ0M0%z6L%wMY>@S~ z%H-`|dtoNVqHG{DP;I=iH&d`?R2w!Nq2 z%;{kSg4K?!xr_-7~nQ^U(0V+rRwv4LroD)!X5qw6E(vW@x0> z^?A>qY&~?=LAmbI_tEmVeCNhG52WP6zM$DWu9X^Kg4I37J+9zZc+_?BhwTSCu`R^N zyyf-SR{Z8vaQqDlnJ@~q82+m+Rk0mlCFU{_%~%I80E_;gx}Pgwdg?Cn#``S9O9JIa ztUfG6s1v|(5E2^x%$VCA*tIENBtW=tS9M^+e!>3LvZU>}fg_%)`rK$SPnpxcrJt+A z>Y|l-*+p>*dnKtGbx6Q?4vlx@`3RBf&1>3)Z%bO?2V~X3hssb5x6vjA@E+n>6Awt< z<748U5AKpa_a%^?_hWXDY4Y=z-;ySe4sUOK+C(Ft(rkPrP#9#~5W`WhC&Rtt!$ES( z>o{|+*)n!{s7pFX6W>;hRd*7Udnqvyh5$lMB(co{9$uzR?Ylq?gz64>$CA2%-_G^W z0p`|n7Y>at5FJx4c zd=NHJX1s}JrF{>i;E)URk6JMfEX$%Wd^yT5J+hwVCV&RYt6QlaFg|}*^<&i~+Gyhu zd-`IsycaeHb896urehrt9nX*wn3`H!(%YYcUc?*S zACUV|^23NcM_N`H{KXr%#(^#ByYcNU%;mYJ!*A&p~qDljU`7ydQjJKgG@~ zM2xYL+X63oU<6jg{MizDESuf6IQhfeG~$&-@}2JnpCj;{$lCmv`n(cKY%mfy(u+!C z)P~`x{mPp-p}4j65*(R#VMP>xdPZ81V&rUg-+QSxv+DG~Fr6)^YlQebuFce2fm)Ck zjv%)$w)6GQ3^x}Mic0lm=FuJQVNnncE7ty6Oq4KEDns-3+Mb}}*&}AgS}OptqJt#L zwK$b-_E9wJt?qAbQ6OQL62|bwvD*Wm7j%ddp%h8OxpfbiBUBMFR6pCxZ#fd^#OOdtMD%!UgCRanH2qxh$<6a^D6t9c z<%h0rFU7r=g~2%>k(In9x;7Jgs5LBz9ULqcn!;nnR+rjYfE_(_=HY#I<+-*MfbhHryxZFw)v*n$d7 zlWj9^3P;j0dzV#42|crR9b{x=yI(`Gr+huz>8+P7X$w(cd`ns{(<~y-4#zb?KDVQJ zF$#8{T!vg>e4z;vEa z^9-2soER$!B=yYDsNjV3UeeTmU)mD9FL8XTz7-cE@y~COV|tt~nQ?ej4Xk?%tXHfl z%5_K>|0&g#`q8Jk7p6Qr7VA0b)IzY+1Pi$tX4Ek-ThX^TUZNTM=oRjP{XU__i%zH{ zGj!O?%gb)IP}=Tbrl8=Sh@@h>`mjWb0G~_!^_AR{gz>f5-&_Aj^83odO9HI!9nm-8 z{eH$7)jxqO`uV-dY;AS*L?63!-%=w3dJH)|=^YN+nt1`nH*YjKj9S#VZn3k~V_rf1 zceM&5bb}7li1GO*oUOlDWAWT5_rObGfdX=;2w{t~EJiuh*}BFEnH_VqV;$ zH=xCk`%iku&302YK6ExD0(N+KSil?gN5iyx^p8z$Cbu_|gi}lfrl&7sycq>s+7$8iGV%+N>%hVJw|Ax~kK9l4W8 
zXamun>CYyy0epNPe39kV$nLT!~US2v2{NhJ6cShN5XZ zthN1IeRqv`|8=9Ju>i(U9)Xo!s`j?f?RdfN@6&x2utkD?ao;gOXM)RU@Zb2g7C9R; zN2DimKb!DwD%Gg~nPZ-!R!VyMaRSkSeqd3Iyy7aY#cLH8e= zRVT2o=2_#KfItXokLZY}GiB&gm{s~0FYU!KYE6?+jLwigC-5q#`N3eR<2+&7BgP^) zTSWZr*x1oz8JnOoHQH2$KpEcbJ7sT+VFC{ao!$4Z}fX~ufKkf-&(mAWQLA%GAI`7 zGd-FA`}=&;ffmBR$H%vYG;sWEvM$?@Wn?0;?KZVeI=+s(eyu@{|L4+x_J09w2a@;| zEfi(0kn$4?FM+VnVXv(ZJ6_0?Je=C{3bFbIz+__T=hf@R* zC=sq?>2(SS;cYl)O7os@pnGoBs#R+4+_|b>zkW$y0wh`pD_X^1zlr#UcpzROs(1y& zE|qR(jweERsX6<&e*L;yzI?eZNNV5`hJswT-Lw)E5F)yWw3B9;5PKDT1?RBrcg~zS znjJcHNLm4j^ZNOgoNsIh%WoBxD-M3)D}EpGzrUuYM(-53W5*72SuR`;B;ppz^?X9> zKmj2dW(?ny=3U_k98Z}tMT^Mn*|RAi>?2|NfM|dyfan0rawmv=s-qR4fDla=5_luO z752q1UI+;0N6bpUG+68-Y<)m5FwX(;aBLCcR^OM$TR7?|AVl+uNGEB2t(;Qv*nfT2 ztXXR2%$aJ!gbBPrL}+m2$Pu-B_inXo*Dh6C8`+)%D-nMrHt8X+5Cw#Ic|y`pnqLaB z{8x4C*ij2fPEL**H*Oq%@v@J=_Hq39aaCMgtcByzqet#jSdRM>u|?N7x+x&Us}qvJ z()?WbO&oXX)Jc0I)2B~Y6DLlj!D1hQ=>s0eo;`cCcszUCWFIZn$AJR})ZV>&RY^&Sdi?mYdwQ$z7h-$<$+aho=6w1_}Vz*u#*XcXB;bM%3w%@7O_;_>X+Gxw=K34cZ5pnwp;77}bU z@&n;=_Z&Qtf`S6oty?!5MfUO1`nY@duFB8P*Pe%gKM3CuTT{W&NdY0kBP4yL`G)X& z!hXYt57*0nr%jut1`i%gqsTs*t&a;AE~tI`_UUE0*j8463Gv?v|1379ildPNLWEaH zP?fP*I10zTd-v8J37DOoO{2&@8r?@}X{q)$z#BJixTjAEbHuh(bM#R_h{O?+9BEbw zbDd)~dGh24QEQ7vk$uE^&9!US)WL%XBQ$KtPT_lUT+Go%0U;7sNQOzXQaK5Uz<>b* z)TBw1bQNGM0mg2LG`#G?1t!KJ7KaZXR)-E9Qdh2AF`q^P;QL}r&T@27K#1fK5>#bG z^ zZB+fHR*O}AIeCpT&efrSwCSV_!1Yf&$tu8lm^X5(SX_QmnBrFko z@r;DcDkzvY`Le2C*)`989!rGN($X{s4jib7bDcYPPHF)`IO@`+OX~9F%et+stklF( zTMukk^(&@6;T#4*0U;7qOnOLzZ1~~AEMcZ2F8>FJ2QuU#Bq*~2A{5rQZ{LKgDvT<& zRaI3w^n_9&AVdNg-wz%<@OsN9!gInhA*vUim9c!AcNakcA(By4UYBNw!wiRSc)hl5 z+qP=-=+UZY&z^dvSC1Y&G`n~2uIE-hO{|c*b?cTodGe%s z`t+&apExQ+DfDvTc~Hjfr@U(j3J5{?iwb7iv9}4tWuVg{6gEEE`ye35QGo~J5-Y&k zw{Ner>(RDr*G|6MHCaL?TLOr)@99 z>rxE16}EPW{eM5_{7DRwe+!>GM69XKVXY$!HNr>22TqIhZY+y}+W`-vTE^NfemevO zgdhllNP2vpmw+G$f}ns91VIoK5P~2Gf&xMi1VK<_{udim}>.{ext}", + "path": "{@folder}/{@file}" + }, "__dynamic_keys_labels__": { "maya2unreal": "Maya to Unreal", "simpleUnrealTextureHero": "Simple Unreal Texture - Hero", - "simpleUnrealTexture": "Simple Unreal Texture" + "simpleUnrealTexture": "Simple Unreal Texture", + "online": "online" } } } \ No newline at end of file diff --git a/openpype/settings/defaults/project_settings/celaction.json b/openpype/settings/defaults/project_settings/celaction.json index a4a321fb271..dbe5625f06f 100644 --- a/openpype/settings/defaults/project_settings/celaction.json +++ b/openpype/settings/defaults/project_settings/celaction.json @@ -1,13 +1,9 @@ { "publish": { - "ExtractCelactionDeadline": { - "enabled": true, - "deadline_department": "", - "deadline_priority": 50, - "deadline_pool": "", - "deadline_pool_secondary": "", - "deadline_group": "", - "deadline_chunk_size": 10 + "CollectRenderPath": { + "output_extension": "png", + "anatomy_template_key_render_files": "render", + "anatomy_template_key_metadata": "render" } } } \ No newline at end of file diff --git a/openpype/settings/defaults/project_settings/deadline.json b/openpype/settings/defaults/project_settings/deadline.json index a6e7b4a94a5..6e1c0f3540a 100644 --- a/openpype/settings/defaults/project_settings/deadline.json +++ b/openpype/settings/defaults/project_settings/deadline.json @@ -70,6 +70,16 @@ "department": "", "multiprocess": true }, + "CelactionSubmitDeadline": { + "enabled": true, + "deadline_department": "", + "deadline_priority": 50, + "deadline_pool": "", + "deadline_pool_secondary": "", + "deadline_group": "", + "deadline_chunk_size": 10, + "deadline_job_delay": "00:00:00:00" + }, 
"ProcessSubmittedJobOnFarm": { "enabled": true, "deadline_department": "", diff --git a/openpype/settings/defaults/project_settings/global.json b/openpype/settings/defaults/project_settings/global.json index 7daa4afa79d..89d7cf08b74 100644 --- a/openpype/settings/defaults/project_settings/global.json +++ b/openpype/settings/defaults/project_settings/global.json @@ -24,6 +24,10 @@ ], "skip_hosts_headless_publish": [] }, + "collect_comment_per_instance": { + "enabled": false, + "families": [] + }, "ValidateEditorialAssetName": { "enabled": true, "optional": false @@ -288,6 +292,17 @@ "task_types": [], "tasks": [], "template_name": "maya2unreal" + }, + { + "families": [ + "online" + ], + "hosts": [ + "traypublisher" + ], + "task_types": [], + "tasks": [], + "template_name": "online" } ] }, diff --git a/openpype/settings/defaults/project_settings/maya.json b/openpype/settings/defaults/project_settings/maya.json index 78126283d06..0e2f6b8b62e 100644 --- a/openpype/settings/defaults/project_settings/maya.json +++ b/openpype/settings/defaults/project_settings/maya.json @@ -179,7 +179,21 @@ "enabled": true, "defaults": [ "Main" - ] + ], + "expandProcedurals": false, + "motionBlur": true, + "motionBlurKeys": 2, + "motionBlurLength": 0.5, + "maskOptions": false, + "maskCamera": false, + "maskLight": false, + "maskShape": false, + "maskShader": false, + "maskOverride": false, + "maskDriver": false, + "maskFilter": false, + "maskColor_manager": false, + "maskOperator": false }, "CreateAssembly": { "enabled": true, @@ -258,6 +272,9 @@ "CollectFbxCamera": { "enabled": false }, + "CollectGLTF": { + "enabled": false + }, "ValidateInstanceInContext": { "enabled": true, "optional": true, @@ -1022,4 +1039,4 @@ "ValidateNoAnimation": false } } -} \ No newline at end of file +} diff --git a/openpype/settings/defaults/project_settings/tvpaint.json b/openpype/settings/defaults/project_settings/tvpaint.json index 88b5a598cd9..e03ce320303 100644 --- a/openpype/settings/defaults/project_settings/tvpaint.json +++ b/openpype/settings/defaults/project_settings/tvpaint.json @@ -11,6 +11,11 @@ 255, 255, 255 + ], + "families_to_review": [ + "review", + "renderlayer", + "renderscene" ] }, "ValidateProjectSettings": { diff --git a/openpype/settings/defaults/system_settings/applications.json b/openpype/settings/defaults/system_settings/applications.json index 03499a8567a..936407a49b9 100644 --- a/openpype/settings/defaults/system_settings/applications.json +++ b/openpype/settings/defaults/system_settings/applications.json @@ -114,6 +114,35 @@ } } }, + "3dsmax": { + "enabled": true, + "label": "3ds max", + "icon": "{}/app_icons/3dsmax.png", + "host_name": "max", + "environment": { + "ADSK_3DSMAX_STARTUPSCRIPTS_ADDON_DIR": "{OPENPYPE_ROOT}\\openpype\\hosts\\max\\startup" + }, + "variants": { + "2023": { + "use_python_2": false, + "executables": { + "windows": [ + "C:\\Program Files\\Autodesk\\3ds Max 2023\\3dsmax.exe" + ], + "darwin": [], + "linux": [] + }, + "arguments": { + "windows": [], + "darwin": [], + "linux": [] + }, + "environment": { + "3DSMAX_VERSION": "2023" + } + } + } + }, "flame": { "enabled": true, "label": "Flame", @@ -1268,12 +1297,12 @@ "CELACTION_TEMPLATE": "{OPENPYPE_REPOS_ROOT}/openpype/hosts/celaction/celaction_template_scene.scn" }, "variants": { - "local": { + "current": { "enabled": true, - "variant_label": "Local", + "variant_label": "Current", "use_python_2": false, "executables": { - "windows": [], + "windows": ["C:/Program Files/CelAction/CelAction2D Studio/CelAction2D.exe"], "darwin": [], 
"linux": [] }, diff --git a/openpype/settings/entities/enum_entity.py b/openpype/settings/entities/enum_entity.py index defe4aa1f00..c0c103ea10e 100644 --- a/openpype/settings/entities/enum_entity.py +++ b/openpype/settings/entities/enum_entity.py @@ -152,6 +152,7 @@ class HostsEnumEntity(BaseEnumEntity): schema_types = ["hosts-enum"] all_host_names = [ + "max", "aftereffects", "blender", "celaction", diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_celaction.json b/openpype/settings/entities/schemas/projects_schema/schema_project_celaction.json index 500e5b2298d..15d9350c847 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_celaction.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_celaction.json @@ -14,45 +14,24 @@ { "type": "dict", "collapsible": true, - "checkbox_key": "enabled", - "key": "ExtractCelactionDeadline", - "label": "ExtractCelactionDeadline", + "key": "CollectRenderPath", + "label": "CollectRenderPath", "is_group": true, "children": [ - { - "type": "boolean", - "key": "enabled", - "label": "Enabled" - }, { "type": "text", - "key": "deadline_department", - "label": "Deadline apartment" - }, - { - "type": "number", - "key": "deadline_priority", - "label": "Deadline priority" + "key": "output_extension", + "label": "Output render file extension" }, { "type": "text", - "key": "deadline_pool", - "label": "Deadline pool" + "key": "anatomy_template_key_render_files", + "label": "Anatomy template key: render files" }, { "type": "text", - "key": "deadline_pool_secondary", - "label": "Deadline pool (secondary)" - }, - { - "type": "text", - "key": "deadline_group", - "label": "Deadline Group" - }, - { - "type": "number", - "key": "deadline_chunk_size", - "label": "Deadline Chunk size" + "key": "anatomy_template_key_metadata", + "label": "Anatomy template key: metadata job file" } ] } diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json b/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json index cd1741ba8b3..69f81ed6829 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_deadline.json @@ -387,6 +387,56 @@ } ] }, + { + "type": "dict", + "collapsible": true, + "checkbox_key": "enabled", + "key": "CelactionSubmitDeadline", + "label": "Celaction Submit Deadline", + "is_group": true, + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "type": "text", + "key": "deadline_department", + "label": "Deadline apartment" + }, + { + "type": "number", + "key": "deadline_priority", + "label": "Deadline priority" + }, + { + "type": "text", + "key": "deadline_pool", + "label": "Deadline pool" + }, + { + "type": "text", + "key": "deadline_pool_secondary", + "label": "Deadline pool (secondary)" + }, + { + "type": "text", + "key": "deadline_group", + "label": "Deadline Group" + }, + { + "type": "number", + "key": "deadline_chunk_size", + "label": "Deadline Chunk size" + }, + { + "type": "text", + "key": "deadline_job_delay", + "label": "Delay job (timecode dd:hh:mm:ss)" + } + ] + }, { "type": "dict", "collapsible": true, diff --git a/openpype/settings/entities/schemas/projects_schema/schema_project_tvpaint.json b/openpype/settings/entities/schemas/projects_schema/schema_project_tvpaint.json index 20fe5b08555..61342ef738a 100644 --- 
a/openpype/settings/entities/schemas/projects_schema/schema_project_tvpaint.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_tvpaint.json @@ -56,6 +56,18 @@ "key": "review_bg", "label": "Review BG color", "use_alpha": false + }, + { + "type": "enum", + "key": "families_to_review", + "label": "Families to review", + "multiselection": true, + "enum_items": [ + {"review": "review"}, + {"renderpass": "renderPass"}, + {"renderlayer": "renderLayer"}, + {"renderscene": "renderScene"} + ] } ] }, diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_attributes.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_attributes.json index a2a566da0e7..3667c9d5d85 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_attributes.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_anatomy_attributes.json @@ -16,22 +16,26 @@ { "type": "number", "key": "frameStart", - "label": "Frame Start" + "label": "Frame Start", + "maximum": 999999999 }, { "type": "number", "key": "frameEnd", - "label": "Frame End" + "label": "Frame End", + "maximum": 999999999 }, { "type": "number", "key": "clipIn", - "label": "Clip In" + "label": "Clip In", + "maximum": 999999999 }, { "type": "number", "key": "clipOut", - "label": "Clip Out" + "label": "Clip Out", + "maximum": 999999999 }, { "type": "number", diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json index 742437fbded..f2ada5fd8dd 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json @@ -60,6 +60,27 @@ } ] }, + { + "type": "dict", + "collapsible": true, + "key": "collect_comment_per_instance", + "label": "Collect comment per instance", + "checkbox_key": "enabled", + "is_group": true, + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "key": "families", + "label": "Families", + "type": "list", + "object_type": "text" + } + ] + }, { "type": "dict", "collapsible": true, diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json index 231554d96e2..e1a30826168 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_create.json @@ -230,7 +230,98 @@ } ] }, - + { + "type": "dict", + "collapsible": true, + "key": "CreateAss", + "label": "Create Ass", + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "type": "list", + "key": "defaults", + "label": "Default Subsets", + "object_type": "text" + }, + { + "type": "boolean", + "key": "expandProcedurals", + "label": "Expand Procedurals" + }, + { + "type": "boolean", + "key": "motionBlur", + "label": "Motion Blur" + }, + { + "type": "number", + "key": "motionBlurKeys", + "label": "Motion Blur Keys", + "minimum": 0 + }, + { + "type": "number", + "key": "motionBlurLength", + "label": "Motion Blur Length", + "decimal": 3 + }, + { + "type": "boolean", + "key": "maskOptions", + "label": "Mask Options" + }, + { + "type": "boolean", + "key": "maskCamera", + "label": "Mask Camera" + }, + { + 
"type": "boolean", + "key": "maskLight", + "label": "Mask Light" + }, + { + "type": "boolean", + "key": "maskShape", + "label": "Mask Shape" + }, + { + "type": "boolean", + "key": "maskShader", + "label": "Mask Shader" + }, + { + "type": "boolean", + "key": "maskOverride", + "label": "Mask Override" + }, + { + "type": "boolean", + "key": "maskDriver", + "label": "Mask Driver" + }, + { + "type": "boolean", + "key": "maskFilter", + "label": "Mask Filter" + }, + { + "type": "boolean", + "key": "maskColor_manager", + "label": "Mask Color Manager" + }, + { + "type": "boolean", + "key": "maskOperator", + "label": "Mask Operator" + } + ] + }, { "type": "schema_template", "name": "template_create_plugin", diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json index 2c6260db306..9aaff248abb 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_publish.json @@ -35,6 +35,20 @@ } ] }, + { + "type": "dict", + "collapsible": true, + "key": "CollectGLTF", + "label": "Collect Assets for GLTF/GLB export", + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + } + ] + }, { "type": "splitter" }, @@ -62,7 +76,7 @@ } ] }, - { + { "type": "dict", "collapsible": true, "key": "ValidateFrameRange", diff --git a/openpype/settings/entities/schemas/system_schema/host_settings/schema_3dsmax.json b/openpype/settings/entities/schemas/system_schema/host_settings/schema_3dsmax.json new file mode 100644 index 00000000000..f7c57298af3 --- /dev/null +++ b/openpype/settings/entities/schemas/system_schema/host_settings/schema_3dsmax.json @@ -0,0 +1,39 @@ +{ + "type": "dict", + "key": "3dsmax", + "label": "Autodesk 3ds Max", + "collapsible": true, + "checkbox_key": "enabled", + "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, + { + "type": "schema_template", + "name": "template_host_unchangables" + }, + { + "key": "environment", + "label": "Environment", + "type": "raw-json" + }, + { + "type": "dict-modifiable", + "key": "variants", + "collapsible_key": true, + "use_label_wrap": false, + "object_type": { + "type": "dict", + "collapsible": true, + "children": [ + { + "type": "schema_template", + "name": "template_host_variant_items" + } + ] + } + } + ] +} diff --git a/openpype/settings/entities/schemas/system_schema/host_settings/schema_celaction.json b/openpype/settings/entities/schemas/system_schema/host_settings/schema_celaction.json index 82be15c3b08..b104e3bb82c 100644 --- a/openpype/settings/entities/schemas/system_schema/host_settings/schema_celaction.json +++ b/openpype/settings/entities/schemas/system_schema/host_settings/schema_celaction.json @@ -28,8 +28,8 @@ "name": "template_host_variant", "template_data": [ { - "app_variant_label": "Local", - "app_variant": "local" + "app_variant_label": "Current", + "app_variant": "current" } ] } diff --git a/openpype/settings/entities/schemas/system_schema/schema_applications.json b/openpype/settings/entities/schemas/system_schema/schema_applications.json index 20be33320d5..36c58114965 100644 --- a/openpype/settings/entities/schemas/system_schema/schema_applications.json +++ b/openpype/settings/entities/schemas/system_schema/schema_applications.json @@ -9,6 +9,10 @@ "type": "schema", "name": "schema_maya" }, + { + "type": "schema", + "name": 
"schema_3dsmax" + }, { "type": "schema", "name": "schema_flame" diff --git a/openpype/style/style.css b/openpype/style/style.css index 887c044daef..a7a48cdb9d7 100644 --- a/openpype/style/style.css +++ b/openpype/style/style.css @@ -1126,6 +1126,10 @@ ValidationArtistMessage QLabel { background: transparent; } +CreateNextPageOverlay { + font-size: 32pt; +} + /* Settings - NOT USED YET - we need to define font family for settings UI */ diff --git a/openpype/tools/attribute_defs/widgets.py b/openpype/tools/attribute_defs/widgets.py index dc697b08a68..1ffb3d37994 100644 --- a/openpype/tools/attribute_defs/widgets.py +++ b/openpype/tools/attribute_defs/widgets.py @@ -6,6 +6,7 @@ from openpype.lib.attribute_definitions import ( AbtractAttrDef, UnknownDef, + HiddenDef, NumberDef, TextDef, EnumDef, @@ -22,6 +23,16 @@ def create_widget_for_attr_def(attr_def, parent=None): + widget = _create_widget_for_attr_def(attr_def, parent) + if attr_def.hidden: + widget.setVisible(False) + + if attr_def.disabled: + widget.setEnabled(False) + return widget + + +def _create_widget_for_attr_def(attr_def, parent=None): if not isinstance(attr_def, AbtractAttrDef): raise TypeError("Unexpected type \"{}\" expected \"{}\"".format( str(type(attr_def)), AbtractAttrDef @@ -42,6 +53,9 @@ def create_widget_for_attr_def(attr_def, parent=None): if isinstance(attr_def, UnknownDef): return UnknownAttrWidget(attr_def, parent) + if isinstance(attr_def, HiddenDef): + return HiddenAttrWidget(attr_def, parent) + if isinstance(attr_def, FileDef): return FileAttrWidget(attr_def, parent) @@ -115,6 +129,10 @@ def add_attr_defs(self, attr_defs): self._current_keys.add(attr_def.key) widget = create_widget_for_attr_def(attr_def, self) + self._widgets.append(widget) + + if attr_def.hidden: + continue expand_cols = 2 if attr_def.is_value_def and attr_def.is_label_horizontal: @@ -133,7 +151,6 @@ def add_attr_defs(self, attr_defs): layout.addWidget( widget, row, col_num, 1, expand_cols ) - self._widgets.append(widget) row += 1 def set_value(self, value): @@ -459,6 +476,29 @@ def set_value(self, value, multivalue=False): self._input_widget.setText(str_value) +class HiddenAttrWidget(_BaseAttrDefWidget): + def _ui_init(self): + self.setVisible(False) + self._value = None + self._multivalue = False + + def setVisible(self, visible): + if visible: + visible = False + super(HiddenAttrWidget, self).setVisible(visible) + + def current_value(self): + if self._multivalue: + raise ValueError("{} can't output for multivalue.".format( + self.__class__.__name__ + )) + return self._value + + def set_value(self, value, multivalue=False): + self._value = copy.deepcopy(value) + self._multivalue = multivalue + + class FileAttrWidget(_BaseAttrDefWidget): def _ui_init(self): input_widget = FilesWidget( diff --git a/openpype/tools/creator/model.py b/openpype/tools/creator/model.py index d3d60b96f2f..307993103bf 100644 --- a/openpype/tools/creator/model.py +++ b/openpype/tools/creator/model.py @@ -36,7 +36,7 @@ def reset(self): if not items: item = QtGui.QStandardItem("No registered families") item.setEnabled(False) - item.setData(QtCore.Qt.ItemIsEnabled, False) + item.setData(False, QtCore.Qt.ItemIsEnabled) items.append(item) self.invisibleRootItem().appendRows(items) diff --git a/openpype/tools/project_manager/project_manager/view.py b/openpype/tools/project_manager/project_manager/view.py index cca892ef727..8d1fe54e831 100644 --- a/openpype/tools/project_manager/project_manager/view.py +++ b/openpype/tools/project_manager/project_manager/view.py @@ -28,7 +28,7 
@@ class NameDef: class NumberDef: def __init__(self, minimum=None, maximum=None, decimals=None): self.minimum = 0 if minimum is None else minimum - self.maximum = 999999 if maximum is None else maximum + self.maximum = 999999999 if maximum is None else maximum self.decimals = 0 if decimals is None else decimals diff --git a/openpype/tools/publisher/widgets/__init__.py b/openpype/tools/publisher/widgets/__init__.py index a02c69d5e06..042985b007e 100644 --- a/openpype/tools/publisher/widgets/__init__.py +++ b/openpype/tools/publisher/widgets/__init__.py @@ -8,6 +8,7 @@ ResetBtn, ValidateBtn, PublishBtn, + CreateNextPageOverlay, ) from .help_widget import ( HelpButton, @@ -28,6 +29,7 @@ "ResetBtn", "ValidateBtn", "PublishBtn", + "CreateNextPageOverlay", "HelpButton", "HelpDialog", diff --git a/openpype/tools/publisher/widgets/card_view_widgets.py b/openpype/tools/publisher/widgets/card_view_widgets.py index 9fd2bf0824f..09635d1a15e 100644 --- a/openpype/tools/publisher/widgets/card_view_widgets.py +++ b/openpype/tools/publisher/widgets/card_view_widgets.py @@ -674,9 +674,16 @@ def refresh(self): instances_by_group[group_name] ) - self._update_ordered_group_nameS() + self._update_ordered_group_names() - def _update_ordered_group_nameS(self): + def has_items(self): + if self._convertor_items_group is not None: + return True + if self._widgets_by_group: + return True + return False + + def _update_ordered_group_names(self): ordered_group_names = [CONTEXT_GROUP] for idx in range(self._content_layout.count()): if idx > 0: diff --git a/openpype/tools/publisher/widgets/list_view_widgets.py b/openpype/tools/publisher/widgets/list_view_widgets.py index 32d84862f0f..1cdb4cdcdbf 100644 --- a/openpype/tools/publisher/widgets/list_view_widgets.py +++ b/openpype/tools/publisher/widgets/list_view_widgets.py @@ -912,6 +912,13 @@ def _on_group_toggle_request(self, group_name, state): if not self._instance_view.isExpanded(proxy_index): self._instance_view.expand(proxy_index) + def has_items(self): + if self._convertor_group_widget is not None: + return True + if self._group_items: + return True + return False + def get_selected_items(self): """Get selected instance ids and context selection. diff --git a/openpype/tools/publisher/widgets/overview_widget.py b/openpype/tools/publisher/widgets/overview_widget.py index be3839b90bf..b1aeda9cd4e 100644 --- a/openpype/tools/publisher/widgets/overview_widget.py +++ b/openpype/tools/publisher/widgets/overview_widget.py @@ -195,6 +195,20 @@ def set_state(self, new_state, animate): self._subset_views_widget.setMaximumWidth(view_width) self._change_anim.start() + def get_subset_views_geo(self): + parent = self._subset_views_widget.parent() + global_pos = parent.mapToGlobal(self._subset_views_widget.pos()) + return QtCore.QRect( + global_pos.x(), + global_pos.y(), + self._subset_views_widget.width(), + self._subset_views_widget.height() + ) + + def has_items(self): + view = self._subset_views_layout.currentWidget() + return view.has_items() + def _on_create_clicked(self): """Pass signal to parent widget which should care about changing state. 
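The has_items() helpers added to the card view, the list view and the overview widget above all answer the same question: is there at least one instance or convertor item to show? The window later uses that answer to decide whether to start on the "Create" or the "Publish" tab. Below is a minimal, self-contained sketch of that decision flow; SimpleInstanceView and pick_initial_tab are illustrative stand-ins (the real implementations are Qt widgets), not names from the OpenPype codebase.

class SimpleInstanceView(object):
    """Stand-in for the instance views that implement 'has_items'."""

    def __init__(self, widgets_by_group=None, convertor_items_group=None):
        # Mirrors the state the card view checks in its 'has_items' method
        self._widgets_by_group = widgets_by_group or {}
        self._convertor_items_group = convertor_items_group

    def has_items(self):
        # True when a convertor group exists or any instance group widget exists
        if self._convertor_items_group is not None:
            return True
        if self._widgets_by_group:
            return True
        return False


def pick_initial_tab(view):
    # With at least one existing instance start on 'publish', otherwise 'create'
    return "publish" if view.has_items() else "create"


if __name__ == "__main__":
    print(pick_initial_tab(SimpleInstanceView()))                         # create
    print(pick_initial_tab(SimpleInstanceView({"modelMain": object()})))  # publish
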
diff --git a/openpype/tools/publisher/widgets/tabs_widget.py b/openpype/tools/publisher/widgets/tabs_widget.py index 84638a002cd..d8ad19cfc09 100644 --- a/openpype/tools/publisher/widgets/tabs_widget.py +++ b/openpype/tools/publisher/widgets/tabs_widget.py @@ -54,6 +54,9 @@ def __init__(self, parent=None): self._buttons_by_identifier = {} def is_current_tab(self, identifier): + if isinstance(identifier, int): + identifier = self.get_tab_by_index(identifier) + if isinstance(identifier, PublisherTabBtn): identifier = identifier.identifier return self._current_identifier == identifier @@ -68,7 +71,16 @@ def add_tab(self, label, identifier): self.set_current_tab(identifier) return button + def get_tab_by_index(self, index): + if 0 >= index < self._btns_layout.count(): + item = self._btns_layout.itemAt(index) + return item.widget() + return None + def set_current_tab(self, identifier): + if isinstance(identifier, int): + identifier = self.get_tab_by_index(identifier) + if isinstance(identifier, PublisherTabBtn): identifier = identifier.identifier diff --git a/openpype/tools/publisher/widgets/validations_widget.py b/openpype/tools/publisher/widgets/validations_widget.py index 8c483e80886..935a12bc730 100644 --- a/openpype/tools/publisher/widgets/validations_widget.py +++ b/openpype/tools/publisher/widgets/validations_widget.py @@ -511,7 +511,7 @@ def __init__(self, controller, parent): ) # After success publishing publish_started_widget = ValidationArtistMessage( - "Publishing went smoothly", self + "So far so good", self ) # After success publishing publish_stop_ok_widget = ValidationArtistMessage( diff --git a/openpype/tools/publisher/widgets/widgets.py b/openpype/tools/publisher/widgets/widgets.py index 447fd7bc121..4b9626154d6 100644 --- a/openpype/tools/publisher/widgets/widgets.py +++ b/openpype/tools/publisher/widgets/widgets.py @@ -9,6 +9,7 @@ from Qt import QtWidgets, QtCore, QtGui import qtawesome +from openpype.lib.attribute_definitions import UnknownDef from openpype.tools.attribute_defs import create_widget_for_attr_def from openpype.tools import resources from openpype.tools.flickcharm import FlickCharm @@ -305,6 +306,20 @@ def refresh(self): "{} Method 'refresh' is not implemented." ).format(self.__class__.__name__)) + def has_items(self): + """View has at least one item. + + This is more a question for controller but is called from widget + which should probably should not use controller. + + Returns: + bool: There is at least one instance or conversion item. + """ + + raise NotImplementedError(( + "{} Method 'has_items' is not implemented." + ).format(self.__class__.__name__)) + def get_selected_items(self): """Selected instances required for callbacks. @@ -578,6 +593,11 @@ def __init__(self, controller, parent): self._text = None + # Make sure combobox is extended horizontally + size_policy = self.sizePolicy() + size_policy.setHorizontalPolicy(size_policy.MinimumExpanding) + self.setSizePolicy(size_policy) + def set_invalid_empty_task(self, invalid=True): self._proxy_model.set_filter_empty(invalid) if invalid: @@ -1180,7 +1200,7 @@ def set_current_instances(self, instances): """Set currently selected instances. Args: - instances(list): List of selected instances. + instances(List[CreatedInstance]): List of selected instances. Empty instances tells that nothing or context is selected. 
""" self._set_btns_visible(False) @@ -1303,6 +1323,13 @@ def set_current_instances(self, instances): else: widget.set_value(values, True) + widget.value_changed.connect(self._input_value_changed) + self._attr_def_id_to_instances[attr_def.id] = attr_instances + self._attr_def_id_to_attr_def[attr_def.id] = attr_def + + if attr_def.hidden: + continue + expand_cols = 2 if attr_def.is_value_def and attr_def.is_label_horizontal: expand_cols = 1 @@ -1321,13 +1348,8 @@ def set_current_instances(self, instances): content_layout.addWidget( widget, row, col_num, 1, expand_cols ) - row += 1 - widget.value_changed.connect(self._input_value_changed) - self._attr_def_id_to_instances[attr_def.id] = attr_instances - self._attr_def_id_to_attr_def[attr_def.id] = attr_def - self._scroll_area.setWidget(content_widget) self._content_widget = content_widget @@ -1421,8 +1443,17 @@ def set_current_instances(self, instances, context_selected): widget = create_widget_for_attr_def( attr_def, content_widget ) - label = attr_def.label or attr_def.key - content_layout.addRow(label, widget) + hidden_widget = attr_def.hidden + # Hide unknown values of publish plugins + # - The keys in most of cases does not represent what would + # label represent + if isinstance(attr_def, UnknownDef): + widget.setVisible(False) + hidden_widget = True + + if not hidden_widget: + label = attr_def.label or attr_def.key + content_layout.addRow(label, widget) widget.value_changed.connect(self._input_value_changed) @@ -1614,6 +1645,7 @@ def set_current_instances( instances(List[CreatedInstance]): List of currently selected instances. context_selected(bool): Is context selected. + convertor_identifiers(List[str]): Identifiers of convert items. """ all_valid = True @@ -1708,3 +1740,159 @@ def _update_thumbnails(self): self._thumbnail_widget.setVisible(True) self._thumbnail_widget.set_current_thumbnails(thumbnail_paths) + + +class CreateNextPageOverlay(QtWidgets.QWidget): + clicked = QtCore.Signal() + + def __init__(self, parent): + super(CreateNextPageOverlay, self).__init__(parent) + self.setCursor(QtCore.Qt.PointingHandCursor) + self._arrow_color = ( + get_objected_colors("font").get_qcolor() + ) + self._bg_color = ( + get_objected_colors("bg-buttons").get_qcolor() + ) + + change_anim = QtCore.QVariantAnimation() + change_anim.setStartValue(0.0) + change_anim.setEndValue(1.0) + change_anim.setDuration(200) + change_anim.setEasingCurve(QtCore.QEasingCurve.OutCubic) + + change_anim.valueChanged.connect(self._on_anim) + + self._change_anim = change_anim + self._is_visible = None + self._anim_value = 0.0 + self._increasing = False + self._under_mouse = None + self._handle_show_on_own = True + self._mouse_pressed = False + self.set_visible(True) + + def set_increasing(self, increasing): + if self._increasing is increasing: + return + self._increasing = increasing + if increasing: + self._change_anim.setDirection(self._change_anim.Forward) + else: + self._change_anim.setDirection(self._change_anim.Backward) + + if self._change_anim.state() != self._change_anim.Running: + self._change_anim.start() + + def set_visible(self, visible): + if self._is_visible is visible: + return + + self._is_visible = visible + if not visible: + self.set_increasing(False) + if not self._is_anim_finished(): + return + + self.setVisible(visible) + self._check_anim_timer() + + def _is_anim_finished(self): + if self._increasing: + return self._anim_value == 1.0 + return self._anim_value == 0.0 + + def _on_anim(self, value): + self._check_anim_timer() + + self._anim_value = 
value + + self.update() + + if not self._is_anim_finished(): + return + + if not self._is_visible: + self.setVisible(False) + + def set_under_mouse(self, under_mouse): + if self._under_mouse is under_mouse: + return + + self._under_mouse = under_mouse + self.set_increasing(under_mouse) + + def _is_under_mouse(self): + mouse_pos = self.mapFromGlobal(QtGui.QCursor.pos()) + under_mouse = self.rect().contains(mouse_pos) + return under_mouse + + def _check_anim_timer(self): + if not self.isVisible(): + return + + self.set_increasing(self._under_mouse) + + def mousePressEvent(self, event): + if event.button() == QtCore.Qt.LeftButton: + self._mouse_pressed = True + super(CreateNextPageOverlay, self).mousePressEvent(event) + + def mouseReleaseEvent(self, event): + if self._mouse_pressed: + self._mouse_pressed = False + if self.rect().contains(event.pos()): + self.clicked.emit() + + super(CreateNextPageOverlay, self).mouseReleaseEvent(event) + + def paintEvent(self, event): + painter = QtGui.QPainter() + painter.begin(self) + if self._anim_value == 0.0: + painter.end() + return + + painter.setClipRect(event.rect()) + painter.setRenderHints( + painter.Antialiasing + | painter.SmoothPixmapTransform + ) + + painter.setPen(QtCore.Qt.NoPen) + + rect = QtCore.QRect(self.rect()) + rect_width = rect.width() + rect_height = rect.height() + radius = rect_width * 0.2 + + x_offset = 0 + y_offset = 0 + if self._anim_value != 1.0: + x_offset += rect_width - (rect_width * self._anim_value) + + arrow_height = rect_height * 0.4 + arrow_half_height = arrow_height * 0.5 + arrow_x_start = x_offset + ((rect_width - arrow_half_height) * 0.5) + arrow_x_end = arrow_x_start + arrow_half_height + center_y = rect.center().y() + + painter.setBrush(self._bg_color) + painter.drawRoundedRect( + x_offset, y_offset, + rect_width + radius, rect_height, + radius, radius + ) + + src_arrow_path = QtGui.QPainterPath() + src_arrow_path.moveTo(arrow_x_start, center_y - arrow_half_height) + src_arrow_path.lineTo(arrow_x_end, center_y) + src_arrow_path.lineTo(arrow_x_start, center_y + arrow_half_height) + + arrow_stroker = QtGui.QPainterPathStroker() + arrow_stroker.setWidth(min(4, arrow_half_height * 0.2)) + arrow_path = arrow_stroker.createStroke(src_arrow_path) + + painter.fillPath(arrow_path, self._arrow_color) + + painter.end() diff --git a/openpype/tools/publisher/window.py b/openpype/tools/publisher/window.py index 5875f7aa68d..0f7fd2c7e33 100644 --- a/openpype/tools/publisher/window.py +++ b/openpype/tools/publisher/window.py @@ -29,6 +29,8 @@ HelpButton, HelpDialog, + + CreateNextPageOverlay, ) @@ -154,7 +156,7 @@ def __init__(self, parent=None, controller=None, reset_on_show=None): footer_layout.addWidget(footer_bottom_widget, 0) # Content - # - wrap stacked widget under one more widget to be able propagate + # - wrap stacked widget under one more widget to be able to propagate # margins (QStackedLayout can't have margins) content_widget = QtWidgets.QWidget(under_publish_widget) @@ -225,8 +227,8 @@ def __init__(self, parent=None, controller=None, reset_on_show=None): # Floating publish frame publish_frame = PublishFrame(controller, self.footer_border, self) - # Timer started on show -> connected to timer counter - # - helps to deffer on show logic by 3 event loops + create_overlay_button = CreateNextPageOverlay(self) + show_timer = QtCore.QTimer() show_timer.setInterval(1) show_timer.timeout.connect(self._on_show_timer) @@ -255,6 +257,9 @@ def __init__(self, parent=None, controller=None, reset_on_show=None): 
publish_btn.clicked.connect(self._on_publish_clicked) publish_frame.details_page_requested.connect(self._go_to_details_tab) + create_overlay_button.clicked.connect( + self._on_create_overlay_button_click + ) controller.event_system.add_callback( "instances.refresh.finished", self._on_instances_refresh @@ -262,6 +267,9 @@ def __init__(self, parent=None, controller=None, reset_on_show=None): controller.event_system.add_callback( "publish.reset.finished", self._on_publish_reset ) + controller.event_system.add_callback( + "controller.reset.finished", self._on_controller_reset + ) controller.event_system.add_callback( "publish.process.started", self._on_publish_start ) @@ -310,6 +318,7 @@ def __init__(self, parent=None, controller=None, reset_on_show=None): self._publish_overlay = publish_overlay self._publish_frame = publish_frame + self._content_widget = content_widget self._content_stacked_layout = content_stacked_layout self._overview_widget = overview_widget @@ -331,25 +340,39 @@ def __init__(self, parent=None, controller=None, reset_on_show=None): self._controller = controller self._first_show = True + self._first_reset = True # This is a little bit confusing but 'reset_on_first_show' is too long - # forin init + # for init self._reset_on_first_show = reset_on_show self._reset_on_show = True self._publish_frame_visible = None + self._tab_on_reset = None self._error_messages_to_show = collections.deque() self._errors_dialog_message_timer = errors_dialog_message_timer self._set_publish_visibility(False) + self._create_overlay_button = create_overlay_button + self._app_event_listener_installed = False + self._show_timer = show_timer self._show_counter = 0 + self._window_is_visible = False @property def controller(self): return self._controller + def make_sure_is_visible(self): + if self._window_is_visible: + self.setWindowState(QtCore.Qt.ActiveWindow) + + else: + self.show() + def showEvent(self, event): + self._window_is_visible = True super(PublisherWindow, self).showEvent(event) if self._first_show: self._first_show = False @@ -360,6 +383,38 @@ def showEvent(self, event): def resizeEvent(self, event): super(PublisherWindow, self).resizeEvent(event) self._update_publish_frame_rect() + self._update_create_overlay_size() + + def closeEvent(self, event): + self._window_is_visible = False + self._uninstall_app_event_listener() + self.save_changes() + self._reset_on_show = True + self._controller.clear_thumbnail_temp_dir_path() + super(PublisherWindow, self).closeEvent(event) + + def leaveEvent(self, event): + super(PublisherWindow, self).leaveEvent(event) + self._update_create_overlay_visibility() + + def eventFilter(self, obj, event): + if event.type() == QtCore.QEvent.MouseMove: + self._update_create_overlay_visibility(event.globalPos()) + return super(PublisherWindow, self).eventFilter(obj, event) + + def _install_app_event_listener(self): + if self._app_event_listener_installed: + return + self._app_event_listener_installed = True + app = QtWidgets.QApplication.instance() + app.installEventFilter(self) + + def _uninstall_app_event_listener(self): + if not self._app_event_listener_installed: + return + self._app_event_listener_installed = False + app = QtWidgets.QApplication.instance() + app.removeEventFilter(self) def keyPressEvent(self, event): # Ignore escape button to close window @@ -390,17 +445,16 @@ def _on_show_timer(self): # Reset counter when done for next show event self._show_counter = 0 + self._update_create_overlay_size() + self._update_create_overlay_visibility() + if 
self._is_on_create_tab(): + self._install_app_event_listener() + # Reset if requested if self._reset_on_show: self._reset_on_show = False self.reset() - def closeEvent(self, event): - self.save_changes() - self._reset_on_show = True - self._controller.clear_thumbnail_temp_dir_path() - super(PublisherWindow, self).closeEvent(event) - def save_changes(self): self._controller.save_changes() @@ -410,8 +464,21 @@ def reset(self): def set_context_label(self, label): self._context_label.setText(label) + def set_tab_on_reset(self, tab): + """Define tab that will be selected on window show. + + This is single use method, when publisher window is showed the value is + unset and not used on next show. + + Args: + tab (Union[int, Literal[create, publish, details, report]]: Index + or name of tab which will be selected on show (after reset). + """ + + self._tab_on_reset = tab + def _update_publish_details_widget(self, force=False): - if not force and self._tabs_widget.current_tab() != "details": + if not force and not self._is_on_details_tab(): return report_data = self.controller.get_publish_report() @@ -441,6 +508,10 @@ def _on_help_click(self): self._help_dialog.width(), self._help_dialog.height() ) + def _on_create_overlay_button_click(self): + self._create_overlay_button.set_under_mouse(False) + self._go_to_publish_tab() + def _on_tab_change(self, old_tab, new_tab): if old_tab == "details": self._publish_details_widget.close_details_popup() @@ -465,20 +536,53 @@ def _on_tab_change(self, old_tab, new_tab): self._report_widget ) + is_create = new_tab == "create" + if is_create: + self._install_app_event_listener() + else: + self._uninstall_app_event_listener() + self._create_overlay_button.set_visible(is_create) + def _on_context_or_active_change(self): self._validate_create_instances() def _on_create_request(self): self._go_to_create_tab() + def _set_current_tab(self, identifier): + self._tabs_widget.set_current_tab(identifier) + + def set_current_tab(self, tab): + self._set_current_tab(tab) + if not self._window_is_visible: + self.set_tab_on_reset(tab) + + def _is_current_tab(self, identifier): + return self._tabs_widget.is_current_tab(identifier) + def _go_to_create_tab(self): - self._tabs_widget.set_current_tab("create") + self._set_current_tab("create") + + def _go_to_publish_tab(self): + self._set_current_tab("publish") def _go_to_details_tab(self): - self._tabs_widget.set_current_tab("details") + self._set_current_tab("details") def _go_to_report_tab(self): - self._tabs_widget.set_current_tab("report") + self._set_current_tab("report") + + def _is_on_create_tab(self): + return self._is_current_tab("create") + + def _is_on_publish_tab(self): + return self._is_current_tab("publish") + + def _is_on_details_tab(self): + return self._is_current_tab("details") + + def _is_on_report_tab(self): + return self._is_current_tab("report") def _set_publish_overlay_visibility(self, visible): if visible: @@ -530,11 +634,33 @@ def _on_publish_reset(self): self._set_publish_visibility(False) self._set_footer_enabled(False) self._update_publish_details_widget() - if ( - not self._tabs_widget.is_current_tab("create") - and not self._tabs_widget.is_current_tab("publish") + + def _on_controller_reset(self): + self._first_reset, first_reset = False, self._first_reset + if self._tab_on_reset is not None: + self._tab_on_reset, new_tab = None, self._tab_on_reset + self._set_current_tab(new_tab) + return + + # On first reset change tab based on available items + # - if there is at least one instance the tab is changed 
to 'publish' + # otherwise 'create' is used + # - this happens only on first show + if first_reset: + if self._overview_widget.has_items(): + self._go_to_publish_tab() + else: + self._go_to_create_tab() + + elif ( + not self._is_on_create_tab() + and not self._is_on_publish_tab() ): - self._tabs_widget.set_current_tab("publish") + # If current tab is not 'Create' or 'Publish' go to 'Publish' + # - this can happen when publishing started and was reset + # at that moment it doesn't make sense to stay at publish + # specific tabs. + self._go_to_publish_tab() def _on_publish_start(self): self._create_tab.setEnabled(False) @@ -550,8 +676,8 @@ def _on_publish_start(self): self._publish_details_widget.close_details_popup() - if self._tabs_widget.is_current_tab(self._create_tab): - self._tabs_widget.set_current_tab("publish") + if self._is_on_create_tab(): + self._go_to_publish_tab() def _on_publish_validated_change(self, event): if event["value"]: @@ -564,7 +690,7 @@ def _on_publish_stop(self): publish_has_crashed = self._controller.publish_has_crashed validate_enabled = not publish_has_crashed publish_enabled = not publish_has_crashed - if self._tabs_widget.is_current_tab("publish"): + if self._is_on_publish_tab(): self._go_to_report_tab() if validate_enabled: @@ -668,6 +794,36 @@ def _on_convertor_error(self, event): event["title"], new_failed_info, "Convertor:" ) + def _update_create_overlay_size(self): + metrics = self._create_overlay_button.fontMetrics() + height = int(metrics.height()) + width = int(height * 0.7) + end_pos_x = self.width() + start_pos_x = end_pos_x - width + + center = self._content_widget.parent().mapTo( + self, + self._content_widget.rect().center() + ) + pos_y = center.y() - (height * 0.5) + + self._create_overlay_button.setGeometry( + start_pos_x, pos_y, + width, height + ) + + def _update_create_overlay_visibility(self, global_pos=None): + if global_pos is None: + global_pos = QtGui.QCursor.pos() + + under_mouse = False + my_pos = self.mapFromGlobal(global_pos) + if self.rect().contains(my_pos): + widget_geo = self._overview_widget.get_subset_views_geo() + widget_x = widget_geo.left() + (widget_geo.width() * 0.5) + under_mouse = widget_x < global_pos.x() + self._create_overlay_button.set_under_mouse(under_mouse) + class ErrorsMessageBox(ErrorMessageBox): def __init__(self, error_title, failed_info, message_start, parent): diff --git a/openpype/tools/settings/settings/constants.py b/openpype/tools/settings/settings/constants.py index d98d18c8bf5..23526e4de93 100644 --- a/openpype/tools/settings/settings/constants.py +++ b/openpype/tools/settings/settings/constants.py @@ -24,7 +24,6 @@ "SETTINGS_PATH_KEY", "ROOT_KEY", - "SETTINGS_PATH_KEY", "VALUE_KEY", "SAVE_TIME_KEY", "PROJECT_NAME_KEY", diff --git a/openpype/tools/standalonepublish/widgets/widget_drop_frame.py b/openpype/tools/standalonepublish/widgets/widget_drop_frame.py index f8a8273b267..18c2b276782 100644 --- a/openpype/tools/standalonepublish/widgets/widget_drop_frame.py +++ b/openpype/tools/standalonepublish/widgets/widget_drop_frame.py @@ -178,7 +178,7 @@ def _process_paths(self, in_paths): paths = self._get_all_paths(in_paths) collectionable_paths = [] non_collectionable_paths = [] - for path in in_paths: + for path in paths: ext = os.path.splitext(path)[1] if ext in self.image_extensions or ext in self.sequence_types: collectionable_paths.append(path) diff --git a/openpype/tools/utils/host_tools.py b/openpype/tools/utils/host_tools.py index 046dcbdf6ac..e8593a8ae2a 100644 --- 
a/openpype/tools/utils/host_tools.py +++ b/openpype/tools/utils/host_tools.py @@ -285,14 +285,12 @@ def get_publisher_tool(self, parent=None, controller=None): return self._publisher_tool - def show_publisher_tool(self, parent=None, controller=None): + def show_publisher_tool(self, parent=None, controller=None, tab=None): with qt_app_context(): - dialog = self.get_publisher_tool(parent, controller) - - dialog.show() - dialog.raise_() - dialog.activateWindow() - dialog.showNormal() + window = self.get_publisher_tool(parent, controller) + if tab: + window.set_current_tab(tab) + window.make_sure_is_visible() def get_tool_by_name(self, tool_name, parent=None, *args, **kwargs): """Show tool by it's name. @@ -446,8 +444,8 @@ def show_publish(parent=None): _SingletonPoint.show_tool_by_name("publish", parent) -def show_publisher(parent=None): - _SingletonPoint.show_tool_by_name("publisher", parent) +def show_publisher(parent=None, **kwargs): + _SingletonPoint.show_tool_by_name("publisher", parent, **kwargs) def show_experimental_tools_dialog(parent=None): diff --git a/openpype/vendor/python/python_2/secrets/LICENSE b/openpype/vendor/python/python_2/secrets/LICENSE new file mode 100644 index 00000000000..d3211e4d9f4 --- /dev/null +++ b/openpype/vendor/python/python_2/secrets/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2019 Scaleway + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/openpype/vendor/python/python_2/secrets/__init__.py b/openpype/vendor/python/python_2/secrets/__init__.py new file mode 100644 index 00000000000..c29ee61be1c --- /dev/null +++ b/openpype/vendor/python/python_2/secrets/__init__.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- + + +__version__ = "1.0.6" + +# Emulates __all__ for Python2 +from .secrets import ( + choice, + randbelow, + randbits, + SystemRandom, + token_bytes, + token_hex, + token_urlsafe, + compare_digest +) diff --git a/openpype/vendor/python/python_2/secrets/secrets.py b/openpype/vendor/python/python_2/secrets/secrets.py new file mode 100644 index 00000000000..967d2862d90 --- /dev/null +++ b/openpype/vendor/python/python_2/secrets/secrets.py @@ -0,0 +1,132 @@ +# -*- coding: utf-8 -*- +"""Generate cryptographically strong pseudo-random numbers suitable for + +managing secrets such as account authentication, tokens, and similar. + + +See PEP 506 for more information. 
+ +https://www.python.org/dev/peps/pep-0506/ + + +""" + + +__all__ = ['choice', 'randbelow', 'randbits', 'SystemRandom', + + 'token_bytes', 'token_hex', 'token_urlsafe', + + 'compare_digest', + + ] + +import os +import sys +from random import SystemRandom + +import base64 + +import binascii + + +# hmac.compare_digest did appear in python 2.7.7 +if sys.version_info >= (2, 7, 7): + from hmac import compare_digest +else: + # If we use an older python version, we will define an equivalent method + def compare_digest(a, b): + """Compatibility compare_digest method for python < 2.7. + This method is NOT cryptographically secure and may be subject to + timing attacks, see https://docs.python.org/2/library/hmac.html + """ + return a == b + + +_sysrand = SystemRandom() + + +randbits = _sysrand.getrandbits + +choice = _sysrand.choice + + +def randbelow(exclusive_upper_bound): + + """Return a random int in the range [0, n).""" + + if exclusive_upper_bound <= 0: + + raise ValueError("Upper bound must be positive.") + + return _sysrand._randbelow(exclusive_upper_bound) + + +DEFAULT_ENTROPY = 32 # number of bytes to return by default + + +def token_bytes(nbytes=None): + + """Return a random byte string containing *nbytes* bytes. + + + If *nbytes* is ``None`` or not supplied, a reasonable + + default is used. + + + >>> token_bytes(16) #doctest:+SKIP + + b'\\xebr\\x17D*t\\xae\\xd4\\xe3S\\xb6\\xe2\\xebP1\\x8b' + + + """ + + if nbytes is None: + + nbytes = DEFAULT_ENTROPY + + return os.urandom(nbytes) + + +def token_hex(nbytes=None): + + """Return a random text string, in hexadecimal. + + + The string has *nbytes* random bytes, each byte converted to two + + hex digits. If *nbytes* is ``None`` or not supplied, a reasonable + + default is used. + + + >>> token_hex(16) #doctest:+SKIP + + 'f9bf78b9a18ce6d46a0cd2b0b86df9da' + + + """ + + return binascii.hexlify(token_bytes(nbytes)).decode('ascii') + + +def token_urlsafe(nbytes=None): + + """Return a random URL-safe text string, in Base64 encoding. + + + The string has *nbytes* random bytes. If *nbytes* is ``None`` + + or not supplied, a reasonable default is used. + + + >>> token_urlsafe(16) #doctest:+SKIP + + 'Drmhze6EPcv0fN_81Bj-nA' + + + """ + + tok = token_bytes(nbytes) + + return base64.urlsafe_b64encode(tok).rstrip(b'=').decode('ascii') diff --git a/openpype/version.py b/openpype/version.py index 268f33083ac..443c76544bf 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.7-nightly.5" +__version__ = "3.14.9-nightly.2" diff --git a/setup.cfg b/setup.cfg index 0a9664033de..10cca3eb3fb 100644 --- a/setup.cfg +++ b/setup.cfg @@ -8,7 +8,8 @@ exclude = docs, */vendor, website, - openpype/vendor + openpype/vendor, + *deadline/repository/custom/plugins max-complexity = 30 diff --git a/tests/README.md b/tests/README.md index 69828cdbc29..d36b6534f8b 100644 --- a/tests/README.md +++ b/tests/README.md @@ -1,5 +1,15 @@ Automatic tests for OpenPype ============================ + +Requirements: +============ +Tests are recreating fresh DB for each run, so `mongorestore`, `mongodump` and `mongoimport` command line tools must be installed and on Path. 
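Worth a quick illustration: the `tab` keyword threaded through `show_publisher()` and `show_publisher_tool()` earlier in this patch lets callers open the publisher on a specific tab, and `set_current_tab()` defers the choice via `set_tab_on_reset()` when the window is not yet visible. A minimal usage sketch, assuming the host forwards the keyword unchanged (the call site is illustrative):

```python
from openpype.tools.utils import host_tools

# Open (or raise) the publisher with the Create tab pre-selected.
# The new tab API accepts "create", "publish", "details", "report"
# or an integer tab index.
host_tools.show_publisher(tab="create")
```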
+ +You can find intallers here: https://www.mongodb.com/docs/database-tools/installation/installation/ + +You can test that `mongorestore` is available by running this in console, or cmd: +```mongorestore --version``` + Structure: - integration - end to end tests, slow (see README.md in the integration folder for more info) - openpype/modules/MODULE_NAME - structure follow directory structure in code base diff --git a/tests/conftest.py b/tests/conftest.py index aa850be1a63..7b58b0314dd 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -43,3 +43,15 @@ def app_variant(request): @pytest.fixture(scope="module") def timeout(request): return request.config.getoption("--timeout") + + +@pytest.hookimpl(tryfirst=True, hookwrapper=True) +def pytest_runtest_makereport(item, call): + # execute all other hooks to obtain the report object + outcome = yield + rep = outcome.get_result() + + # set a report attribute for each phase of a call, which can + # be "setup", "call", "teardown" + + setattr(item, "rep_" + rep.when, rep) diff --git a/tests/integration/hosts/aftereffects/lib.py b/tests/integration/hosts/aftereffects/lib.py index 9fffc6073da..ffad33d13c3 100644 --- a/tests/integration/hosts/aftereffects/lib.py +++ b/tests/integration/hosts/aftereffects/lib.py @@ -2,10 +2,13 @@ import pytest import shutil -from tests.lib.testing_classes import HostFixtures +from tests.lib.testing_classes import ( + HostFixtures, + PublishTest, +) -class AfterEffectsTestClass(HostFixtures): +class AEHostFixtures(HostFixtures): @pytest.fixture(scope="module") def last_workfile_path(self, download_test_data, output_folder_url): """Get last_workfile_path from source data. @@ -15,15 +18,15 @@ def last_workfile_path(self, download_test_data, output_folder_url): src_path = os.path.join(download_test_data, "input", "workfile", - "test_project_test_asset_TestTask_v001.aep") - dest_folder = os.path.join(download_test_data, + "test_project_test_asset_test_task_v001.aep") + dest_folder = os.path.join(output_folder_url, self.PROJECT, self.ASSET, "work", self.TASK) os.makedirs(dest_folder) dest_path = os.path.join(dest_folder, - "test_project_test_asset_TestTask_v001.aep") + "test_project_test_asset_test_task_v001.aep") shutil.copy(src_path, dest_path) yield dest_path @@ -32,3 +35,12 @@ def last_workfile_path(self, download_test_data, output_folder_url): def startup_scripts(self, monkeypatch_session, download_test_data): """Points Maya to userSetup file from input data""" pass + + @pytest.fixture(scope="module") + def skip_compare_folders(self): + # skip folder that contain "Logs", these come only from Deadline + return ["Logs", "Auto-Save"] + + +class AELocalPublishTestClass(AEHostFixtures, PublishTest): + """Testing class for local publishes.""" diff --git a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects.py b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_legacy.py similarity index 58% rename from tests/integration/hosts/aftereffects/test_publish_in_aftereffects.py rename to tests/integration/hosts/aftereffects/test_publish_in_aftereffects_legacy.py index 4925cbd2d7f..5d0c15d63a6 100644 --- a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects.py +++ b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_legacy.py @@ -1,14 +1,16 @@ import logging from tests.lib.assert_classes import DBAssert -from tests.integration.hosts.aftereffects.lib import AfterEffectsTestClass +from tests.integration.hosts.aftereffects.lib import AELocalPublishTestClass log = 
logging.getLogger("test_publish_in_aftereffects") -class TestPublishInAfterEffects(AfterEffectsTestClass): +class TestPublishInAfterEffects(AELocalPublishTestClass): """Basic test case for publishing in AfterEffects + Uses old Pyblish schema of created instances. + Uses generic TestCase to prepare fixtures for test data, testing DBs, env vars. @@ -27,15 +29,15 @@ class TestPublishInAfterEffects(AfterEffectsTestClass): PERSIST = False TEST_FILES = [ - ("1c8261CmHwyMgS-g7S4xL5epAp0jCBmhf", - "test_aftereffects_publish.zip", + ("1jqI_uG2NusKFvZZF7C0ScHjxFJrlc9F-", + "test_aftereffects_publish_legacy.zip", "") ] - APP = "aftereffects" + APP_GROUP = "aftereffects" APP_VARIANT = "" - APP_NAME = "{}/{}".format(APP, APP_VARIANT) + APP_NAME = "{}/{}".format(APP_GROUP, APP_VARIANT) TIMEOUT = 120 # publish timeout @@ -49,23 +51,37 @@ def test_db_asserts(self, dbcon, publish_finished): failures.append( DBAssert.count_of_types(dbcon, "version", 0, name={"$ne": 1})) - failures.append( - DBAssert.count_of_types(dbcon, "subset", 1, - name="imageMainBackgroundcopy")) - failures.append( DBAssert.count_of_types(dbcon, "subset", 1, name="workfileTest_task")) failures.append( DBAssert.count_of_types(dbcon, "subset", 1, - name="reviewTesttask")) + name="renderTest_taskMain")) failures.append( DBAssert.count_of_types(dbcon, "representation", 4)) - additional_args = {"context.subset": "renderTestTaskDefault", + additional_args = {"context.subset": "workfileTest_task", + "context.ext": "aep"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + additional_args = {"context.subset": "renderTest_taskMain", "context.ext": "png"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 2, + additional_args=additional_args)) + + additional_args = {"context.subset": "renderTest_taskMain", + "name": "thumbnail"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + additional_args = {"context.subset": "renderTest_taskMain", + "name": "png_png"} failures.append( DBAssert.count_of_types(dbcon, "representation", 1, additional_args=additional_args)) diff --git a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py b/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py deleted file mode 100644 index c882e0f9b2c..00000000000 --- a/tests/integration/hosts/aftereffects/test_publish_in_aftereffects_multiframe.py +++ /dev/null @@ -1,64 +0,0 @@ -import logging - -from tests.lib.assert_classes import DBAssert -from tests.integration.hosts.aftereffects.lib import AfterEffectsTestClass - -log = logging.getLogger("test_publish_in_aftereffects") - - -class TestPublishInAfterEffects(AfterEffectsTestClass): - """Basic test case for publishing in AfterEffects - - Should publish 5 frames - """ - PERSIST = True - - TEST_FILES = [ - ("12aSDRjthn4X3yw83gz_0FZJcRRiVDEYT", - "test_aftereffects_publish_multiframe.zip", - "") - ] - - APP = "aftereffects" - APP_VARIANT = "" - - APP_NAME = "{}/{}".format(APP, APP_VARIANT) - - TIMEOUT = 120 # publish timeout - - def test_db_asserts(self, dbcon, publish_finished): - """Host and input data dependent expected results in DB.""" - print("test_db_asserts") - failures = [] - - failures.append(DBAssert.count_of_types(dbcon, "version", 2)) - - failures.append( - DBAssert.count_of_types(dbcon, "version", 0, name={"$ne": 1})) - - failures.append( - DBAssert.count_of_types(dbcon, "subset", 1, - name="imageMainBackgroundcopy")) - - 
failures.append( - DBAssert.count_of_types(dbcon, "subset", 1, - name="workfileTest_task")) - - failures.append( - DBAssert.count_of_types(dbcon, "subset", 1, - name="reviewTesttask")) - - failures.append( - DBAssert.count_of_types(dbcon, "representation", 4)) - - additional_args = {"context.subset": "renderTestTaskDefault", - "context.ext": "png"} - failures.append( - DBAssert.count_of_types(dbcon, "representation", 1, - additional_args=additional_args)) - - assert not any(failures) - - -if __name__ == "__main__": - test_case = TestPublishInAfterEffects() diff --git a/tests/integration/hosts/maya/lib.py b/tests/integration/hosts/maya/lib.py index f3a438c0650..ab402f36e04 100644 --- a/tests/integration/hosts/maya/lib.py +++ b/tests/integration/hosts/maya/lib.py @@ -2,10 +2,13 @@ import pytest import shutil -from tests.lib.testing_classes import HostFixtures +from tests.lib.testing_classes import ( + HostFixtures, + PublishTest, +) -class MayaTestClass(HostFixtures): +class MayaHostFixtures(HostFixtures): @pytest.fixture(scope="module") def last_workfile_path(self, download_test_data, output_folder_url): """Get last_workfile_path from source data. @@ -15,7 +18,7 @@ def last_workfile_path(self, download_test_data, output_folder_url): src_path = os.path.join(download_test_data, "input", "workfile", - "test_project_test_asset_TestTask_v001.mb") + "test_project_test_asset_test_task_v001.mb") dest_folder = os.path.join(output_folder_url, self.PROJECT, self.ASSET, @@ -23,7 +26,7 @@ def last_workfile_path(self, download_test_data, output_folder_url): self.TASK) os.makedirs(dest_folder) dest_path = os.path.join(dest_folder, - "test_project_test_asset_TestTask_v001.mb") + "test_project_test_asset_test_task_v001.mb") shutil.copy(src_path, dest_path) yield dest_path @@ -39,3 +42,11 @@ def startup_scripts(self, monkeypatch_session, download_test_data): "{}{}{}".format(startup_path, os.pathsep, original_pythonpath)) + + @pytest.fixture(scope="module") + def skip_compare_folders(self): + yield [] + + +class MayaLocalPublishTestClass(MayaHostFixtures, PublishTest): + """Testing class for local publishes.""" diff --git a/tests/integration/hosts/maya/test_publish_in_maya.py b/tests/integration/hosts/maya/test_publish_in_maya.py index 68b05644280..b7ee228aaec 100644 --- a/tests/integration/hosts/maya/test_publish_in_maya.py +++ b/tests/integration/hosts/maya/test_publish_in_maya.py @@ -1,7 +1,8 @@ -from tests.integration.hosts.maya.lib import MayaTestClass +from tests.lib.assert_classes import DBAssert +from tests.integration.hosts.maya.lib import MayaLocalPublishTestClass -class TestPublishInMaya(MayaTestClass): +class TestPublishInMaya(MayaLocalPublishTestClass): """Basic test case for publishing in Maya Shouldnt be running standalone only via 'runtests' pype command! (??) 
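A note on the failure handling these test changes rely on: the `pytest_runtest_makereport` hookwrapper added to `tests/conftest.py` stores each phase report on the test item, and the new `is_test_failed()` helper in `tests/lib/testing_classes.py` reads it so that temporary folders and test databases are kept whenever a test fails. A condensed sketch of how the two pieces fit together (simplified from the patch):

```python
import pytest


@pytest.hookimpl(tryfirst=True, hookwrapper=True)
def pytest_runtest_makereport(item, call):
    # Let the other hooks build the report, then grab the result.
    outcome = yield
    rep = outcome.get_result()
    # Exposes item.rep_setup / item.rep_call / item.rep_teardown.
    setattr(item, "rep_" + rep.when, rep)


def is_test_failed(request):
    # "rep_call" is missing when setup itself failed - treat that as a failure.
    try:
        return request.node.rep_call.failed
    except AttributeError:
        return True


# The module-scoped fixtures then combine this flag with PERSIST:
#     persist = persist or self.PERSIST or self.is_test_failed(request)
```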
@@ -28,7 +29,7 @@ class TestPublishInMaya(MayaTestClass): ("1BTSIIULJTuDc8VvXseuiJV_fL6-Bu7FP", "test_maya_publish.zip", "") ] - APP = "maya" + APP_GROUP = "maya" # keep empty to locate latest installed variant or explicit APP_VARIANT = "" @@ -37,33 +38,41 @@ class TestPublishInMaya(MayaTestClass): def test_db_asserts(self, dbcon, publish_finished): """Host and input data dependent expected results in DB.""" print("test_db_asserts") - assert 5 == dbcon.count_documents({"type": "version"}), \ - "Not expected no of versions" + failures = [] + failures.append(DBAssert.count_of_types(dbcon, "version", 2)) - assert 0 == dbcon.count_documents({"type": "version", - "name": {"$ne": 1}}), \ - "Only versions with 1 expected" + failures.append( + DBAssert.count_of_types(dbcon, "version", 0, name={"$ne": 1})) - assert 1 == dbcon.count_documents({"type": "subset", - "name": "modelMain"}), \ - "modelMain subset must be present" + failures.append( + DBAssert.count_of_types(dbcon, "subset", 1, + name="modelMain")) - assert 1 == dbcon.count_documents({"type": "subset", - "name": "workfileTest_task"}), \ - "workfileTest_task subset must be present" + failures.append( + DBAssert.count_of_types(dbcon, "subset", 1, + name="workfileTest_task")) - assert 11 == dbcon.count_documents({"type": "representation"}), \ - "Not expected no of representations" + failures.append(DBAssert.count_of_types(dbcon, "representation", 5)) - assert 2 == dbcon.count_documents({"type": "representation", - "context.subset": "modelMain", - "context.ext": "abc"}), \ - "Not expected no of representations with ext 'abc'" + additional_args = {"context.subset": "modelMain", + "context.ext": "abc"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 2, + additional_args=additional_args)) - assert 2 == dbcon.count_documents({"type": "representation", - "context.subset": "modelMain", - "context.ext": "ma"}), \ - "Not expected no of representations with ext 'abc'" + additional_args = {"context.subset": "modelMain", + "context.ext": "ma"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 2, + additional_args=additional_args)) + + additional_args = {"context.subset": "workfileTest_task", + "context.ext": "mb"} + failures.append( + DBAssert.count_of_types(dbcon, "representation", 1, + additional_args=additional_args)) + + assert not any(failures) if __name__ == "__main__": diff --git a/tests/integration/hosts/nuke/lib.py b/tests/integration/hosts/nuke/lib.py index d3c3d7ba816..96daec7427b 100644 --- a/tests/integration/hosts/nuke/lib.py +++ b/tests/integration/hosts/nuke/lib.py @@ -1,17 +1,20 @@ import os import pytest -import shutil +import re -from tests.lib.testing_classes import HostFixtures +from tests.lib.testing_classes import ( + HostFixtures, + PublishTest, +) -class NukeTestClass(HostFixtures): +class NukeHostFixtures(HostFixtures): @pytest.fixture(scope="module") def last_workfile_path(self, download_test_data, output_folder_url): """Get last_workfile_path from source data. """ - source_file_name = "test_project_test_asset_CompositingInNuke_v001.nk" + source_file_name = "test_project_test_asset_test_task_v001.nk" src_path = os.path.join(download_test_data, "input", "workfile", @@ -27,7 +30,16 @@ def last_workfile_path(self, download_test_data, output_folder_url): dest_path = os.path.join(dest_folder, source_file_name) - shutil.copy(src_path, dest_path) + # rewrite old root with temporary file + # TODO - using only C:/projects seems wrong - but where to get root ? 
+ replace_pattern = re.compile(re.escape("C:/projects"), re.IGNORECASE) + with open(src_path, "r") as fp: + updated = fp.read() + updated = replace_pattern.sub(output_folder_url.replace("\\", '/'), + updated) + + with open(dest_path, "w") as fp: + fp.write(updated) yield dest_path @@ -41,4 +53,12 @@ def startup_scripts(self, monkeypatch_session, download_test_data): monkeypatch_session.setenv("NUKE_PATH", "{}{}{}".format(startup_path, os.pathsep, - original_nuke_path)) \ No newline at end of file + original_nuke_path)) + + @pytest.fixture(scope="module") + def skip_compare_folders(self): + yield ["renders"] + + +class NukeLocalPublishTestClass(NukeHostFixtures, PublishTest): + """Testing class for local publishes.""" diff --git a/tests/integration/hosts/nuke/test_publish_in_nuke.py b/tests/integration/hosts/nuke/test_publish_in_nuke.py index 884160e0b59..f84f13fa20d 100644 --- a/tests/integration/hosts/nuke/test_publish_in_nuke.py +++ b/tests/integration/hosts/nuke/test_publish_in_nuke.py @@ -1,17 +1,25 @@ import logging from tests.lib.assert_classes import DBAssert -from tests.integration.hosts.nuke.lib import NukeTestClass +from tests.integration.hosts.nuke.lib import NukeLocalPublishTestClass log = logging.getLogger("test_publish_in_nuke") -class TestPublishInNuke(NukeTestClass): +class TestPublishInNuke(NukeLocalPublishTestClass): """Basic test case for publishing in Nuke Uses generic TestCase to prepare fixtures for test data, testing DBs, env vars. + !!! + It expects modified path in WriteNode, + use '[python {nuke.script_directory()}]' instead of regular root + dir (eg. instead of `c:/projects/test_project/test_asset/test_task`). + Access file path by selecting WriteNode group, CTRL+Enter, update file + input + !!! + Opens Nuke, run publish on prepared workile. Then checks content of DB (if subset, version, representations were @@ -20,7 +28,8 @@ class TestPublishInNuke(NukeTestClass): How to run: (in cmd with activated {OPENPYPE_ROOT}/.venv) - {OPENPYPE_ROOT}/.venv/Scripts/python.exe {OPENPYPE_ROOT}/start.py runtests ../tests/integration/hosts/nuke # noqa: E501 + {OPENPYPE_ROOT}/.venv/Scripts/python.exe {OPENPYPE_ROOT}/start.py + runtests ../tests/integration/hosts/nuke # noqa: E501 To check log/errors from launched app's publish process keep PERSIST to True and check `test_openpype.logs` collection. 
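For context, the `skip_compare_folders` fixtures introduced for each host feed the `_filter_files()` helper added to `PublishTest.test_folder_structure_same` later in this patch: any expected or published path matching one of the regexes is dropped before the two sets are compared. A small illustration with made-up paths:

```python
import re

skip_compare_folders = ["renders", "Logs"]

paths = {
    "C:/output/test_project/renders/beauty.0001.exr",   # matches "renders"
    "C:/output/test_project/work/renders_old.log",      # also matches "renders"
    "C:/output/test_project/publish/review_h264.mp4",
}

kept = {
    path for path in paths
    if not any(re.search(pattern, path) for pattern in skip_compare_folders)
}
# Only the publish/review_h264.mp4 path takes part in the comparison.
```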
@@ -30,14 +39,14 @@ class TestPublishInNuke(NukeTestClass): ("1SUurHj2aiQ21ZIMJfGVBI2KjR8kIjBGI", "test_Nuke_publish.zip", "") ] - APP = "nuke" + APP_GROUP = "nuke" - TIMEOUT = 120 # publish timeout + TIMEOUT = 50 # publish timeout # could be overwritten by command line arguments # keep empty to locate latest installed variant or explicit APP_VARIANT = "" - PERSIST = True # True - keep test_db, test_openpype, outputted test files + PERSIST = False # True - keep test_db, test_openpype, outputted test files TEST_DATA_FOLDER = None def test_db_asserts(self, dbcon, publish_finished): @@ -52,7 +61,7 @@ def test_db_asserts(self, dbcon, publish_finished): failures.append( DBAssert.count_of_types(dbcon, "subset", 1, - name="renderCompositingInNukeMain")) + name="renderTest_taskMain")) failures.append( DBAssert.count_of_types(dbcon, "subset", 1, @@ -61,7 +70,7 @@ def test_db_asserts(self, dbcon, publish_finished): failures.append( DBAssert.count_of_types(dbcon, "representation", 4)) - additional_args = {"context.subset": "renderCompositingInNukeMain", + additional_args = {"context.subset": "renderTest_taskMain", "context.ext": "exr"} failures.append( DBAssert.count_of_types(dbcon, "representation", 1, diff --git a/tests/integration/hosts/photoshop/lib.py b/tests/integration/hosts/photoshop/lib.py index 16ef2d3ae68..9d51a11c061 100644 --- a/tests/integration/hosts/photoshop/lib.py +++ b/tests/integration/hosts/photoshop/lib.py @@ -2,10 +2,13 @@ import pytest import shutil -from tests.lib.testing_classes import HostFixtures +from tests.lib.testing_classes import ( + HostFixtures, + PublishTest +) -class PhotoshopTestClass(HostFixtures): +class PhotoshopTestClass(HostFixtures, PublishTest): @pytest.fixture(scope="module") def last_workfile_path(self, download_test_data, output_folder_url): """Get last_workfile_path from source data. 
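The Photoshop class above simply mixes the two bases together in place, but the reason `HostFixtures` was split away from `PublishTest` (as done for Maya, Nuke and After Effects) is composition: the same host fixtures can later be reused with a different publish base. A hypothetical remote-render variant would follow the same pattern (both class names below are illustrative, not part of the patch):

```python
class MayaDeadlinePublishTestClass(MayaHostFixtures, DeadlinePublishTest):
    """Testing class for publishes going through a render farm (hypothetical)."""
```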
@@ -32,3 +35,7 @@ def last_workfile_path(self, download_test_data, output_folder_url): def startup_scripts(self, monkeypatch_session, download_test_data): """Points Maya to userSetup file from input data""" pass + + @pytest.fixture(scope="module") + def skip_compare_folders(self): + yield [] diff --git a/tests/integration/hosts/photoshop/test_publish_in_photoshop.py b/tests/integration/hosts/photoshop/test_publish_in_photoshop.py index 5387bbe51e1..4aaf43234db 100644 --- a/tests/integration/hosts/photoshop/test_publish_in_photoshop.py +++ b/tests/integration/hosts/photoshop/test_publish_in_photoshop.py @@ -41,11 +41,11 @@ class TestPublishInPhotoshop(PhotoshopTestClass): ("1zD2v5cBgkyOm_xIgKz3WKn8aFB_j8qC-", "test_photoshop_publish.zip", "") ] - APP = "photoshop" + APP_GROUP = "photoshop" # keep empty to locate latest installed variant or explicit APP_VARIANT = "" - APP_NAME = "{}/{}".format(APP, APP_VARIANT) + APP_NAME = "{}/{}".format(APP_GROUP, APP_VARIANT) TIMEOUT = 120 # publish timeout @@ -72,7 +72,7 @@ def test_db_asserts(self, dbcon, publish_finished): name="workfileTest_task")) failures.append( - DBAssert.count_of_types(dbcon, "representation", 8)) + DBAssert.count_of_types(dbcon, "representation", 6)) additional_args = {"context.subset": "imageMainForeground", "context.ext": "png"} diff --git a/tests/lib/db_handler.py b/tests/lib/db_handler.py index b1810550124..82e741cc3b3 100644 --- a/tests/lib/db_handler.py +++ b/tests/lib/db_handler.py @@ -118,9 +118,8 @@ def setup_from_dump(self, db_name, dump_dir, overwrite=False, "Run with overwrite=True") else: if collection: - coll = self.client[db_name_out].get(collection) - if coll: - coll.drop() + if collection in self.client[db_name_out].list_collection_names(): # noqa + self.client[db_name_out][collection].drop() else: self.teardown(db_name_out) @@ -133,7 +132,11 @@ def setup_from_dump(self, db_name, dump_dir, overwrite=False, db_name=db_name, db_name_out=db_name_out, collection=collection) print("mongorestore query:: {}".format(query)) - subprocess.run(query) + try: + subprocess.run(query) + except FileNotFoundError: + raise RuntimeError("'mongorestore' utility must be on path." 
+ "Please install it.") def teardown(self, db_name): """Drops 'db_name' if exists.""" @@ -231,13 +234,15 @@ def _import_query(self, uri, sql_url, # Examples # handler = DBHandler(uri="mongodb://localhost:27017") # # -# backup_dir = "c:\\projects\\test_nuke_publish\\input\\dumps" +# backup_dir = "c:\\projects\\test_zips\\test_nuke_deadline_publish\\input\\dumps" # noqa # # # -# handler.backup_to_dump("avalon", backup_dir, True, collection="test_project") -# handler.setup_from_dump("test_db", backup_dir, True, db_name_out="avalon", collection="test_project") -# handler.setup_from_sql_file("test_db", "c:\\projects\\sql\\item.sql", +# handler.backup_to_dump("avalon_tests", backup_dir, True, collection="test_project") # noqa +#handler.backup_to_dump("openpype_tests", backup_dir, True, collection="settings") # noqa + +# handler.setup_from_dump("avalon_tests", backup_dir, True, db_name_out="avalon_tests", collection="test_project") # noqa +# handler.setup_from_sql_file("avalon_tests", "c:\\projects\\sql\\item.sql", # collection="test_project", # drop=False, mode="upsert") -# handler.setup_from_sql("test_db", "c:\\projects\\sql", +# handler.setup_from_sql("avalon_tests", "c:\\projects\\sql", # collection="test_project", # drop=False, mode="upsert") diff --git a/tests/lib/testing_classes.py b/tests/lib/testing_classes.py index 78a9f810954..82cc321ae86 100644 --- a/tests/lib/testing_classes.py +++ b/tests/lib/testing_classes.py @@ -8,6 +8,7 @@ import shutil import glob import platform +import re from tests.lib.db_handler import DBHandler from common.openpype_common.distribution.file_handler import RemoteFileHandler @@ -36,9 +37,9 @@ class ModuleUnitTest(BaseTest): PERSIST = False # True to not purge temporary folder nor test DB TEST_OPENPYPE_MONGO = "mongodb://localhost:27017" - TEST_DB_NAME = "test_db" + TEST_DB_NAME = "avalon_tests" TEST_PROJECT_NAME = "test_project" - TEST_OPENPYPE_NAME = "test_openpype" + TEST_OPENPYPE_NAME = "openpype_tests" TEST_FILES = [] @@ -57,7 +58,7 @@ def monkeypatch_session(self): m.undo() @pytest.fixture(scope="module") - def download_test_data(self, test_data_folder, persist=False): + def download_test_data(self, test_data_folder, persist, request): test_data_folder = test_data_folder or self.TEST_DATA_FOLDER if test_data_folder: print("Using existing folder {}".format(test_data_folder)) @@ -78,7 +79,8 @@ def download_test_data(self, test_data_folder, persist=False): print("Temporary folder created:: {}".format(tmpdir)) yield tmpdir - persist = persist or self.PERSIST + persist = (persist or self.PERSIST or + self.is_test_failed(request)) if not persist: print("Removing {}".format(tmpdir)) shutil.rmtree(tmpdir) @@ -125,7 +127,8 @@ def env_var(self, monkeypatch_session, download_test_data): monkeypatch_session.setenv("TEST_SOURCE_FOLDER", download_test_data) @pytest.fixture(scope="module") - def db_setup(self, download_test_data, env_var, monkeypatch_session): + def db_setup(self, download_test_data, env_var, monkeypatch_session, + request): """Restore prepared MongoDB dumps into selected DB.""" backup_dir = os.path.join(download_test_data, "input", "dumps") @@ -135,13 +138,14 @@ def db_setup(self, download_test_data, env_var, monkeypatch_session): overwrite=True, db_name_out=self.TEST_DB_NAME) - db_handler.setup_from_dump("openpype", backup_dir, + db_handler.setup_from_dump(self.TEST_OPENPYPE_NAME, backup_dir, overwrite=True, db_name_out=self.TEST_OPENPYPE_NAME) yield db_handler - if not self.PERSIST: + persist = self.PERSIST or self.is_test_failed(request) + if not 
persist: db_handler.teardown(self.TEST_DB_NAME) db_handler.teardown(self.TEST_OPENPYPE_NAME) @@ -166,6 +170,13 @@ def dbcon_openpype(self, db_setup): mongo_client = OpenPypeMongoConnection.get_mongo_client() yield mongo_client[self.TEST_OPENPYPE_NAME]["settings"] + def is_test_failed(self, request): + # if request.node doesn't have rep_call, something failed + try: + return request.node.rep_call.failed + except AttributeError: + return True + class PublishTest(ModuleUnitTest): """Test class for publishing in hosts. @@ -188,7 +199,7 @@ class PublishTest(ModuleUnitTest): TODO: implement test on file size, file content """ - APP = "" + APP_GROUP = "" TIMEOUT = 120 # publish timeout @@ -210,10 +221,10 @@ def app_name(self, app_variant): if not app_variant: variant = ( application_manager.find_latest_available_variant_for_group( - self.APP)) + self.APP_GROUP)) app_variant = variant.name - yield "{}/{}".format(self.APP, app_variant) + yield "{}/{}".format(self.APP_GROUP, app_variant) @pytest.fixture(scope="module") def output_folder_url(self, download_test_data): @@ -310,7 +321,8 @@ def publish_finished(self, dbcon, launched_app, download_test_data, yield True def test_folder_structure_same(self, dbcon, publish_finished, - download_test_data, output_folder_url): + download_test_data, output_folder_url, + skip_compare_folders): """Check if expected and published subfolders contain same files. Compares only presence, not size nor content! @@ -328,12 +340,33 @@ def test_folder_structure_same(self, dbcon, publish_finished, glob.glob(expected_dir_base + "\\**", recursive=True) if f != expected_dir_base and os.path.exists(f)) - not_matched = expected.symmetric_difference(published) - assert not not_matched, "Missing {} files".format( - "\n".join(sorted(not_matched))) + filtered_published = self._filter_files(published, + skip_compare_folders) + + # filter out temp files also in expected + # could be polluted by accident by copying 'output' to zip file + filtered_expected = self._filter_files(expected, skip_compare_folders) + + not_mtched = filtered_expected.symmetric_difference(filtered_published) + if not_mtched: + raise AssertionError("Missing {} files".format( + "\n".join(sorted(not_mtched)))) + + def _filter_files(self, source_files, skip_compare_folders): + """Filter list of files according to regex pattern.""" + filtered = set() + for file_path in source_files: + if skip_compare_folders: + if not any([re.search(val, file_path) + for val in skip_compare_folders]): + filtered.add(file_path) + else: + filtered.add(file_path) + return filtered -class HostFixtures(PublishTest): + +class HostFixtures(): """Host specific fixtures. 
Should be implemented once per host.""" @pytest.fixture(scope="module") def last_workfile_path(self, download_test_data, output_folder_url): @@ -344,3 +377,8 @@ def startup_scripts(self, monkeypatch_session, download_test_data): """"Adds init scripts (like userSetup) to expected location""" raise NotImplementedError + + @pytest.fixture(scope="module") + def skip_compare_folders(self): + """Use list of regexs to filter out published folders from comparing""" + raise NotImplementedError diff --git a/tests/resources/test_data.zip b/tests/resources/test_data.zip index 0faab86b37d5c7d1224e8a92cca766ed80536718..e22b9acdbdbcd7312776e33918ac2a6a9211dab3 100644 GIT binary patch delta 3519 [base85-encoded binary payload omitted - not readable as text; the diff header of the next file was lost in the same garbled span, and the hunks that follow modify the OpenPypeVersion / bootstrap unit tests]
- v3 = OpenPypeVersion(1, 2, 3, staging=True) - assert str(v3) == "1.2.3+staging" + v3 = OpenPypeVersion(1, 2, 3) + assert str(v3) == "1.2.3" - v4 = OpenPypeVersion(1, 2, 3, staging="True", prerelease="rc.1") - assert str(v4) == "1.2.3-rc.1+staging" + v4 = OpenPypeVersion(1, 2, 3,
prerelease="rc.1") + assert str(v4) == "1.2.3-rc.1" assert v3 > v4 assert v1 > v4 assert v4 < OpenPypeVersion(1, 2, 3, prerelease="rc.1") @@ -73,7 +73,7 @@ def test_openpype_version(printer): OpenPypeVersion(4, 8, 10), OpenPypeVersion(4, 8, 20), OpenPypeVersion(4, 8, 9), - OpenPypeVersion(1, 2, 3, staging=True), + OpenPypeVersion(1, 2, 3), OpenPypeVersion(1, 2, 3, build="foo") ] res = sorted(sort_versions) @@ -104,27 +104,26 @@ def test_openpype_version(printer): with pytest.raises(ValueError): _ = OpenPypeVersion(version="booobaa") - v11 = OpenPypeVersion(version="4.6.7-foo+staging") + v11 = OpenPypeVersion(version="4.6.7-foo") assert v11.major == 4 assert v11.minor == 6 assert v11.patch == 7 - assert v11.staging is True assert v11.prerelease == "foo" def test_get_main_version(): - ver = OpenPypeVersion(1, 2, 3, staging=True, prerelease="foo") + ver = OpenPypeVersion(1, 2, 3, prerelease="foo") assert ver.get_main_version() == "1.2.3" def test_get_version_path_from_list(): versions = [ OpenPypeVersion(1, 2, 3, path=Path('/foo/bar')), - OpenPypeVersion(3, 4, 5, staging=True, path=Path("/bar/baz")), + OpenPypeVersion(3, 4, 5, path=Path("/bar/baz")), OpenPypeVersion(6, 7, 8, prerelease="x", path=Path("boo/goo")) ] path = BootstrapRepos.get_version_path_from_list( - "3.4.5+staging", versions) + "3.4.5", versions) assert path == Path("/bar/baz") @@ -362,12 +361,15 @@ def mock_user_data_dir(*args, **kwargs): result = fix_bootstrap.find_openpype(include_zips=True) # we should have results as file were created assert result is not None, "no OpenPype version found" - # latest item in `result` should be latest version found. + # latest item in `result` should be the latest version found. + # this will be `7.2.10-foo+staging` even with *staging* in since we've + # dropped the logic to handle staging separately and in alphabetical + # sorting it is after `strange`. expected_path = Path( d_path / "{}{}{}".format( - test_versions_2[3].prefix, - test_versions_2[3].version, - test_versions_2[3].suffix + test_versions_2[4].prefix, + test_versions_2[4].version, + test_versions_2[4].suffix ) ) assert result, "nothing found" diff --git a/tools/run_mongo.ps1 b/tools/run_mongo.ps1 index c64ff75969d..85b94b0971f 100644 --- a/tools/run_mongo.ps1 +++ b/tools/run_mongo.ps1 @@ -112,4 +112,6 @@ $mongoPath = Find-Mongo $preferred_version Write-Color -Text ">>> ", "Using DB path: ", "[ ", "$($dbpath)", " ]" -Color Green, Gray, Cyan, White, Cyan Write-Color -Text ">>> ", "Port: ", "[ ", "$($port)", " ]" -Color Green, Gray, Cyan, White, Cyan +New-Item -ItemType Directory -Force -Path $($dbpath) + Start-Process -FilePath $mongopath "--dbpath $($dbpath) --port $($port)" -PassThru | Out-Null diff --git a/website/docs/dev_build.md b/website/docs/dev_build.md index 4e80f6e19d5..9c99b26f1ea 100644 --- a/website/docs/dev_build.md +++ b/website/docs/dev_build.md @@ -51,7 +51,9 @@ development tools like [CMake](https://cmake.org/) and [Visual Studio](https://v #### Run from source -For development purposes it is possible to run OpenPype directly from the source. We provide a simple launcher script for this. +For development purposes it is possible to run OpenPype directly from the source. We provide a simple launcher script for this. 
To run the powershell scripts you may have to enable unrestricted execution as administrator: + +`Set-ExecutionPolicy -ExecutionPolicy unrestricted` To start OpenPype from source you need to diff --git a/website/docs/dev_requirements.md b/website/docs/dev_requirements.md index 1c8958d1c04..fa2d996e204 100644 --- a/website/docs/dev_requirements.md +++ b/website/docs/dev_requirements.md @@ -55,7 +55,7 @@ To run mongoDB on server, use your server distribution tools to set it up (on Li ## Python -**Python 3.7.8** is the recommended version to use (as per [VFX platform CY2021](https://vfxplatform.com/)). +**Python 3.7.9** is the recommended version to use (as per [VFX platform CY2021](https://vfxplatform.com/)). If you're planning to run openPYPE on workstations from built executables (highly recommended), you will only need python for building and development, however, if you'd like to run from source centrally, every user will need python installed. diff --git a/website/yarn.lock b/website/yarn.lock index 177a4a38021..220a489dfa4 100644 --- a/website/yarn.lock +++ b/website/yarn.lock @@ -4812,9 +4812,9 @@ loader-runner@^4.2.0: integrity sha512-92+huvxMvYlMzMt0iIOukcwYBFpkYJdpl2xsZ7LrlayO7E8SOv+JJUEK17B/dJIHAOLMfh2dZZ/Y18WgmGtYNw== loader-utils@^1.4.0: - version "1.4.1" - resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-1.4.1.tgz#278ad7006660bccc4d2c0c1578e17c5c78d5c0e0" - integrity sha512-1Qo97Y2oKaU+Ro2xnDMR26g1BwMT29jNbem1EvcujW2jqt+j5COXyscjM7bLQkM9HaxI7pkWeW7gnI072yMI9Q== + version "1.4.2" + resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-1.4.2.tgz#29a957f3a63973883eb684f10ffd3d151fec01a3" + integrity sha512-I5d00Pd/jwMD2QCduo657+YM/6L3KZu++pmX9VFncxaxvHcru9jx1lBaFft+r4Mt2jK0Yhp41XlRAihzPxHNCg== dependencies: big.js "^5.2.2" emojis-list "^3.0.0" From 1338b1372c008de526a0fda97a826adfb313c3b4 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 9 Dec 2022 13:00:29 +0100 Subject: [PATCH 089/130] :bug: fix the path --- openpype/hosts/unreal/api/pipeline.py | 2 +- openpype/hosts/unreal/plugins/publish/collect_instances.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/unreal/api/pipeline.py b/openpype/hosts/unreal/api/pipeline.py index 839465881dc..ca5a42cd828 100644 --- a/openpype/hosts/unreal/api/pipeline.py +++ b/openpype/hosts/unreal/api/pipeline.py @@ -116,7 +116,7 @@ def ls(): """ ar = unreal.AssetRegistryHelpers.get_asset_registry() # UE 5.1 changed how class name is specified - class_name = ["/Script", "AssetContainer"] if UNREAL_VERSION.major == 5 and UNREAL_VERSION.minor > 0 else "AssetContainer" # noqa + class_name = ["/Script/OpenPype", "AssetContainer"] if UNREAL_VERSION.major == 5 and UNREAL_VERSION.minor > 0 else "AssetContainer" # noqa openpype_containers = ar.get_assets_by_class(class_name, True) # get_asset_by_class returns AssetData. 
To get all metadata we need to diff --git a/openpype/hosts/unreal/plugins/publish/collect_instances.py b/openpype/hosts/unreal/plugins/publish/collect_instances.py index db968330c6f..1f25cbde7d6 100644 --- a/openpype/hosts/unreal/plugins/publish/collect_instances.py +++ b/openpype/hosts/unreal/plugins/publish/collect_instances.py @@ -25,7 +25,7 @@ class CollectInstances(pyblish.api.ContextPlugin): def process(self, context): ar = unreal.AssetRegistryHelpers.get_asset_registry() - class_name = ["/Script", + class_name = ["/Script/OpenPype", "AssetContainer"] if UNREAL_VERSION.major == 5 and \ UNREAL_VERSION.minor > 0 else "OpenPypePublishInstance" # noqa instance_containers = ar.get_assets_by_class(class_name, True) From ef9f338fb15b666db999daaa8f4341371591b3a2 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 9 Dec 2022 13:23:33 +0100 Subject: [PATCH 090/130] :bug: remove unnecessary header --- .../UE_5.0/Source/OpenPype/Public/OpenPypePublishInstance.h | 1 - 1 file changed, 1 deletion(-) diff --git a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstance.h b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstance.h index 2f066bd94bb..e9d94aecfcc 100644 --- a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstance.h +++ b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/OpenPypePublishInstance.h @@ -1,6 +1,5 @@ #pragma once -#include "EditorTutorial.h" #include "Engine.h" #include "OpenPypePublishInstance.generated.h" From ec1b23898df7e748c1b819145119d265bf7e6d14 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Sat, 10 Dec 2022 03:28:34 +0000 Subject: [PATCH 091/130] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index 443c76544bf..190f7ac4010 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.9-nightly.2" +__version__ = "3.14.9-nightly.3" From 3acbd9bd0cc00811e10f8b48e1da60a129b2109c Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Sat, 10 Dec 2022 09:20:53 +0000 Subject: [PATCH 092/130] Prevent warning about already connected time attribute --- openpype/hosts/maya/api/plugin.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/maya/api/plugin.py b/openpype/hosts/maya/api/plugin.py index 66b525bad15..fe30001a96f 100644 --- a/openpype/hosts/maya/api/plugin.py +++ b/openpype/hosts/maya/api/plugin.py @@ -273,7 +273,12 @@ def update(self, container, representation): if alembic_nodes: for attr, data in alembic_data.items(): node_attr = "{}.{}".format(alembic_nodes[0], attr) - if data["input"]: + + # Prevent warning about connecting to the time attribute + # cause Maya connects to this attribute by default. 
+ if attr == "time": + continue + elif data["input"]: cmds.connectAttr( data["input"], node_attr, force=True ) From b833025a625ec2720da0822f76b4de24faae457f Mon Sep 17 00:00:00 2001 From: Toke Stuart Jepsen Date: Sat, 10 Dec 2022 09:30:42 +0000 Subject: [PATCH 093/130] Improve readability --- openpype/hosts/maya/api/lib.py | 5 +++++ openpype/hosts/maya/api/plugin.py | 30 +++++++++++------------------- 2 files changed, 16 insertions(+), 19 deletions(-) diff --git a/openpype/hosts/maya/api/lib.py b/openpype/hosts/maya/api/lib.py index 2530021eba6..04b0ad35f15 100644 --- a/openpype/hosts/maya/api/lib.py +++ b/openpype/hosts/maya/api/lib.py @@ -3436,3 +3436,8 @@ def dgcontext(mtime): # If no more nodes to process break the frame iterations.. if not node_dependencies: break + + +def get_attribute_input(attr): + connections = cmds.listConnections(attr, plugs=True, destination=False) + return connections[0] if connections else None diff --git a/openpype/hosts/maya/api/plugin.py b/openpype/hosts/maya/api/plugin.py index fe30001a96f..82df85a8be9 100644 --- a/openpype/hosts/maya/api/plugin.py +++ b/openpype/hosts/maya/api/plugin.py @@ -226,11 +226,8 @@ def update(self, container, representation): if alembic_nodes: for attr in alembic_attrs: node_attr = "{}.{}".format(alembic_nodes[0], attr) - inputs = cmds.listConnections( - node_attr, plugs=True, destination=False - ) data = { - "input": None if inputs is None else inputs[0], + "input": lib.get_attribute_input(node_attr), "value": cmds.getAttr(node_attr) } @@ -271,23 +268,18 @@ def update(self, container, representation): "{}:*".format(namespace), type="AlembicNode" ) if alembic_nodes: + alembic_node = alembic_nodes[0] # assume single AlembicNode for attr, data in alembic_data.items(): - node_attr = "{}.{}".format(alembic_nodes[0], attr) - - # Prevent warning about connecting to the time attribute - # cause Maya connects to this attribute by default. - if attr == "time": - continue - elif data["input"]: - cmds.connectAttr( - data["input"], node_attr, force=True - ) + node_attr = "{}.{}".format(alembic_node, attr) + input = lib.get_attribute_input(node_attr) + if data["input"]: + if data["input"] != input: + cmds.connectAttr( + data["input"], node_attr, force=True + ) else: - inputs = cmds.listConnections( - node_attr, plugs=True, destination=False - ) - if inputs: - cmds.disconnectAttr(inputs[0], node_attr) + if input: + cmds.disconnectAttr(input, node_attr) cmds.setAttr(node_attr, data["value"]) # Fix PLN-40 for older containers created with Avalon that had the From 358f1b7d40a38f103041d92c976eeefb8e375bad Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 12 Dec 2022 11:45:51 +0100 Subject: [PATCH 094/130] Revert .toml update of Gazu Toml updates should go to next minor --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 20e676dcde5..f74f40c5615 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -41,7 +41,7 @@ Click = "^7" dnspython = "^2.1.0" ftrack-python-api = "^2.3.3" shotgun_api3 = {git = "https://github.com/shotgunsoftware/python-api.git", rev = "v3.3.3"} -gazu = "^0.8.32" +gazu = "^0.8.28" google-api-python-client = "^1.12.8" # sync server google support (should be separate?) 
jsonschema = "^2.6.0" keyring = "^22.0.1" From aecc5577400eceaa6381f40b2a26204e84227051 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 12 Dec 2022 14:45:09 +0100 Subject: [PATCH 095/130] events use different approach to define if event should be passed to callback --- openpype/lib/events.py | 46 ++++++++++++++++++++++++++++++++++++------ 1 file changed, 40 insertions(+), 6 deletions(-) diff --git a/openpype/lib/events.py b/openpype/lib/events.py index 747761fb3e6..096201312fb 100644 --- a/openpype/lib/events.py +++ b/openpype/lib/events.py @@ -74,22 +74,52 @@ def __init__(self, topic, func): "Registered callback is not callable. \"{}\"" ).format(str(func))) - # Collect additional data about function - # - name - # - path - # - if expect argument or not + # Collect function name and path to file for logging func_name = func.__name__ func_path = os.path.abspath(inspect.getfile(func)) + + # Get expected arguments from function spec + # - positional arguments are always preferred + expect_args = False + expect_kwargs = False + fake_event = "fake" if hasattr(inspect, "signature"): + # Python 3 using 'Signature' object where we try to bind arg + # or kwarg. Using signature is recommended approach based on + # documentation. sig = inspect.signature(func) - expect_args = len(sig.parameters) > 0 + try: + sig.bind(fake_event) + expect_args = True + except TypeError: + pass + + try: + sig.bind(event=fake_event) + expect_kwargs = True + except TypeError: + pass + else: - expect_args = len(inspect.getargspec(func)[0]) > 0 + # In Python 2 'signature' is not available so 'getcallargs' is used + # - 'getcallargs' is marked as deprecated since Python 3.0 + try: + inspect.getcallargs(func, fake_event) + expect_args = True + except TypeError: + pass + + try: + inspect.getcallargs(func, event=fake_event) + expect_kwargs = True + except TypeError: + pass self._func_ref = func_ref self._func_name = func_name self._func_path = func_path self._expect_args = expect_args + self._expect_kwargs = expect_kwargs self._ref_valid = func_ref is not None self._enabled = True @@ -157,6 +187,10 @@ def process_event(self, event): try: if self._expect_args: callback(event) + + elif self._expect_kwargs: + callback(event=event) + else: callback() From 771d7994dcfbf76bef1547ad1fcdc799e7418697 Mon Sep 17 00:00:00 2001 From: Kayla Man Date: Mon, 12 Dec 2022 22:05:47 +0800 Subject: [PATCH 096/130] update the redshift render settings --- openpype/hosts/maya/api/lib_rendersettings.py | 21 +++++++++++++++++++ .../schemas/schema_maya_render_settings.json | 10 ++++----- 2 files changed, 25 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/maya/api/lib_rendersettings.py b/openpype/hosts/maya/api/lib_rendersettings.py index d9b79e3c2f9..5161141ef94 100644 --- a/openpype/hosts/maya/api/lib_rendersettings.py +++ b/openpype/hosts/maya/api/lib_rendersettings.py @@ -95,6 +95,7 @@ def set_default_renderer_settings(self, renderer=None): if renderer == "redshift": self._set_redshift_settings(width, height) + mel.eval("redshiftUpdateActiveAovList") def _set_arnold_settings(self, width, height): """Sets settings for Arnold.""" @@ -158,7 +159,10 @@ def _set_redshift_settings(self, width, height): cmds.delete(aov) redshift_aovs = redshift_render_presets["aov_list"] + # list all the aovs + all_rs_aovs = cmds.ls(type='RedshiftAOV') for rs_aov in redshift_aovs: + rs_layername = rs_aov if " " in rs_aov: rs_renderlayer = rs_aov.replace(" ", "") rs_layername = "rsAov_{}".format(rs_renderlayer) @@ -170,6 +174,23 @@ def 
_set_redshift_settings(self, width, height): # update the AOV list mel.eval("redshiftUpdateActiveAovList") + rs_p_engine = redshift_render_presets["primary_gi_engine"] + rs_s_engine = redshift_render_presets["secondary_gi_engine"] + + if int(rs_p_engine) or int(rs_s_engine) != 0: + cmds.setAttr("redshiftOptions.GIEnabled", 1) + if int(rs_p_engine) == 0: + # reset the primary GI Engine as default + cmds.setAttr("redshiftOptions.primaryGIEngine", 4) + if int(rs_s_engine) == 0: + # reset the secondary GI Engine as default + cmds.setAttr("redshiftOptions.secondaryGIEngine", 2) + else: + cmds.setAttr("redshiftOptions.GIEnabled", 0) + + cmds.setAttr("redshiftOptions.primaryGIEngine", int(rs_p_engine)) + cmds.setAttr("redshiftOptions.secondaryGIEngine", int(rs_s_engine)) + additional_options = redshift_render_presets["additional_options"] ext = redshift_render_presets["image_format"] img_exts = ["iff", "exr", "tif", "png", "tga", "jpg"] diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json index 512e45f6744..e90495891bf 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_maya_render_settings.json @@ -317,9 +317,8 @@ "defaults": "0", "enum_items": [ {"0": "None"}, - {"1": "Photon Map"}, - {"2": "Irradiance Cache"}, - {"3": "Brute Force"} + {"3": "Irradiance Cache"}, + {"4": "Brute Force"} ] }, { @@ -330,9 +329,8 @@ "defaults": "0", "enum_items": [ {"0": "None"}, - {"1": "Photon Map"}, - {"2": "Irradiance Cache"}, - {"3": "Brute Force"} + {"2": "Irradiance Point Cloud"}, + {"4": "Brute Force"} ] }, { From b50e60987420762d06435478c8a14706c9c4f969 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 12 Dec 2022 17:37:16 +0100 Subject: [PATCH 097/130] OP-4465 - updated filter_profiles to handle arrays in values key_values might now contain arrays. Useful for filtering on 'families' --- openpype/lib/profiles_filtering.py | 31 +++++++++++++++++------------- 1 file changed, 18 insertions(+), 13 deletions(-) diff --git a/openpype/lib/profiles_filtering.py b/openpype/lib/profiles_filtering.py index 370703a68bb..e030b197167 100644 --- a/openpype/lib/profiles_filtering.py +++ b/openpype/lib/profiles_filtering.py @@ -79,11 +79,11 @@ def fullmatch(regex, string, flags=0): return None -def validate_value_by_regexes(value, in_list): +def validate_value_by_regexes(values, in_list): """Validates in any regex from list match entered value. Args: - value (str): String where regexes is checked. + values (str|list): String where regexes is checked. in_list (list): List with regexes. Returns: @@ -102,17 +102,21 @@ def validate_value_by_regexes(value, in_list): # If value is not set and in list has specific values then resolve value # as not matching. 
- if not value: + if not values: return -1 + if isinstance(values, str): + values = [values] + regexes = compile_list_of_regexes(in_list) for regex in regexes: - if hasattr(regex, "fullmatch"): - result = regex.fullmatch(value) - else: - result = fullmatch(regex, value) - if result: - return 1 + for value in values: + if hasattr(regex, "fullmatch"): + result = regex.fullmatch(value) + else: + result = fullmatch(regex, value) + if result: + return 1 return -1 @@ -136,7 +140,8 @@ def filter_profiles(profiles_data, key_values, keys_order=None, logger=None): Args: profiles_data (list): Profile definitions as dictionaries. - key_values (dict): Mapping of Key <-> Value. Key is checked if is + key_values (dict): Mapping of Key <-> Value|[Value]. + Key is checked if is available in profile and if Value is matching it's values. keys_order (list, tuple): Order of keys from `key_values` which matters only when multiple profiles have same score. @@ -181,12 +186,12 @@ def filter_profiles(profiles_data, key_values, keys_order=None, logger=None): profile_scores = [] for key in keys_order: - value = key_values[key] - match = validate_value_by_regexes(value, profile.get(key)) + values = key_values[key] + match = validate_value_by_regexes(values, profile.get(key)) if match == -1: profile_value = profile.get(key) or [] logger.debug( - "\"{}\" not found in \"{}\": {}".format(value, key, + "\"{}\" not found in \"{}\": {}".format(values, key, profile_value) ) profile_points = -1 From 3296ac68ef1a2469ce68651fe4e6f8c8b4c71ba3 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 12 Dec 2022 17:41:51 +0100 Subject: [PATCH 098/130] OP-4465 - extract burnin is triggered by profiles in Settings Cleaned up obsolete methods --- openpype/plugins/publish/extract_burnin.py | 233 +++------------------ 1 file changed, 28 insertions(+), 205 deletions(-) diff --git a/openpype/plugins/publish/extract_burnin.py b/openpype/plugins/publish/extract_burnin.py index fd8dfdece97..eab7652ae21 100644 --- a/openpype/plugins/publish/extract_burnin.py +++ b/openpype/plugins/publish/extract_burnin.py @@ -1,5 +1,4 @@ import os -import re import json import copy import tempfile @@ -21,6 +20,7 @@ CREATE_NO_WINDOW ) +from openpype.lib.profiles_filtering import filter_profiles class ExtractBurnin(publish.Extractor): @@ -34,25 +34,7 @@ class ExtractBurnin(publish.Extractor): label = "Extract burnins" order = pyblish.api.ExtractorOrder + 0.03 - families = ["review", "burnin"] - hosts = [ - "nuke", - "maya", - "shell", - "hiero", - "premiere", - "traypublisher", - "standalonepublisher", - "harmony", - "fusion", - "aftereffects", - "tvpaint", - "webpublisher", - "aftereffects", - "photoshop", - "flame" - # "resolve" - ] + optional = True positions = [ @@ -69,11 +51,15 @@ class ExtractBurnin(publish.Extractor): "y_offset": 5 } - # Preset attributes + # Configurable by Settings profiles = None options = None def process(self, instance): + if not self.profiles: + self.log.warning("No profiles present for create burnin") + return + # QUESTION what is this for and should we raise an exception? 
if "representations" not in instance.data: raise RuntimeError("Burnin needs already created mov to work on.") @@ -137,18 +123,29 @@ def _get_burnins_per_representations(self, instance, src_burnin_defs): return filtered_repres def main_process(self, instance): - # TODO get these data from context - host_name = instance.context.data["hostName"] - task_name = os.environ["AVALON_TASK"] - family = self.main_family_from_instance(instance) + host_name = instance.data["anatomyData"]["app"] + families = list(set(instance.data["family"]).union( + set(instance.data["families"]))) + task_data = instance.data["anatomyData"].get("task", {}) + task_name = task_data.get("name") + task_type = task_data.get("type") + subset = instance.data["subset"] + + filtering_criteria = { + "hosts": host_name, + "families": families, + "task_names": task_name, + "task_types": task_type, + "subset": subset + } + profile = filter_profiles(self.profiles, filtering_criteria, + logger=self.log) - # Find profile most matching current host, task and instance family - profile = self.find_matching_profile(host_name, task_name, family) if not profile: self.log.info(( "Skipped instance. None of profiles in presets are for" - " Host: \"{}\" | Family: \"{}\" | Task \"{}\"" - ).format(host_name, family, task_name)) + " Host: \"{}\" | Families: \"{}\" | Task \"{}\" | Task type \"{}\" | Subset \"{}\" " + ).format(host_name, families, task_name, task_type, subset)) return self.log.debug("profile: {}".format(profile)) @@ -158,8 +155,8 @@ def main_process(self, instance): if not burnin_defs: self.log.info(( "Skipped instance. Burnin definitions are not set for profile" - " Host: \"{}\" | Family: \"{}\" | Task \"{}\" | Profile \"{}\"" - ).format(host_name, family, task_name, profile)) + " Host: \"{}\" | Families: \"{}\" | Task \"{}\" | Profile \"{}\"" + ).format(host_name, families, task_name, profile)) return burnin_options = self._get_burnin_options() @@ -693,130 +690,6 @@ def prepare_repre_data(self, instance, repre, burnin_data, temp_data): ) }) - def find_matching_profile(self, host_name, task_name, family): - """ Filter profiles by Host name, Task name and main Family. - - Filtering keys are "hosts" (list), "tasks" (list), "families" (list). - If key is not find or is empty than it's expected to match. - - Args: - profiles (list): Profiles definition from presets. - host_name (str): Current running host name. - task_name (str): Current context task name. - family (str): Main family of current Instance. - - Returns: - dict/None: Return most matching profile or None if none of profiles - match at least one criteria. 
- """ - - matching_profiles = None - highest_points = -1 - for profile in self.profiles or tuple(): - profile_points = 0 - profile_value = [] - - # Host filtering - host_names = profile.get("hosts") - match = self.validate_value_by_regexes(host_name, host_names) - if match == -1: - continue - profile_points += match - profile_value.append(bool(match)) - - # Task filtering - task_names = profile.get("tasks") - match = self.validate_value_by_regexes(task_name, task_names) - if match == -1: - continue - profile_points += match - profile_value.append(bool(match)) - - # Family filtering - families = profile.get("families") - match = self.validate_value_by_regexes(family, families) - if match == -1: - continue - profile_points += match - profile_value.append(bool(match)) - - if profile_points > highest_points: - matching_profiles = [] - highest_points = profile_points - - if profile_points == highest_points: - profile["__value__"] = profile_value - matching_profiles.append(profile) - - if not matching_profiles: - return - - if len(matching_profiles) == 1: - return matching_profiles[0] - - return self.profile_exclusion(matching_profiles) - - def profile_exclusion(self, matching_profiles): - """Find out most matching profile by host, task and family match. - - Profiles are selectivelly filtered. Each profile should have - "__value__" key with list of booleans. Each boolean represents - existence of filter for specific key (host, taks, family). - Profiles are looped in sequence. In each sequence are split into - true_list and false_list. For next sequence loop are used profiles in - true_list if there are any profiles else false_list is used. - - Filtering ends when only one profile left in true_list. Or when all - existence booleans loops passed, in that case first profile from left - profiles is returned. - - Args: - matching_profiles (list): Profiles with same values. - - Returns: - dict: Most matching profile. - """ - self.log.info( - "Search for first most matching profile in match order:" - " Host name -> Task name -> Family." - ) - # Filter all profiles with highest points value. First filter profiles - # with matching host if there are any then filter profiles by task - # name if there are any and lastly filter by family. Else use first in - # list. - idx = 0 - final_profile = None - while True: - profiles_true = [] - profiles_false = [] - for profile in matching_profiles: - value = profile["__value__"] - # Just use first profile when idx is greater than values. - if not idx < len(value): - final_profile = profile - break - - if value[idx]: - profiles_true.append(profile) - else: - profiles_false.append(profile) - - if final_profile is not None: - break - - if profiles_true: - matching_profiles = profiles_true - else: - matching_profiles = profiles_false - - if len(matching_profiles) == 1: - final_profile = matching_profiles[0] - break - idx += 1 - - final_profile.pop("__value__") - return final_profile - def filter_burnins_defs(self, profile, instance): """Filter outputs by their values from settings. @@ -909,56 +782,6 @@ def families_filter_validation(self, families, output_families_filter): return True return False - def compile_list_of_regexes(self, in_list): - """Convert strings in entered list to compiled regex objects.""" - regexes = [] - if not in_list: - return regexes - - for item in in_list: - if not item: - continue - - try: - regexes.append(re.compile(item)) - except TypeError: - self.log.warning(( - "Invalid type \"{}\" value \"{}\"." - " Expected string based object. 
Skipping." - ).format(str(type(item)), str(item))) - - return regexes - - def validate_value_by_regexes(self, value, in_list): - """Validate in any regexe from list match entered value. - - Args: - in_list (list): List with regexes. - value (str): String where regexes is checked. - - Returns: - int: Returns `0` when list is not set or is empty. Returns `1` when - any regex match value and returns `-1` when none of regexes - match value entered. - """ - if not in_list: - return 0 - - output = -1 - regexes = self.compile_list_of_regexes(in_list) - for regex in regexes: - if re.match(regex, value): - output = 1 - break - return output - - def main_family_from_instance(self, instance): - """Return main family of entered instance.""" - family = instance.data.get("family") - if not family: - family = instance.data["families"][0] - return family - def families_from_instance(self, instance): """Return all families of entered instance.""" families = [] From e82f831c5abbccd3fbe98b4483f68f3c105866d3 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 12 Dec 2022 18:13:21 +0100 Subject: [PATCH 099/130] Revert "OP-4465 - updated filter_profiles to handle arrays in values" This reverts commit b50e60987420762d06435478c8a14706c9c4f969. --- openpype/lib/profiles_filtering.py | 31 +++++++++++++----------------- 1 file changed, 13 insertions(+), 18 deletions(-) diff --git a/openpype/lib/profiles_filtering.py b/openpype/lib/profiles_filtering.py index e030b197167..370703a68bb 100644 --- a/openpype/lib/profiles_filtering.py +++ b/openpype/lib/profiles_filtering.py @@ -79,11 +79,11 @@ def fullmatch(regex, string, flags=0): return None -def validate_value_by_regexes(values, in_list): +def validate_value_by_regexes(value, in_list): """Validates in any regex from list match entered value. Args: - values (str|list): String where regexes is checked. + value (str): String where regexes is checked. in_list (list): List with regexes. Returns: @@ -102,21 +102,17 @@ def validate_value_by_regexes(values, in_list): # If value is not set and in list has specific values then resolve value # as not matching. - if not values: + if not value: return -1 - if isinstance(values, str): - values = [values] - regexes = compile_list_of_regexes(in_list) for regex in regexes: - for value in values: - if hasattr(regex, "fullmatch"): - result = regex.fullmatch(value) - else: - result = fullmatch(regex, value) - if result: - return 1 + if hasattr(regex, "fullmatch"): + result = regex.fullmatch(value) + else: + result = fullmatch(regex, value) + if result: + return 1 return -1 @@ -140,8 +136,7 @@ def filter_profiles(profiles_data, key_values, keys_order=None, logger=None): Args: profiles_data (list): Profile definitions as dictionaries. - key_values (dict): Mapping of Key <-> Value|[Value]. - Key is checked if is + key_values (dict): Mapping of Key <-> Value. Key is checked if is available in profile and if Value is matching it's values. keys_order (list, tuple): Order of keys from `key_values` which matters only when multiple profiles have same score. 
@@ -186,12 +181,12 @@ def filter_profiles(profiles_data, key_values, keys_order=None, logger=None): profile_scores = [] for key in keys_order: - values = key_values[key] - match = validate_value_by_regexes(values, profile.get(key)) + value = key_values[key] + match = validate_value_by_regexes(value, profile.get(key)) if match == -1: profile_value = profile.get(key) or [] logger.debug( - "\"{}\" not found in \"{}\": {}".format(values, key, + "\"{}\" not found in \"{}\": {}".format(value, key, profile_value) ) profile_points = -1 From a91ae985fbf481560bd9fc28eb78edf020250bff Mon Sep 17 00:00:00 2001 From: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> Date: Mon, 12 Dec 2022 18:15:05 +0100 Subject: [PATCH 100/130] Removed unused import --- openpype/tools/settings/settings/dict_mutable_widget.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/settings/settings/dict_mutable_widget.py b/openpype/tools/settings/settings/dict_mutable_widget.py index b9932da7899..27c93923208 100644 --- a/openpype/tools/settings/settings/dict_mutable_widget.py +++ b/openpype/tools/settings/settings/dict_mutable_widget.py @@ -1,6 +1,6 @@ from uuid import uuid4 -from qtpy import QtWidgets, QtCore, QtGui +from qtpy import QtWidgets, QtCore from .base import BaseWidget from .lib import ( From 0ee65517f4e34cdbda225f9c472f129375f43cc4 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 12 Dec 2022 18:26:54 +0100 Subject: [PATCH 101/130] OP-4465 - revert wrong defaults Values here were copied from Pyblish filtering, should not be used for profiles. --- openpype/settings/defaults/project_settings/global.json | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openpype/settings/defaults/project_settings/global.json b/openpype/settings/defaults/project_settings/global.json index 89d7cf08b74..8c56500646f 100644 --- a/openpype/settings/defaults/project_settings/global.json +++ b/openpype/settings/defaults/project_settings/global.json @@ -209,6 +209,9 @@ { "families": [], "hosts": [], + "task_types": [], + "task_names": [], + "subsets": [], "burnins": { "burnin": { "TOP_LEFT": "{yy}-{mm}-{dd}", From ec3e04699877d08b070ed4cfb1fbd0fda3d2a2b5 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 12 Dec 2022 18:30:55 +0100 Subject: [PATCH 102/130] OP-4465 - revert deleted filters families and hosts here are meant for Pyblish filtering, they should stay here for now. 
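As a short illustrative note on the split between the two filter layers (a sketch under assumed names, not the actual ExtractBurnin code): pyblish evaluates the plugin-level `families`/`hosts` attributes to decide whether `process()` runs at all, while the Settings-driven profiles are only consulted inside `process()` via `filter_profiles` to pick the burnin definitions.

    # Minimal sketch, assuming a pyblish InstancePlugin and the
    # filter_profiles(profiles_data, key_values, keys_order=None, logger=None)
    # signature shown above; class and variable names are illustrative only.
    import pyblish.api
    from openpype.lib.profiles_filtering import filter_profiles

    class ExampleBurninExtractor(pyblish.api.InstancePlugin):
        order = pyblish.api.ExtractorOrder + 0.03
        families = ["review", "burnin"]   # pyblish filter: which instances reach process()
        hosts = ["maya", "nuke"]          # pyblish filter: which hosts register the plugin
        profiles = []                     # filled from project Settings

        def process(self, instance):
            # Settings filter: pick the profile matching this context, if any.
            criteria = {
                "hosts": instance.context.data["hostName"],
                "families": instance.data["family"],
            }
            profile = filter_profiles(self.profiles, criteria, logger=self.log)
            if not profile:
                return  # no profile matched; nothing to burn in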
--- openpype/plugins/publish/extract_burnin.py | 37 +++++++++++++++++----- 1 file changed, 29 insertions(+), 8 deletions(-) diff --git a/openpype/plugins/publish/extract_burnin.py b/openpype/plugins/publish/extract_burnin.py index eab7652ae21..f113e61bb09 100644 --- a/openpype/plugins/publish/extract_burnin.py +++ b/openpype/plugins/publish/extract_burnin.py @@ -35,6 +35,26 @@ class ExtractBurnin(publish.Extractor): label = "Extract burnins" order = pyblish.api.ExtractorOrder + 0.03 + families = ["review", "burnin"] + hosts = [ + "nuke", + "maya", + "shell", + "hiero", + "premiere", + "traypublisher", + "standalonepublisher", + "harmony", + "fusion", + "aftereffects", + "tvpaint", + "webpublisher", + "aftereffects", + "photoshop", + "flame" + # "resolve" + ] + optional = True positions = [ @@ -123,9 +143,8 @@ def _get_burnins_per_representations(self, instance, src_burnin_defs): return filtered_repres def main_process(self, instance): - host_name = instance.data["anatomyData"]["app"] - families = list(set(instance.data["family"]).union( - set(instance.data["families"]))) + host_name = instance.context.data["hostName"] + family = instance.data["family"] task_data = instance.data["anatomyData"].get("task", {}) task_name = task_data.get("name") task_type = task_data.get("type") @@ -133,7 +152,7 @@ def main_process(self, instance): filtering_criteria = { "hosts": host_name, - "families": families, + "families": family, "task_names": task_name, "task_types": task_type, "subset": subset @@ -144,8 +163,9 @@ def main_process(self, instance): if not profile: self.log.info(( "Skipped instance. None of profiles in presets are for" - " Host: \"{}\" | Families: \"{}\" | Task \"{}\" | Task type \"{}\" | Subset \"{}\" " - ).format(host_name, families, task_name, task_type, subset)) + " Host: \"{}\" | Families: \"{}\" | Task \"{}\"" + " | Task type \"{}\" | Subset \"{}\" " + ).format(host_name, family, task_name, task_type, subset)) return self.log.debug("profile: {}".format(profile)) @@ -155,8 +175,9 @@ def main_process(self, instance): if not burnin_defs: self.log.info(( "Skipped instance. Burnin definitions are not set for profile" - " Host: \"{}\" | Families: \"{}\" | Task \"{}\" | Profile \"{}\"" - ).format(host_name, families, task_name, profile)) + " Host: \"{}\" | Families: \"{}\" | Task \"{}\"" + " | Profile \"{}\"" + ).format(host_name, family, task_name, profile)) return burnin_options = self._get_burnin_options() From 76f1cbc2e3b93df36cc7bdc140e86ec5d0051f14 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 12 Dec 2022 18:31:19 +0100 Subject: [PATCH 103/130] print help instead of invokinf 'interactive' command --- openpype/cli.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/cli.py b/openpype/cli.py index 7611915d840..2c322205229 100644 --- a/openpype/cli.py +++ b/openpype/cli.py @@ -34,7 +34,8 @@ def main(ctx): # Default command for headless openpype is 'interactive' command # otherwise 'tray' is used. 
if os.environ.get("OPENPYPE_HEADLESS_MODE") == "1": - ctx.invoke(interactive) + print(ctx.get_help()) + sys.exit(0) else: ctx.invoke(tray) From b05bcf94258c52578f9758205e3fb74d220e645b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 12 Dec 2022 18:34:07 +0100 Subject: [PATCH 104/130] Change comment --- openpype/cli.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/openpype/cli.py b/openpype/cli.py index 2c322205229..897106c35f0 100644 --- a/openpype/cli.py +++ b/openpype/cli.py @@ -31,8 +31,7 @@ def main(ctx): """ if ctx.invoked_subcommand is None: - # Default command for headless openpype is 'interactive' command - # otherwise 'tray' is used. + # Print help if headless mode is used if os.environ.get("OPENPYPE_HEADLESS_MODE") == "1": print(ctx.get_help()) sys.exit(0) From 7e129a8526f92ea2f2b92e959e673ff528beeb8a Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 13 Dec 2022 11:39:23 +0100 Subject: [PATCH 105/130] OP-4465 - added missed settings schema --- .../schemas/schema_global_publish.json | 21 +++++++++++++++++-- 1 file changed, 19 insertions(+), 2 deletions(-) diff --git a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json index f2ada5fd8dd..5388d04bc90 100644 --- a/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json +++ b/openpype/settings/entities/schemas/projects_schema/schemas/schema_global_publish.json @@ -526,11 +526,28 @@ "object_type": "text" }, { - "type": "hosts-enum", "key": "hosts", - "label": "Hosts", + "label": "Host names", + "type": "hosts-enum", "multiselection": true }, + { + "key": "task_types", + "label": "Task types", + "type": "task-types-enum" + }, + { + "key": "task_names", + "label": "Task names", + "type": "list", + "object_type": "text" + }, + { + "key": "subsets", + "label": "Subset names", + "type": "list", + "object_type": "text" + }, { "type": "splitter" }, From 54780b477aa1303585e2bfe19fd57ab6d36a087c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Dec 2022 14:00:24 +0100 Subject: [PATCH 106/130] use new interface class name for publish host --- openpype/hosts/houdini/api/pipeline.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/houdini/api/pipeline.py b/openpype/hosts/houdini/api/pipeline.py index b0791fcb6c7..f8e2c16d216 100644 --- a/openpype/hosts/houdini/api/pipeline.py +++ b/openpype/hosts/houdini/api/pipeline.py @@ -7,7 +7,7 @@ import hou # noqa -from openpype.host import HostBase, IWorkfileHost, ILoadHost, INewPublisher +from openpype.host import HostBase, IWorkfileHost, ILoadHost, IPublishHost import pyblish.api @@ -40,7 +40,7 @@ INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") -class HoudiniHost(HostBase, IWorkfileHost, ILoadHost, INewPublisher): +class HoudiniHost(HostBase, IWorkfileHost, ILoadHost, IPublishHost): name = "houdini" def __init__(self): From 8a40489d28a4aab9af9cdf047e83afda729515ec Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Dec 2022 15:16:29 +0100 Subject: [PATCH 107/130] launcher is using qtpy import instead of Qt --- openpype/tools/launcher/actions.py | 2 +- openpype/tools/launcher/constants.py | 2 +- openpype/tools/launcher/delegates.py | 2 +- openpype/tools/launcher/lib.py | 2 +- openpype/tools/launcher/models.py | 2 +- openpype/tools/launcher/widgets.py | 2 +- openpype/tools/launcher/window.py | 2 +- 7 files changed, 7 insertions(+), 7 deletions(-) diff --git 
a/openpype/tools/launcher/actions.py b/openpype/tools/launcher/actions.py index 34d06f72cc8..61660ee9b7b 100644 --- a/openpype/tools/launcher/actions.py +++ b/openpype/tools/launcher/actions.py @@ -1,6 +1,6 @@ import os -from Qt import QtWidgets, QtGui +from qtpy import QtWidgets, QtGui from openpype import PLUGINS_DIR from openpype import style diff --git a/openpype/tools/launcher/constants.py b/openpype/tools/launcher/constants.py index 61f631759be..cb0049055cb 100644 --- a/openpype/tools/launcher/constants.py +++ b/openpype/tools/launcher/constants.py @@ -1,4 +1,4 @@ -from Qt import QtCore +from qtpy import QtCore ACTION_ROLE = QtCore.Qt.UserRole diff --git a/openpype/tools/launcher/delegates.py b/openpype/tools/launcher/delegates.py index 7b536587279..02a40861d2e 100644 --- a/openpype/tools/launcher/delegates.py +++ b/openpype/tools/launcher/delegates.py @@ -1,5 +1,5 @@ import time -from Qt import QtCore, QtWidgets, QtGui +from qtpy import QtCore, QtWidgets, QtGui from .constants import ( ANIMATION_START_ROLE, ANIMATION_STATE_ROLE, diff --git a/openpype/tools/launcher/lib.py b/openpype/tools/launcher/lib.py index 68e57c6b92d..2507b6eddc8 100644 --- a/openpype/tools/launcher/lib.py +++ b/openpype/tools/launcher/lib.py @@ -1,5 +1,5 @@ import os -from Qt import QtGui +from qtpy import QtGui import qtawesome from openpype import resources diff --git a/openpype/tools/launcher/models.py b/openpype/tools/launcher/models.py index 6e3b5310188..6c763544a90 100644 --- a/openpype/tools/launcher/models.py +++ b/openpype/tools/launcher/models.py @@ -6,7 +6,7 @@ import time import appdirs -from Qt import QtCore, QtGui +from qtpy import QtCore, QtGui import qtawesome from openpype.client import ( diff --git a/openpype/tools/launcher/widgets.py b/openpype/tools/launcher/widgets.py index 774ceb659d3..3eb641bdb33 100644 --- a/openpype/tools/launcher/widgets.py +++ b/openpype/tools/launcher/widgets.py @@ -1,7 +1,7 @@ import copy import time import collections -from Qt import QtWidgets, QtCore, QtGui +from qtpy import QtWidgets, QtCore, QtGui import qtawesome from openpype.tools.flickcharm import FlickCharm diff --git a/openpype/tools/launcher/window.py b/openpype/tools/launcher/window.py index a9eaa932bba..f68fc4befc7 100644 --- a/openpype/tools/launcher/window.py +++ b/openpype/tools/launcher/window.py @@ -1,7 +1,7 @@ import copy import logging -from Qt import QtWidgets, QtCore, QtGui +from qtpy import QtWidgets, QtCore, QtGui from openpype import style from openpype import resources From c8872b9b6e7674a079dcbbfc21ff1492f8fb32b9 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Dec 2022 15:18:21 +0100 Subject: [PATCH 108/130] tray is using qtpy instead of Qt --- openpype/tools/tray/pype_info_widget.py | 2 +- openpype/tools/tray/pype_tray.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/tools/tray/pype_info_widget.py b/openpype/tools/tray/pype_info_widget.py index 232d2024ac7..c616ad4dbac 100644 --- a/openpype/tools/tray/pype_info_widget.py +++ b/openpype/tools/tray/pype_info_widget.py @@ -2,7 +2,7 @@ import json import collections -from Qt import QtCore, QtGui, QtWidgets +from qtpy import QtCore, QtGui, QtWidgets from openpype import style from openpype import resources diff --git a/openpype/tools/tray/pype_tray.py b/openpype/tools/tray/pype_tray.py index d4189af4d86..df18325beca 100644 --- a/openpype/tools/tray/pype_tray.py +++ b/openpype/tools/tray/pype_tray.py @@ -6,7 +6,7 @@ import platform -from Qt import QtCore, QtGui, QtWidgets +from qtpy import QtCore, 
QtGui, QtWidgets import openpype.version from openpype import resources, style From 23e9f120cf6152228b1f6c0ee735889f0433fb5c Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Tue, 13 Dec 2022 15:19:13 +0100 Subject: [PATCH 109/130] :recycle: fix 5.1 compatibility --- .../UE_5.0/Source/OpenPype/Private/AssetContainer.cpp | 8 ++++---- .../Source/OpenPype/Private/OpenPypePublishInstance.cpp | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/AssetContainer.cpp b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/AssetContainer.cpp index c766f87a8ef..4965ae2aab8 100644 --- a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/AssetContainer.cpp +++ b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/AssetContainer.cpp @@ -30,8 +30,8 @@ void UAssetContainer::OnAssetAdded(const FAssetData& AssetData) // get asset path and class FString assetPath = AssetData.GetFullName(); - FString assetFName = AssetData.AssetClass.ToString(); - + FString assetFName = AssetData.AssetClassPath.ToString(); + UE_LOG(LogTemp, Log, TEXT("asset name %s"), *assetFName); // split path assetPath.ParseIntoArray(split, TEXT(" "), true); @@ -60,7 +60,7 @@ void UAssetContainer::OnAssetRemoved(const FAssetData& AssetData) // get asset path and class FString assetPath = AssetData.GetFullName(); - FString assetFName = AssetData.AssetClass.ToString(); + FString assetFName = AssetData.AssetClassPath.ToString(); // split path assetPath.ParseIntoArray(split, TEXT(" "), true); @@ -93,7 +93,7 @@ void UAssetContainer::OnAssetRenamed(const FAssetData& AssetData, const FString& // get asset path and class FString assetPath = AssetData.GetFullName(); - FString assetFName = AssetData.AssetClass.ToString(); + FString assetFName = AssetData.AssetClassPath.ToString(); // split path assetPath.ParseIntoArray(split, TEXT(" "), true); diff --git a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp index 322663eeec8..c432ebb7e42 100644 --- a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp +++ b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp @@ -4,7 +4,7 @@ #include "AssetRegistryModule.h" #include "AssetToolsModule.h" #include "Framework/Notifications/NotificationManager.h" -#include "SNotificationList.h" +#include "Widgets/Notifications/SNotificationList.h" //Moves all the invalid pointers to the end to prepare them for the shrinking #define REMOVE_INVALID_ENTRIES(VAR) VAR.CompactStable(); \ @@ -47,7 +47,7 @@ void UOpenPypePublishInstance::OnAssetCreated(const FAssetData& InAssetData) if (!IsValid(Asset)) { UE_LOG(LogAssetData, Warning, TEXT("Asset \"%s\" is not valid! 
Skipping the addition."), - *InAssetData.ObjectPath.ToString()); + *InAssetData.GetObjectPathString()); return; } From b487b90fa0ac0943b02bc7b61575163780fb20fd Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Tue, 13 Dec 2022 15:19:29 +0100 Subject: [PATCH 110/130] :recycle: update build options --- .../integration/UE_5.0/Source/OpenPype/OpenPype.Build.cs | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/OpenPype.Build.cs b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/OpenPype.Build.cs index fcfd268234f..67db648b2aa 100644 --- a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/OpenPype.Build.cs +++ b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/OpenPype.Build.cs @@ -6,7 +6,11 @@ public class OpenPype : ModuleRules { public OpenPype(ReadOnlyTargetRules Target) : base(Target) { + DefaultBuildSettings = BuildSettingsVersion.V2; + bLegacyPublicIncludePaths = false; + ShadowVariableWarningLevel = WarningLevel.Error; PCHUsage = ModuleRules.PCHUsageMode.UseExplicitOrSharedPCHs; + IncludeOrderVersion = EngineIncludeOrderVersion.Unreal5_0; PublicIncludePaths.AddRange( new string[] { From 4a69f9c6668c6b79a474889fae64724da9334569 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Dec 2022 15:20:41 +0100 Subject: [PATCH 111/130] standalone publisher is using qtpy instead of Qt --- openpype/tools/standalonepublish/app.py | 2 +- openpype/tools/standalonepublish/widgets/__init__.py | 2 +- openpype/tools/standalonepublish/widgets/model_asset.py | 2 +- .../standalonepublish/widgets/model_filter_proxy_exact_match.py | 2 +- .../widgets/model_filter_proxy_recursive_sort.py | 2 +- .../tools/standalonepublish/widgets/model_tasks_template.py | 2 +- openpype/tools/standalonepublish/widgets/model_tree.py | 2 +- .../standalonepublish/widgets/model_tree_view_deselectable.py | 2 +- openpype/tools/standalonepublish/widgets/widget_asset.py | 2 +- .../tools/standalonepublish/widgets/widget_component_item.py | 2 +- openpype/tools/standalonepublish/widgets/widget_components.py | 2 +- .../tools/standalonepublish/widgets/widget_components_list.py | 2 +- openpype/tools/standalonepublish/widgets/widget_drop_empty.py | 2 +- openpype/tools/standalonepublish/widgets/widget_drop_frame.py | 2 +- openpype/tools/standalonepublish/widgets/widget_family.py | 2 +- openpype/tools/standalonepublish/widgets/widget_family_desc.py | 2 +- openpype/tools/standalonepublish/widgets/widget_shadow.py | 2 +- 17 files changed, 17 insertions(+), 17 deletions(-) diff --git a/openpype/tools/standalonepublish/app.py b/openpype/tools/standalonepublish/app.py index c93c33b2a52..d71c205c3b9 100644 --- a/openpype/tools/standalonepublish/app.py +++ b/openpype/tools/standalonepublish/app.py @@ -4,7 +4,7 @@ import signal from bson.objectid import ObjectId -from Qt import QtWidgets, QtCore, QtGui +from qtpy import QtWidgets, QtCore, QtGui from openpype.client import get_asset_by_id diff --git a/openpype/tools/standalonepublish/widgets/__init__.py b/openpype/tools/standalonepublish/widgets/__init__.py index e61897f8078..d79654498dd 100644 --- a/openpype/tools/standalonepublish/widgets/__init__.py +++ b/openpype/tools/standalonepublish/widgets/__init__.py @@ -1,4 +1,4 @@ -from Qt import QtCore +from qtpy import QtCore HelpRole = QtCore.Qt.UserRole + 2 FamilyRole = QtCore.Qt.UserRole + 3 diff --git a/openpype/tools/standalonepublish/widgets/model_asset.py b/openpype/tools/standalonepublish/widgets/model_asset.py index 9fed46b3fe8..2f67036e78d 100644 --- 
a/openpype/tools/standalonepublish/widgets/model_asset.py +++ b/openpype/tools/standalonepublish/widgets/model_asset.py @@ -1,7 +1,7 @@ import logging import collections -from Qt import QtCore, QtGui +from qtpy import QtCore, QtGui import qtawesome from openpype.client import get_assets diff --git a/openpype/tools/standalonepublish/widgets/model_filter_proxy_exact_match.py b/openpype/tools/standalonepublish/widgets/model_filter_proxy_exact_match.py index 604ae30934a..df9c6fb35f9 100644 --- a/openpype/tools/standalonepublish/widgets/model_filter_proxy_exact_match.py +++ b/openpype/tools/standalonepublish/widgets/model_filter_proxy_exact_match.py @@ -1,4 +1,4 @@ -from Qt import QtCore +from qtpy import QtCore class ExactMatchesFilterProxyModel(QtCore.QSortFilterProxyModel): diff --git a/openpype/tools/standalonepublish/widgets/model_filter_proxy_recursive_sort.py b/openpype/tools/standalonepublish/widgets/model_filter_proxy_recursive_sort.py index 71ecdf41dc1..727d3a97d70 100644 --- a/openpype/tools/standalonepublish/widgets/model_filter_proxy_recursive_sort.py +++ b/openpype/tools/standalonepublish/widgets/model_filter_proxy_recursive_sort.py @@ -1,5 +1,5 @@ -from Qt import QtCore import re +from qtpy import QtCore class RecursiveSortFilterProxyModel(QtCore.QSortFilterProxyModel): diff --git a/openpype/tools/standalonepublish/widgets/model_tasks_template.py b/openpype/tools/standalonepublish/widgets/model_tasks_template.py index 648f7ed4790..e22a4e3bf83 100644 --- a/openpype/tools/standalonepublish/widgets/model_tasks_template.py +++ b/openpype/tools/standalonepublish/widgets/model_tasks_template.py @@ -1,4 +1,4 @@ -from Qt import QtCore +from qtpy import QtCore import qtawesome from openpype.style import get_default_entity_icon_color diff --git a/openpype/tools/standalonepublish/widgets/model_tree.py b/openpype/tools/standalonepublish/widgets/model_tree.py index efac0d6b781..040e95d944b 100644 --- a/openpype/tools/standalonepublish/widgets/model_tree.py +++ b/openpype/tools/standalonepublish/widgets/model_tree.py @@ -1,4 +1,4 @@ -from Qt import QtCore +from qtpy import QtCore from . 
import Node diff --git a/openpype/tools/standalonepublish/widgets/model_tree_view_deselectable.py b/openpype/tools/standalonepublish/widgets/model_tree_view_deselectable.py index 6a15916981a..3c8c760eca3 100644 --- a/openpype/tools/standalonepublish/widgets/model_tree_view_deselectable.py +++ b/openpype/tools/standalonepublish/widgets/model_tree_view_deselectable.py @@ -1,4 +1,4 @@ -from Qt import QtWidgets, QtCore +from qtpy import QtWidgets, QtCore class DeselectableTreeView(QtWidgets.QTreeView): diff --git a/openpype/tools/standalonepublish/widgets/widget_asset.py b/openpype/tools/standalonepublish/widgets/widget_asset.py index 77d756a606b..01f49b79ec3 100644 --- a/openpype/tools/standalonepublish/widgets/widget_asset.py +++ b/openpype/tools/standalonepublish/widgets/widget_asset.py @@ -1,5 +1,5 @@ import contextlib -from Qt import QtWidgets, QtCore +from qtpy import QtWidgets, QtCore import qtawesome from openpype.client import ( diff --git a/openpype/tools/standalonepublish/widgets/widget_component_item.py b/openpype/tools/standalonepublish/widgets/widget_component_item.py index de3cde50cd5..523c3977e3b 100644 --- a/openpype/tools/standalonepublish/widgets/widget_component_item.py +++ b/openpype/tools/standalonepublish/widgets/widget_component_item.py @@ -1,5 +1,5 @@ import os -from Qt import QtCore, QtGui, QtWidgets +from qtpy import QtCore, QtGui, QtWidgets from .resources import get_resource diff --git a/openpype/tools/standalonepublish/widgets/widget_components.py b/openpype/tools/standalonepublish/widgets/widget_components.py index 237e1da5830..a86ac845f27 100644 --- a/openpype/tools/standalonepublish/widgets/widget_components.py +++ b/openpype/tools/standalonepublish/widgets/widget_components.py @@ -4,7 +4,7 @@ import random import string -from Qt import QtWidgets, QtCore +from qtpy import QtWidgets, QtCore from openpype.pipeline import legacy_io from openpype.lib import ( diff --git a/openpype/tools/standalonepublish/widgets/widget_components_list.py b/openpype/tools/standalonepublish/widgets/widget_components_list.py index 0ee90ae4de9..e29ab3c127f 100644 --- a/openpype/tools/standalonepublish/widgets/widget_components_list.py +++ b/openpype/tools/standalonepublish/widgets/widget_components_list.py @@ -1,4 +1,4 @@ -from Qt import QtWidgets +from qtpy import QtWidgets class ComponentsList(QtWidgets.QTableWidget): diff --git a/openpype/tools/standalonepublish/widgets/widget_drop_empty.py b/openpype/tools/standalonepublish/widgets/widget_drop_empty.py index a890f384264..110e4d6353d 100644 --- a/openpype/tools/standalonepublish/widgets/widget_drop_empty.py +++ b/openpype/tools/standalonepublish/widgets/widget_drop_empty.py @@ -1,4 +1,4 @@ -from Qt import QtWidgets, QtCore, QtGui +from qtpy import QtWidgets, QtCore, QtGui class DropEmpty(QtWidgets.QWidget): diff --git a/openpype/tools/standalonepublish/widgets/widget_drop_frame.py b/openpype/tools/standalonepublish/widgets/widget_drop_frame.py index 18c2b276782..f46e31786c5 100644 --- a/openpype/tools/standalonepublish/widgets/widget_drop_frame.py +++ b/openpype/tools/standalonepublish/widgets/widget_drop_frame.py @@ -4,7 +4,7 @@ import clique import subprocess import openpype.lib -from Qt import QtWidgets, QtCore +from qtpy import QtWidgets, QtCore from . 
import DropEmpty, ComponentsList, ComponentItem diff --git a/openpype/tools/standalonepublish/widgets/widget_family.py b/openpype/tools/standalonepublish/widgets/widget_family.py index e1cbb8d3971..11c5ec33b70 100644 --- a/openpype/tools/standalonepublish/widgets/widget_family.py +++ b/openpype/tools/standalonepublish/widgets/widget_family.py @@ -1,6 +1,6 @@ import re -from Qt import QtWidgets, QtCore +from qtpy import QtWidgets, QtCore from openpype.client import ( get_asset_by_name, diff --git a/openpype/tools/standalonepublish/widgets/widget_family_desc.py b/openpype/tools/standalonepublish/widgets/widget_family_desc.py index 2095b332bd8..33174a852bd 100644 --- a/openpype/tools/standalonepublish/widgets/widget_family_desc.py +++ b/openpype/tools/standalonepublish/widgets/widget_family_desc.py @@ -1,5 +1,5 @@ import six -from Qt import QtWidgets, QtCore, QtGui +from qtpy import QtWidgets, QtCore, QtGui import qtawesome from . import FamilyRole, PluginRole diff --git a/openpype/tools/standalonepublish/widgets/widget_shadow.py b/openpype/tools/standalonepublish/widgets/widget_shadow.py index de5fdf6be0f..64cb9544faa 100644 --- a/openpype/tools/standalonepublish/widgets/widget_shadow.py +++ b/openpype/tools/standalonepublish/widgets/widget_shadow.py @@ -1,4 +1,4 @@ -from Qt import QtWidgets, QtCore, QtGui +from qtpy import QtWidgets, QtCore, QtGui class ShadowWidget(QtWidgets.QWidget): From 69b0c23056e6c81ac41b5be9bdb75ac75505cb9e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Dec 2022 15:21:00 +0100 Subject: [PATCH 112/130] stdbroker is using qtpy instead of Qt --- openpype/tools/stdout_broker/window.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/stdout_broker/window.py b/openpype/tools/stdout_broker/window.py index f5720ca05b6..5825da73e27 100644 --- a/openpype/tools/stdout_broker/window.py +++ b/openpype/tools/stdout_broker/window.py @@ -1,7 +1,7 @@ import re import collections -from Qt import QtWidgets +from qtpy import QtWidgets from openpype import style From 4d0e8180674eb0db2ba402fc8532ac9624309dcf Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Tue, 13 Dec 2022 15:33:15 +0100 Subject: [PATCH 113/130] :recycle: fix includes --- .../UE_5.0/Source/OpenPype/Private/AssetContainer.cpp | 2 +- .../UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp | 2 +- .../integration/UE_5.0/Source/OpenPype/Public/AssetContainer.h | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/AssetContainer.cpp b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/AssetContainer.cpp index 4965ae2aab8..61e563f7294 100644 --- a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/AssetContainer.cpp +++ b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/AssetContainer.cpp @@ -1,7 +1,7 @@ // Fill out your copyright notice in the Description page of Project Settings. 
#include "AssetContainer.h" -#include "AssetRegistryModule.h" +#include "AssetRegistry/AssetRegistryModule.h" #include "Misc/PackageName.h" #include "Engine.h" #include "Containers/UnrealString.h" diff --git a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp index c432ebb7e42..f5eb6f9e700 100644 --- a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp +++ b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Private/OpenPypePublishInstance.cpp @@ -1,7 +1,7 @@ #pragma once #include "OpenPypePublishInstance.h" -#include "AssetRegistryModule.h" +#include "AssetRegistry/AssetRegistryModule.h" #include "AssetToolsModule.h" #include "Framework/Notifications/NotificationManager.h" #include "Widgets/Notifications/SNotificationList.h" diff --git a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/AssetContainer.h b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/AssetContainer.h index 3c2a360c78e..2c06e59d6fb 100644 --- a/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/AssetContainer.h +++ b/openpype/hosts/unreal/integration/UE_5.0/Source/OpenPype/Public/AssetContainer.h @@ -5,7 +5,7 @@ #include "CoreMinimal.h" #include "UObject/NoExportTypes.h" #include "Engine/AssetUserData.h" -#include "AssetData.h" +#include "AssetRegistry/AssetData.h" #include "AssetContainer.generated.h" /** From e989dbecdb28b7b7bfb8b2d415d081bed919b279 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Dec 2022 17:19:06 +0100 Subject: [PATCH 114/130] project manager is using qtpy instead of Qt module --- openpype/tools/project_manager/project_manager/__init__.py | 2 +- openpype/tools/project_manager/project_manager/constants.py | 2 +- openpype/tools/project_manager/project_manager/delegates.py | 2 +- openpype/tools/project_manager/project_manager/model.py | 2 +- .../project_manager/project_manager/multiselection_combobox.py | 2 +- openpype/tools/project_manager/project_manager/style.py | 2 +- openpype/tools/project_manager/project_manager/view.py | 2 +- openpype/tools/project_manager/project_manager/widgets.py | 2 +- openpype/tools/project_manager/project_manager/window.py | 2 +- 9 files changed, 9 insertions(+), 9 deletions(-) diff --git a/openpype/tools/project_manager/project_manager/__init__.py b/openpype/tools/project_manager/project_manager/__init__.py index 6e44afd841f..ac4e3d5f392 100644 --- a/openpype/tools/project_manager/project_manager/__init__.py +++ b/openpype/tools/project_manager/project_manager/__init__.py @@ -44,7 +44,7 @@ def main(): import sys - from Qt import QtWidgets + from qtpy import QtWidgets app = QtWidgets.QApplication([]) diff --git a/openpype/tools/project_manager/project_manager/constants.py b/openpype/tools/project_manager/project_manager/constants.py index 7ca4aa94928..72512d797b2 100644 --- a/openpype/tools/project_manager/project_manager/constants.py +++ b/openpype/tools/project_manager/project_manager/constants.py @@ -1,5 +1,5 @@ import re -from Qt import QtCore +from qtpy import QtCore # Item identifier (unique ID - uuid4 is used) diff --git a/openpype/tools/project_manager/project_manager/delegates.py b/openpype/tools/project_manager/project_manager/delegates.py index b066bbb1599..79e9554b0f6 100644 --- a/openpype/tools/project_manager/project_manager/delegates.py +++ b/openpype/tools/project_manager/project_manager/delegates.py @@ -1,4 +1,4 @@ -from Qt 
import QtWidgets, QtCore +from qtpy import QtWidgets, QtCore from .widgets import ( NameTextEdit, diff --git a/openpype/tools/project_manager/project_manager/model.py b/openpype/tools/project_manager/project_manager/model.py index 6f40140e5e1..29a26f700f7 100644 --- a/openpype/tools/project_manager/project_manager/model.py +++ b/openpype/tools/project_manager/project_manager/model.py @@ -5,7 +5,7 @@ from pymongo import UpdateOne, DeleteOne -from Qt import QtCore, QtGui +from qtpy import QtCore, QtGui from openpype.client import ( get_projects, diff --git a/openpype/tools/project_manager/project_manager/multiselection_combobox.py b/openpype/tools/project_manager/project_manager/multiselection_combobox.py index f7768312988..f12f402d1ad 100644 --- a/openpype/tools/project_manager/project_manager/multiselection_combobox.py +++ b/openpype/tools/project_manager/project_manager/multiselection_combobox.py @@ -1,4 +1,4 @@ -from Qt import QtCore, QtWidgets +from qtpy import QtCore, QtWidgets class ComboItemDelegate(QtWidgets.QStyledItemDelegate): diff --git a/openpype/tools/project_manager/project_manager/style.py b/openpype/tools/project_manager/project_manager/style.py index 4405d059604..6445bc341d8 100644 --- a/openpype/tools/project_manager/project_manager/style.py +++ b/openpype/tools/project_manager/project_manager/style.py @@ -1,5 +1,5 @@ import os -from Qt import QtGui +from qtpy import QtGui import qtawesome from openpype.tools.utils import paint_image_with_color diff --git a/openpype/tools/project_manager/project_manager/view.py b/openpype/tools/project_manager/project_manager/view.py index 8d1fe54e831..609db30a817 100644 --- a/openpype/tools/project_manager/project_manager/view.py +++ b/openpype/tools/project_manager/project_manager/view.py @@ -1,7 +1,7 @@ import collections from queue import Queue -from Qt import QtWidgets, QtCore, QtGui +from qtpy import QtWidgets, QtCore, QtGui from openpype.client import get_project from .delegates import ( diff --git a/openpype/tools/project_manager/project_manager/widgets.py b/openpype/tools/project_manager/project_manager/widgets.py index 4bc968347a1..06ae06e4d29 100644 --- a/openpype/tools/project_manager/project_manager/widgets.py +++ b/openpype/tools/project_manager/project_manager/widgets.py @@ -16,7 +16,7 @@ get_warning_pixmap ) -from Qt import QtWidgets, QtCore, QtGui +from qtpy import QtWidgets, QtCore, QtGui class NameTextEdit(QtWidgets.QLineEdit): diff --git a/openpype/tools/project_manager/project_manager/window.py b/openpype/tools/project_manager/project_manager/window.py index 3b2dea8ca3c..e35922cf36d 100644 --- a/openpype/tools/project_manager/project_manager/window.py +++ b/openpype/tools/project_manager/project_manager/window.py @@ -1,4 +1,4 @@ -from Qt import QtWidgets, QtCore, QtGui +from qtpy import QtWidgets, QtCore, QtGui from openpype import resources from openpype.style import load_stylesheet From bd17e0568214475ae3ae6219d411b98d6b9263dc Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Dec 2022 17:20:39 +0100 Subject: [PATCH 115/130] use qtpy in context dialog --- openpype/tools/context_dialog/window.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/context_dialog/window.py b/openpype/tools/context_dialog/window.py index 3b544bd3752..86c53b55c53 100644 --- a/openpype/tools/context_dialog/window.py +++ b/openpype/tools/context_dialog/window.py @@ -1,7 +1,7 @@ import os import json -from Qt import QtWidgets, QtCore, QtGui +from qtpy import QtWidgets, QtCore, QtGui from openpype import 
style from openpype.pipeline import AvalonMongoDB From 4d98f540739961fb746cf010bfb7329c29fbe95c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Dec 2022 17:22:35 +0100 Subject: [PATCH 116/130] traypublisher is using qtpy --- openpype/tools/traypublisher/window.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/traypublisher/window.py b/openpype/tools/traypublisher/window.py index dfe06d149de..3007fa66a51 100644 --- a/openpype/tools/traypublisher/window.py +++ b/openpype/tools/traypublisher/window.py @@ -8,7 +8,7 @@ import platform -from Qt import QtWidgets, QtCore +from qtpy import QtWidgets, QtCore import qtawesome import appdirs From b7afb84d6ca19df23b5bcfef7e451bfc65e749f7 Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Tue, 13 Dec 2022 17:28:20 +0100 Subject: [PATCH 117/130] Fix - join needs list --- openpype/tools/attribute_defs/files_widget.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/tools/attribute_defs/files_widget.py b/openpype/tools/attribute_defs/files_widget.py index 738e50ba071..2c8ed729c23 100644 --- a/openpype/tools/attribute_defs/files_widget.py +++ b/openpype/tools/attribute_defs/files_widget.py @@ -155,7 +155,7 @@ def _update_items_label(self): extensions_label = " or ".join(allowed_items) else: last_item = allowed_items.pop(-1) - new_last_item = " or ".join(last_item, allowed_items.pop(-1)) + new_last_item = " or ".join([last_item, allowed_items.pop(-1)]) allowed_items.append(new_last_item) extensions_label = ", ".join(allowed_items) From a66e421fad3dda9a1620d307b68f01dce9a6cc82 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Dec 2022 17:46:43 +0100 Subject: [PATCH 118/130] use qtpy in module tools --- openpype/modules/avalon_apps/avalon_app.py | 4 ++-- openpype/modules/clockify/clockify_module.py | 2 +- openpype/modules/clockify/widgets.py | 2 +- .../example_addons/example_addon/widgets.py | 2 +- openpype/modules/ftrack/tray/ftrack_tray.py | 4 ++-- openpype/modules/ftrack/tray/login_dialog.py | 7 +++++-- openpype/modules/interfaces.py | 10 ++++----- openpype/modules/kitsu/kitsu_widgets.py | 2 +- .../modules/log_viewer/log_view_module.py | 2 +- openpype/modules/log_viewer/tray/app.py | 2 +- openpype/modules/log_viewer/tray/models.py | 2 +- openpype/modules/log_viewer/tray/widgets.py | 2 +- openpype/modules/muster/muster.py | 2 +- openpype/modules/muster/widget_login.py | 3 +-- .../window/widgets.py | 2 +- .../shotgrid/tray/credential_dialog.py | 2 +- .../modules/shotgrid/tray/shotgrid_tray.py | 2 +- .../modules/sync_server/sync_server_module.py | 2 +- openpype/modules/sync_server/tray/app.py | 2 +- .../modules/sync_server/tray/delegates.py | 2 +- openpype/modules/sync_server/tray/models.py | 21 +++++++++---------- openpype/modules/sync_server/tray/widgets.py | 12 +++++------ .../modules/timers_manager/idle_threads.py | 2 +- .../timers_manager/widget_user_idle.py | 2 +- .../webserver/host_console_listener.py | 2 +- 25 files changed, 49 insertions(+), 48 deletions(-) diff --git a/openpype/modules/avalon_apps/avalon_app.py b/openpype/modules/avalon_apps/avalon_app.py index f9085522b01..a0226ecc5ce 100644 --- a/openpype/modules/avalon_apps/avalon_app.py +++ b/openpype/modules/avalon_apps/avalon_app.py @@ -57,7 +57,7 @@ def tray_menu(self, tray_menu): if not self._library_loader_imported: return - from Qt import QtWidgets + from qtpy import QtWidgets # Actions action_library_loader = QtWidgets.QAction( "Loader", tray_menu @@ -75,7 +75,7 @@ def tray_exit(self, *_a, **_kw): def 
show_library_loader(self): if self._library_loader_window is None: - from Qt import QtCore + from qtpy import QtCore from openpype.tools.libraryloader import LibraryLoaderWindow from openpype.pipeline import install_openpype_plugins diff --git a/openpype/modules/clockify/clockify_module.py b/openpype/modules/clockify/clockify_module.py index 14fcb01f67d..300d5576e2a 100644 --- a/openpype/modules/clockify/clockify_module.py +++ b/openpype/modules/clockify/clockify_module.py @@ -183,7 +183,7 @@ def on_message_widget_close(self): # Definition of Tray menu def tray_menu(self, parent_menu): # Menu for Tray App - from Qt import QtWidgets + from qtpy import QtWidgets menu = QtWidgets.QMenu("Clockify", parent_menu) menu.setProperty("submenu", "on") diff --git a/openpype/modules/clockify/widgets.py b/openpype/modules/clockify/widgets.py index d58df3c0676..122b6212c0e 100644 --- a/openpype/modules/clockify/widgets.py +++ b/openpype/modules/clockify/widgets.py @@ -1,4 +1,4 @@ -from Qt import QtCore, QtGui, QtWidgets +from qtpy import QtCore, QtGui, QtWidgets from openpype import resources, style diff --git a/openpype/modules/example_addons/example_addon/widgets.py b/openpype/modules/example_addons/example_addon/widgets.py index c0a0a7e5106..cd0da3ae437 100644 --- a/openpype/modules/example_addons/example_addon/widgets.py +++ b/openpype/modules/example_addons/example_addon/widgets.py @@ -1,4 +1,4 @@ -from Qt import QtWidgets +from qtpy import QtWidgets from openpype.style import load_stylesheet diff --git a/openpype/modules/ftrack/tray/ftrack_tray.py b/openpype/modules/ftrack/tray/ftrack_tray.py index e3c6e30ead7..8718dff4343 100644 --- a/openpype/modules/ftrack/tray/ftrack_tray.py +++ b/openpype/modules/ftrack/tray/ftrack_tray.py @@ -3,9 +3,9 @@ import datetime import threading -from Qt import QtCore, QtWidgets, QtGui - import ftrack_api +from qtpy import QtCore, QtWidgets, QtGui + from openpype import resources from openpype.lib import Logger from openpype_modules.ftrack import resolve_ftrack_url, FTRACK_MODULE_DIR diff --git a/openpype/modules/ftrack/tray/login_dialog.py b/openpype/modules/ftrack/tray/login_dialog.py index 05d9226ca4e..fbb3455775e 100644 --- a/openpype/modules/ftrack/tray/login_dialog.py +++ b/openpype/modules/ftrack/tray/login_dialog.py @@ -1,10 +1,13 @@ import os + import requests +from qtpy import QtCore, QtGui, QtWidgets + from openpype import style from openpype_modules.ftrack.lib import credentials -from . import login_tools from openpype import resources -from Qt import QtCore, QtGui, QtWidgets + +from . 
import login_tools class CredentialsDialog(QtWidgets.QDialog): diff --git a/openpype/modules/interfaces.py b/openpype/modules/interfaces.py index f92ec6bf2d6..7cd299df67a 100644 --- a/openpype/modules/interfaces.py +++ b/openpype/modules/interfaces.py @@ -222,7 +222,7 @@ def on_action_trigger(self): pass def tray_menu(self, tray_menu): - from Qt import QtWidgets + from qtpy import QtWidgets if self.admin_action: menu = self.admin_submenu(tray_menu) @@ -247,7 +247,7 @@ def tray_exit(self): @staticmethod def admin_submenu(tray_menu): if ITrayAction._admin_submenu is None: - from Qt import QtWidgets + from qtpy import QtWidgets admin_submenu = QtWidgets.QMenu("Admin", tray_menu) admin_submenu.menuAction().setVisible(False) @@ -279,7 +279,7 @@ def label(self): @staticmethod def services_submenu(tray_menu): if ITrayService._services_submenu is None: - from Qt import QtWidgets + from qtpy import QtWidgets services_submenu = QtWidgets.QMenu("Services", tray_menu) services_submenu.menuAction().setVisible(False) @@ -294,7 +294,7 @@ def add_service_action(action): @staticmethod def _load_service_icons(): - from Qt import QtGui + from qtpy import QtGui ITrayService._failed_icon = QtGui.QIcon( resources.get_resource("icons", "circle_red.png") @@ -325,7 +325,7 @@ def get_icon_failed(): return ITrayService._failed_icon def tray_menu(self, tray_menu): - from Qt import QtWidgets + from qtpy import QtWidgets action = QtWidgets.QAction( self.label, diff --git a/openpype/modules/kitsu/kitsu_widgets.py b/openpype/modules/kitsu/kitsu_widgets.py index 65baed9665e..5ff36135837 100644 --- a/openpype/modules/kitsu/kitsu_widgets.py +++ b/openpype/modules/kitsu/kitsu_widgets.py @@ -1,4 +1,4 @@ -from Qt import QtWidgets, QtCore, QtGui +from qtpy import QtWidgets, QtCore, QtGui from openpype import style from openpype.modules.kitsu.utils.credentials import ( diff --git a/openpype/modules/log_viewer/log_view_module.py b/openpype/modules/log_viewer/log_view_module.py index 31e954fadd0..e9dba2041c4 100644 --- a/openpype/modules/log_viewer/log_view_module.py +++ b/openpype/modules/log_viewer/log_view_module.py @@ -22,7 +22,7 @@ def tray_init(self): # Definition of Tray menu def tray_menu(self, tray_menu): - from Qt import QtWidgets + from qtpy import QtWidgets # Menu for Tray App menu = QtWidgets.QMenu('Logging', tray_menu) diff --git a/openpype/modules/log_viewer/tray/app.py b/openpype/modules/log_viewer/tray/app.py index def319e0e3f..3c49f337d43 100644 --- a/openpype/modules/log_viewer/tray/app.py +++ b/openpype/modules/log_viewer/tray/app.py @@ -1,4 +1,4 @@ -from Qt import QtWidgets, QtCore +from qtpy import QtWidgets, QtCore from .widgets import LogsWidget, OutputWidget from openpype import style diff --git a/openpype/modules/log_viewer/tray/models.py b/openpype/modules/log_viewer/tray/models.py index d369ffeb64d..bc1f54c06c6 100644 --- a/openpype/modules/log_viewer/tray/models.py +++ b/openpype/modules/log_viewer/tray/models.py @@ -1,5 +1,5 @@ import collections -from Qt import QtCore, QtGui +from qtpy import QtCore, QtGui from openpype.lib import Logger diff --git a/openpype/modules/log_viewer/tray/widgets.py b/openpype/modules/log_viewer/tray/widgets.py index c7ac64ab70a..981152e6e20 100644 --- a/openpype/modules/log_viewer/tray/widgets.py +++ b/openpype/modules/log_viewer/tray/widgets.py @@ -1,5 +1,5 @@ import html -from Qt import QtCore, QtWidgets +from qtpy import QtCore, QtWidgets import qtawesome from .models import LogModel, LogsFilterProxy diff --git a/openpype/modules/muster/muster.py 
b/openpype/modules/muster/muster.py index 8d395d16e8c..77b9214a5af 100644 --- a/openpype/modules/muster/muster.py +++ b/openpype/modules/muster/muster.py @@ -53,7 +53,7 @@ def tray_exit(self): # Definition of Tray menu def tray_menu(self, parent): """Add **change credentials** option to tray menu.""" - from Qt import QtWidgets + from qtpy import QtWidgets # Menu for Tray App menu = QtWidgets.QMenu('Muster', parent) diff --git a/openpype/modules/muster/widget_login.py b/openpype/modules/muster/widget_login.py index ae838c6cea7..f38f43fb7f5 100644 --- a/openpype/modules/muster/widget_login.py +++ b/openpype/modules/muster/widget_login.py @@ -1,5 +1,4 @@ -import os -from Qt import QtCore, QtGui, QtWidgets +from qtpy import QtCore, QtGui, QtWidgets from openpype import resources, style diff --git a/openpype/modules/python_console_interpreter/window/widgets.py b/openpype/modules/python_console_interpreter/window/widgets.py index 36ce1b61a26..b670352f44d 100644 --- a/openpype/modules/python_console_interpreter/window/widgets.py +++ b/openpype/modules/python_console_interpreter/window/widgets.py @@ -5,7 +5,7 @@ from code import InteractiveInterpreter import appdirs -from Qt import QtCore, QtWidgets, QtGui +from qtpy import QtCore, QtWidgets, QtGui from openpype import resources from openpype.style import load_stylesheet diff --git a/openpype/modules/shotgrid/tray/credential_dialog.py b/openpype/modules/shotgrid/tray/credential_dialog.py index 9d841d98be3..7b839b63c0c 100644 --- a/openpype/modules/shotgrid/tray/credential_dialog.py +++ b/openpype/modules/shotgrid/tray/credential_dialog.py @@ -1,5 +1,5 @@ import os -from Qt import QtCore, QtWidgets, QtGui +from qtpy import QtCore, QtWidgets, QtGui from openpype import style from openpype import resources diff --git a/openpype/modules/shotgrid/tray/shotgrid_tray.py b/openpype/modules/shotgrid/tray/shotgrid_tray.py index 4038d77b03a..8e363bd318e 100644 --- a/openpype/modules/shotgrid/tray/shotgrid_tray.py +++ b/openpype/modules/shotgrid/tray/shotgrid_tray.py @@ -1,7 +1,7 @@ import os import webbrowser -from Qt import QtWidgets +from qtpy import QtWidgets from openpype.modules.shotgrid.lib import credentials from openpype.modules.shotgrid.tray.credential_dialog import ( diff --git a/openpype/modules/sync_server/sync_server_module.py b/openpype/modules/sync_server/sync_server_module.py index 653ee50541b..f9b99da02b7 100644 --- a/openpype/modules/sync_server/sync_server_module.py +++ b/openpype/modules/sync_server/sync_server_module.py @@ -1244,7 +1244,7 @@ def tray_menu(self, parent_menu): if not self.enabled: return - from Qt import QtWidgets + from qtpy import QtWidgets """Add menu or action to Tray(or parent)'s menu""" action = QtWidgets.QAction(self.label, parent_menu) action.triggered.connect(self.show_widget) diff --git a/openpype/modules/sync_server/tray/app.py b/openpype/modules/sync_server/tray/app.py index 9b9768327ec..c0938351283 100644 --- a/openpype/modules/sync_server/tray/app.py +++ b/openpype/modules/sync_server/tray/app.py @@ -1,4 +1,4 @@ -from Qt import QtWidgets, QtCore, QtGui +from qtpy import QtWidgets, QtCore, QtGui from openpype.tools.settings import style diff --git a/openpype/modules/sync_server/tray/delegates.py b/openpype/modules/sync_server/tray/delegates.py index 988eb40d28e..e14b2e2f607 100644 --- a/openpype/modules/sync_server/tray/delegates.py +++ b/openpype/modules/sync_server/tray/delegates.py @@ -1,5 +1,5 @@ import os -from Qt import QtCore, QtWidgets, QtGui +from qtpy import QtCore, QtWidgets, QtGui from openpype.lib 
import Logger diff --git a/openpype/modules/sync_server/tray/models.py b/openpype/modules/sync_server/tray/models.py index d63d046508e..b52f3509079 100644 --- a/openpype/modules/sync_server/tray/models.py +++ b/openpype/modules/sync_server/tray/models.py @@ -3,8 +3,7 @@ from bson.objectid import ObjectId import datetime -from Qt import QtCore -from Qt.QtCore import Qt +from qtpy import QtCore import qtawesome from openpype.tools.utils.delegates import pretty_timestamp @@ -79,16 +78,16 @@ def rowCount(self, _index): def columnCount(self, _index=None): return len(self._header) - def headerData(self, section, orientation, role=Qt.DisplayRole): + def headerData(self, section, orientation, role=QtCore.Qt.DisplayRole): if section >= len(self.COLUMN_LABELS): return - if role == Qt.DisplayRole: - if orientation == Qt.Horizontal: + if role == QtCore.Qt.DisplayRole: + if orientation == QtCore.Qt.Horizontal: return self.COLUMN_LABELS[section][1] if role == HEADER_NAME_ROLE: - if orientation == Qt.Horizontal: + if orientation == QtCore.Qt.Horizontal: return self.COLUMN_LABELS[section][0] # return name def data(self, index, role): @@ -123,7 +122,7 @@ def data(self, index, role): return item.status == lib.STATUS[2] and \ item.remote_progress < 1 - if role in (Qt.DisplayRole, Qt.EditRole): + if role in (QtCore.Qt.DisplayRole, QtCore.Qt.EditRole): # because of ImageDelegate if header_value in ['remote_site', 'local_site']: return "" @@ -146,7 +145,7 @@ def data(self, index, role): if role == STATUS_ROLE: return item.status - if role == Qt.UserRole: + if role == QtCore.Qt.UserRole: return item._id @property @@ -409,7 +408,7 @@ def get_index(self, id): """ for i in range(self.rowCount(None)): index = self.index(i, 0) - value = self.data(index, Qt.UserRole) + value = self.data(index, QtCore.Qt.UserRole) if value == id: return index return None @@ -917,7 +916,7 @@ def set_priority_data(self, index, value): if not self.can_edit: return - repre_id = self.data(index, Qt.UserRole) + repre_id = self.data(index, QtCore.Qt.UserRole) representation = get_representation_by_id(self.project, repre_id) if representation: @@ -1353,7 +1352,7 @@ def set_priority_data(self, index, value): if not self.can_edit: return - file_id = self.data(index, Qt.UserRole) + file_id = self.data(index, QtCore.Qt.UserRole) updated_file = None representation = get_representation_by_id(self.project, self._id) diff --git a/openpype/modules/sync_server/tray/widgets.py b/openpype/modules/sync_server/tray/widgets.py index c40aa98f248..b9ef45727a2 100644 --- a/openpype/modules/sync_server/tray/widgets.py +++ b/openpype/modules/sync_server/tray/widgets.py @@ -3,8 +3,7 @@ import sys from functools import partial -from Qt import QtWidgets, QtCore, QtGui -from Qt.QtCore import Qt +from qtpy import QtWidgets, QtCore, QtGui import qtawesome from openpype.tools.settings import style @@ -260,7 +259,7 @@ def _selection_changed(self, _new_selected, _all_selected): self._selected_ids = set() for index in idxs: - self._selected_ids.add(self.model.data(index, Qt.UserRole)) + self._selected_ids.add(self.model.data(index, QtCore.Qt.UserRole)) def _set_selection(self): """ @@ -291,7 +290,7 @@ def _double_clicked(self, index): self.table_view.openPersistentEditor(index) return - _id = self.model.data(index, Qt.UserRole) + _id = self.model.data(index, QtCore.Qt.UserRole) detail_window = SyncServerDetailWindow( self.sync_server, _id, self.model.project, parent=self) detail_window.exec() @@ -615,7 +614,7 @@ def __init__(self, sync_server, project=None, parent=None): 
table_view.setSelectionBehavior( QtWidgets.QAbstractItemView.SelectRows) table_view.horizontalHeader().setSortIndicator( - -1, Qt.AscendingOrder) + -1, QtCore.Qt.AscendingOrder) table_view.setAlternatingRowColors(True) table_view.verticalHeader().hide() table_view.viewport().setAttribute(QtCore.Qt.WA_Hover, True) @@ -773,7 +772,8 @@ def __init__(self, sync_server, _id=None, project=None, parent=None): QtWidgets.QAbstractItemView.ExtendedSelection) table_view.setSelectionBehavior( QtWidgets.QTableView.SelectRows) - table_view.horizontalHeader().setSortIndicator(-1, Qt.AscendingOrder) + table_view.horizontalHeader().setSortIndicator( + -1, QtCore.Qt.AscendingOrder) table_view.horizontalHeader().setSortIndicatorShown(True) table_view.setAlternatingRowColors(True) table_view.verticalHeader().hide() diff --git a/openpype/modules/timers_manager/idle_threads.py b/openpype/modules/timers_manager/idle_threads.py index 72427611432..eb11bbf1176 100644 --- a/openpype/modules/timers_manager/idle_threads.py +++ b/openpype/modules/timers_manager/idle_threads.py @@ -1,5 +1,5 @@ import time -from Qt import QtCore +from qtpy import QtCore from pynput import mouse, keyboard from openpype.lib import Logger diff --git a/openpype/modules/timers_manager/widget_user_idle.py b/openpype/modules/timers_manager/widget_user_idle.py index 1ecea744406..9df328e6b25 100644 --- a/openpype/modules/timers_manager/widget_user_idle.py +++ b/openpype/modules/timers_manager/widget_user_idle.py @@ -1,4 +1,4 @@ -from Qt import QtCore, QtGui, QtWidgets +from qtpy import QtCore, QtGui, QtWidgets from openpype import resources, style diff --git a/openpype/modules/webserver/host_console_listener.py b/openpype/modules/webserver/host_console_listener.py index fdfe1ba6885..e5c11af9c24 100644 --- a/openpype/modules/webserver/host_console_listener.py +++ b/openpype/modules/webserver/host_console_listener.py @@ -3,7 +3,7 @@ import json import logging from concurrent.futures import CancelledError -from Qt import QtWidgets +from qtpy import QtWidgets from openpype.modules import ITrayService From a3db8a0539956a7cc5d2f2b199d5e4abbc18b778 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Dec 2022 17:47:52 +0100 Subject: [PATCH 119/130] use qtpy in hosts which have use openpype process --- openpype/hosts/aftereffects/api/launch_logic.py | 2 +- openpype/hosts/aftereffects/api/lib.py | 2 +- openpype/hosts/aftereffects/api/pipeline.py | 2 +- openpype/hosts/blender/api/ops.py | 2 +- openpype/hosts/harmony/api/lib.py | 2 +- openpype/hosts/photoshop/api/launch_logic.py | 2 +- openpype/hosts/photoshop/api/lib.py | 2 +- openpype/hosts/photoshop/api/pipeline.py | 2 +- .../hosts/photoshop/plugins/create/create_legacy_image.py | 2 +- openpype/hosts/tvpaint/api/launch_script.py | 2 +- openpype/hosts/tvpaint/plugins/create/create_render_layer.py | 4 ++-- openpype/hosts/unreal/api/tools_ui.py | 2 +- 12 files changed, 13 insertions(+), 13 deletions(-) diff --git a/openpype/hosts/aftereffects/api/launch_logic.py b/openpype/hosts/aftereffects/api/launch_logic.py index 9c8513fe8c1..50675c84824 100644 --- a/openpype/hosts/aftereffects/api/launch_logic.py +++ b/openpype/hosts/aftereffects/api/launch_logic.py @@ -10,7 +10,7 @@ WebSocketAsync ) -from Qt import QtCore +from qtpy import QtCore from openpype.lib import Logger from openpype.pipeline import legacy_io diff --git a/openpype/hosts/aftereffects/api/lib.py b/openpype/hosts/aftereffects/api/lib.py index 8cdf9c407ec..c738bcba2dc 100644 --- a/openpype/hosts/aftereffects/api/lib.py +++ 
b/openpype/hosts/aftereffects/api/lib.py @@ -7,7 +7,7 @@ import logging from functools import partial -from Qt import QtWidgets +from qtpy import QtWidgets from openpype.pipeline import install_host from openpype.modules import ModulesManager diff --git a/openpype/hosts/aftereffects/api/pipeline.py b/openpype/hosts/aftereffects/api/pipeline.py index 7026fe3f053..68a00e30b76 100644 --- a/openpype/hosts/aftereffects/api/pipeline.py +++ b/openpype/hosts/aftereffects/api/pipeline.py @@ -1,6 +1,6 @@ import os -from Qt import QtWidgets +from qtpy import QtWidgets import pyblish.api diff --git a/openpype/hosts/blender/api/ops.py b/openpype/hosts/blender/api/ops.py index e0e09277dfd..481c199db29 100644 --- a/openpype/hosts/blender/api/ops.py +++ b/openpype/hosts/blender/api/ops.py @@ -10,7 +10,7 @@ from types import ModuleType from typing import Dict, List, Optional, Union -from Qt import QtWidgets, QtCore +from qtpy import QtWidgets, QtCore import bpy import bpy.utils.previews diff --git a/openpype/hosts/harmony/api/lib.py b/openpype/hosts/harmony/api/lib.py index e5e7ad1b7ee..e1e77bfbee0 100644 --- a/openpype/hosts/harmony/api/lib.py +++ b/openpype/hosts/harmony/api/lib.py @@ -14,7 +14,7 @@ import signal import time from uuid import uuid4 -from Qt import QtWidgets, QtCore, QtGui +from qtpy import QtWidgets, QtCore, QtGui import collections from .server import Server diff --git a/openpype/hosts/photoshop/api/launch_logic.py b/openpype/hosts/photoshop/api/launch_logic.py index 1f0203dca6d..1403b6cfa17 100644 --- a/openpype/hosts/photoshop/api/launch_logic.py +++ b/openpype/hosts/photoshop/api/launch_logic.py @@ -8,7 +8,7 @@ WebSocketAsync ) -from Qt import QtCore +from qtpy import QtCore from openpype.lib import Logger from openpype.pipeline import legacy_io diff --git a/openpype/hosts/photoshop/api/lib.py b/openpype/hosts/photoshop/api/lib.py index 221b4314e6f..e3b601d011c 100644 --- a/openpype/hosts/photoshop/api/lib.py +++ b/openpype/hosts/photoshop/api/lib.py @@ -3,7 +3,7 @@ import contextlib import traceback -from Qt import QtWidgets +from qtpy import QtWidgets from openpype.lib import env_value_to_bool, Logger from openpype.modules import ModulesManager diff --git a/openpype/hosts/photoshop/api/pipeline.py b/openpype/hosts/photoshop/api/pipeline.py index 9f6fc0983ce..d2da8c5cb46 100644 --- a/openpype/hosts/photoshop/api/pipeline.py +++ b/openpype/hosts/photoshop/api/pipeline.py @@ -1,5 +1,5 @@ import os -from Qt import QtWidgets +from qtpy import QtWidgets import pyblish.api diff --git a/openpype/hosts/photoshop/plugins/create/create_legacy_image.py b/openpype/hosts/photoshop/plugins/create/create_legacy_image.py index 76724581659..2d655cae325 100644 --- a/openpype/hosts/photoshop/plugins/create/create_legacy_image.py +++ b/openpype/hosts/photoshop/plugins/create/create_legacy_image.py @@ -1,6 +1,6 @@ import re -from Qt import QtWidgets +from qtpy import QtWidgets from openpype.pipeline import create from openpype.hosts.photoshop import api as photoshop diff --git a/openpype/hosts/tvpaint/api/launch_script.py b/openpype/hosts/tvpaint/api/launch_script.py index c474a10529f..614dbe8a6e5 100644 --- a/openpype/hosts/tvpaint/api/launch_script.py +++ b/openpype/hosts/tvpaint/api/launch_script.py @@ -6,7 +6,7 @@ import platform import logging -from Qt import QtWidgets, QtCore, QtGui +from qtpy import QtWidgets, QtCore, QtGui from openpype import style from openpype.pipeline import install_host diff --git a/openpype/hosts/tvpaint/plugins/create/create_render_layer.py 
b/openpype/hosts/tvpaint/plugins/create/create_render_layer.py index a085830e96e..009b69c4f15 100644 --- a/openpype/hosts/tvpaint/plugins/create/create_render_layer.py +++ b/openpype/hosts/tvpaint/plugins/create/create_render_layer.py @@ -207,8 +207,8 @@ def process(self): ) def _ask_user_subset_override(self, instance): - from Qt import QtCore - from Qt.QtWidgets import QMessageBox + from qtpy import QtCore + from qtpy.QtWidgets import QMessageBox title = "Subset \"{}\" already exist".format(instance["subset"]) text = ( diff --git a/openpype/hosts/unreal/api/tools_ui.py b/openpype/hosts/unreal/api/tools_ui.py index 2500f8495f6..708e167a657 100644 --- a/openpype/hosts/unreal/api/tools_ui.py +++ b/openpype/hosts/unreal/api/tools_ui.py @@ -1,5 +1,5 @@ import sys -from Qt import QtWidgets, QtCore, QtGui +from qtpy import QtWidgets, QtCore, QtGui from openpype import ( resources, From 374d4d9683ca9e8bc0aa56ca7c336385c917adc3 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 13 Dec 2022 17:48:48 +0100 Subject: [PATCH 120/130] use qtpy in fusion --- openpype/hosts/fusion/api/menu.py | 2 +- openpype/hosts/fusion/api/pipeline.py | 2 +- openpype/hosts/fusion/api/pulse.py | 2 +- .../hosts/fusion/deploy/MenuScripts/install_pyside2.py | 8 ++++---- .../fusion/deploy/Scripts/Comp/OpenPype/switch_ui.py | 2 +- openpype/hosts/fusion/plugins/inventory/set_tool_color.py | 2 +- openpype/hosts/fusion/scripts/set_rendermode.py | 2 +- 7 files changed, 10 insertions(+), 10 deletions(-) diff --git a/openpype/hosts/fusion/api/menu.py b/openpype/hosts/fusion/api/menu.py index 39126935e63..42fbab70a6a 100644 --- a/openpype/hosts/fusion/api/menu.py +++ b/openpype/hosts/fusion/api/menu.py @@ -1,6 +1,6 @@ import sys -from Qt import QtWidgets, QtCore, QtGui +from qtpy import QtWidgets, QtCore, QtGui from openpype.tools.utils import host_tools from openpype.style import load_stylesheet diff --git a/openpype/hosts/fusion/api/pipeline.py b/openpype/hosts/fusion/api/pipeline.py index b6092f7c1b5..6315fe443de 100644 --- a/openpype/hosts/fusion/api/pipeline.py +++ b/openpype/hosts/fusion/api/pipeline.py @@ -6,7 +6,7 @@ import logging import pyblish.api -from Qt import QtCore +from qtpy import QtCore from openpype.lib import ( Logger, diff --git a/openpype/hosts/fusion/api/pulse.py b/openpype/hosts/fusion/api/pulse.py index eb7ef3785d6..762f05ba7e0 100644 --- a/openpype/hosts/fusion/api/pulse.py +++ b/openpype/hosts/fusion/api/pulse.py @@ -1,7 +1,7 @@ import os import sys -from Qt import QtCore +from qtpy import QtCore class PulseThread(QtCore.QThread): diff --git a/openpype/hosts/fusion/deploy/MenuScripts/install_pyside2.py b/openpype/hosts/fusion/deploy/MenuScripts/install_pyside2.py index ab9f13ce056..e1240fd6770 100644 --- a/openpype/hosts/fusion/deploy/MenuScripts/install_pyside2.py +++ b/openpype/hosts/fusion/deploy/MenuScripts/install_pyside2.py @@ -6,10 +6,10 @@ try: - from Qt import QtWidgets # noqa: F401 - from Qt import __binding__ - print(f"Qt binding: {__binding__}") - mod = importlib.import_module(__binding__) + from qtpy import API_NAME + + print(f"Qt binding: {API_NAME}") + mod = importlib.import_module(API_NAME) print(f"Qt path: {mod.__file__}") print("Qt library found, nothing to do..") diff --git a/openpype/hosts/fusion/deploy/Scripts/Comp/OpenPype/switch_ui.py b/openpype/hosts/fusion/deploy/Scripts/Comp/OpenPype/switch_ui.py index 93f775b24bf..f08dc0bf2c4 100644 --- a/openpype/hosts/fusion/deploy/Scripts/Comp/OpenPype/switch_ui.py +++ b/openpype/hosts/fusion/deploy/Scripts/Comp/OpenPype/switch_ui.py @@ 
-3,7 +3,7 @@ import glob import logging -from Qt import QtWidgets, QtCore +from qtpy import QtWidgets, QtCore import qtawesome as qta diff --git a/openpype/hosts/fusion/plugins/inventory/set_tool_color.py b/openpype/hosts/fusion/plugins/inventory/set_tool_color.py index c7530ce6745..a057ad1e898 100644 --- a/openpype/hosts/fusion/plugins/inventory/set_tool_color.py +++ b/openpype/hosts/fusion/plugins/inventory/set_tool_color.py @@ -1,4 +1,4 @@ -from Qt import QtGui, QtWidgets +from qtpy import QtGui, QtWidgets from openpype.pipeline import InventoryAction from openpype import style diff --git a/openpype/hosts/fusion/scripts/set_rendermode.py b/openpype/hosts/fusion/scripts/set_rendermode.py index f0638e4fe3c..9d2bfef3107 100644 --- a/openpype/hosts/fusion/scripts/set_rendermode.py +++ b/openpype/hosts/fusion/scripts/set_rendermode.py @@ -1,4 +1,4 @@ -from Qt import QtWidgets +from qtpy import QtWidgets import qtawesome from openpype.hosts.fusion.api import get_current_comp From 7810d425dabe7900411809e61444c9ab820205bb Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Tue, 13 Dec 2022 16:52:32 +0000 Subject: [PATCH 121/130] Camera is created only when the creator is not using the selection --- openpype/hosts/blender/plugins/create/create_camera.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/openpype/hosts/blender/plugins/create/create_camera.py b/openpype/hosts/blender/plugins/create/create_camera.py index 1a3c0080693..ada512d7acf 100644 --- a/openpype/hosts/blender/plugins/create/create_camera.py +++ b/openpype/hosts/blender/plugins/create/create_camera.py @@ -32,11 +32,6 @@ def _process(self): subset = self.data["subset"] name = plugin.asset_name(asset, subset) - camera = bpy.data.cameras.new(subset) - camera_obj = bpy.data.objects.new(subset, camera) - - instances.objects.link(camera_obj) - asset_group = bpy.data.objects.new(name=name, object_data=None) asset_group.empty_display_type = 'SINGLE_ARROW' instances.objects.link(asset_group) @@ -53,6 +48,11 @@ def _process(self): bpy.ops.object.parent_set(keep_transform=True) else: plugin.deselect_all() + camera = bpy.data.cameras.new(subset) + camera_obj = bpy.data.objects.new(subset, camera) + + instances.objects.link(camera_obj) + camera_obj.select_set(True) asset_group.select_set(True) bpy.context.view_layer.objects.active = asset_group From 730df2a4cd976f859f78cdf0ca60e484edd0d51f Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Tue, 13 Dec 2022 21:55:41 +0100 Subject: [PATCH 122/130] deadline: pr comments --- .../deadline/plugins/publish/submit_publish_job.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/openpype/modules/deadline/plugins/publish/submit_publish_job.py b/openpype/modules/deadline/plugins/publish/submit_publish_job.py index 5ed8c834127..5c5c54febb0 100644 --- a/openpype/modules/deadline/plugins/publish/submit_publish_job.py +++ b/openpype/modules/deadline/plugins/publish/submit_publish_job.py @@ -127,10 +127,7 @@ class ProcessSubmittedJobOnFarm(pyblish.api.InstancePlugin): "celaction": [r".*"]} environ_job_filter = [ - "OPENPYPE_METADATA_FILE", - "OPENPYPE_PUBLISH_JOB", - "OPENPYPE_RENDER_JOB", - "OPENPYPE_LOG_NO_COLORS" + "OPENPYPE_METADATA_FILE" ] environ_keys = [ @@ -238,10 +235,11 @@ def _submit_deadline_post_job(self, instance, job, instances): "AVALON_PROJECT": legacy_io.Session["AVALON_PROJECT"], "AVALON_ASSET": legacy_io.Session["AVALON_ASSET"], "AVALON_TASK": legacy_io.Session["AVALON_TASK"], - "OPENPYPE_LOG_NO_COLORS": "1", "OPENPYPE_USERNAME": 
instance.context.data["user"], "OPENPYPE_PUBLISH_JOB": "1", - "OPENPYPE_RENDER_JOB": "0" + "OPENPYPE_RENDER_JOB": "0", + "OPENPYPE_REMOTE_JOB": "0", + "OPENPYPE_LOG_NO_COLORS": "1" } # add environments from self.environ_keys From c58162bb1e6246d32ef0e58f0202b17ac90afe44 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Wed, 14 Dec 2022 03:30:40 +0000 Subject: [PATCH 123/130] [Automated] Bump version --- openpype/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/version.py b/openpype/version.py index 190f7ac4010..8d82df563d8 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.14.9-nightly.3" +__version__ = "3.14.9-nightly.4" From bf4b70e9417407e603441f7ca0fe11b165452003 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Wed, 14 Dec 2022 10:46:31 +0000 Subject: [PATCH 124/130] Fixed layout update --- .../blender/plugins/load/load_layout_blend.py | 77 ++++++++++++++----- 1 file changed, 57 insertions(+), 20 deletions(-) diff --git a/openpype/hosts/blender/plugins/load/load_layout_blend.py b/openpype/hosts/blender/plugins/load/load_layout_blend.py index e0124053bf9..9ab24b5ac1c 100644 --- a/openpype/hosts/blender/plugins/load/load_layout_blend.py +++ b/openpype/hosts/blender/plugins/load/load_layout_blend.py @@ -6,6 +6,7 @@ import bpy +from openpype import lib from openpype.pipeline import ( legacy_create, get_representation_path, @@ -63,10 +64,12 @@ def _remove_asset_and_library(self, asset_group): # If it is the last object to use that library, remove it if count == 1: library = bpy.data.libraries.get(bpy.path.basename(libpath)) - bpy.data.libraries.remove(library) + if library: + bpy.data.libraries.remove(library) def _process( - self, libpath, asset_group, group_name, asset, representation, actions + self, libpath, asset_group, group_name, asset, representation, + actions, anim_instances ): with bpy.data.libraries.load( libpath, link=True, relative=False @@ -140,12 +143,12 @@ def _process( elif local_obj.type == 'ARMATURE': plugin.prepare_data(local_obj.data) - if action is not None: + if action: if local_obj.animation_data is None: local_obj.animation_data_create() local_obj.animation_data.action = action elif (local_obj.animation_data and - local_obj.animation_data.action is not None): + local_obj.animation_data.action): plugin.prepare_data( local_obj.animation_data.action) @@ -157,19 +160,26 @@ def _process( t.id = local_obj elif local_obj.type == 'EMPTY': - creator_plugin = get_legacy_creator_by_name("CreateAnimation") - if not creator_plugin: - raise ValueError("Creator plugin \"CreateAnimation\" was " - "not found.") - - legacy_create( - creator_plugin, - name=local_obj.name.split(':')[-1] + "_animation", - asset=asset, - options={"useSelection": False, - "asset_group": local_obj}, - data={"dependencies": representation} - ) + if (not anim_instances or + (anim_instances and + local_obj.name not in anim_instances.keys())): + avalon = local_obj.get(AVALON_PROPERTY) + if avalon and avalon.get('family') == 'rig': + creator_plugin = get_legacy_creator_by_name( + "CreateAnimation") + if not creator_plugin: + raise ValueError( + "Creator plugin \"CreateAnimation\" was " + "not found.") + + legacy_create( + creator_plugin, + name=local_obj.name.split(':')[-1] + "_animation", + asset=asset, + options={"useSelection": False, + "asset_group": local_obj}, + data={"dependencies": representation} + ) if not local_obj.get(AVALON_PROPERTY): 
local_obj[AVALON_PROPERTY] = dict() @@ -272,7 +282,8 @@ def process_asset( avalon_container.objects.link(asset_group) objects = self._process( - libpath, asset_group, group_name, asset, representation, None) + libpath, asset_group, group_name, asset, representation, + None, None) for child in asset_group.children: if child.get(AVALON_PROPERTY): @@ -352,10 +363,20 @@ def update(self, container: Dict, representation: Dict): return actions = {} + anim_instances = {} for obj in asset_group.children: obj_meta = obj.get(AVALON_PROPERTY) if obj_meta.get('family') == 'rig': + # Get animation instance + collections = list(obj.users_collection) + for c in collections: + avalon = c.get(AVALON_PROPERTY) + if avalon and avalon.get('family') == 'animation': + anim_instances[obj.name] = c.name + break + + # Get armature's action rig = None for child in obj.children: if child.type == 'ARMATURE': @@ -384,9 +405,25 @@ def update(self, container: Dict, representation: Dict): # If it is the last object to use that library, remove it if count == 1: library = bpy.data.libraries.get(bpy.path.basename(group_libpath)) - bpy.data.libraries.remove(library) + if library: + bpy.data.libraries.remove(library) + + asset = container.get("asset_name").split("_")[0] + + self._process( + str(libpath), asset_group, object_name, asset, + str(representation.get("_id")), actions, anim_instances + ) - self._process(str(libpath), asset_group, object_name, actions) + for o in anim_instances.keys(): + try: + obj = bpy.data.objects[o] + bpy.data.collections[anim_instances[o]].objects.link(obj) + except KeyError: + self.log.info(f"Object {o} does not exist anymore.") + coll = bpy.data.collections.get(anim_instances[o]) + if (coll): + bpy.data.collections.remove(coll) avalon_container = bpy.data.collections.get(AVALON_CONTAINERS) for child in asset_group.children: From f222c2b9cc36e3f3d15d6b97969f3eafc0888ad9 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Wed, 14 Dec 2022 11:06:26 +0000 Subject: [PATCH 125/130] Small fix and comments --- .../hosts/blender/plugins/load/load_layout_blend.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/openpype/hosts/blender/plugins/load/load_layout_blend.py b/openpype/hosts/blender/plugins/load/load_layout_blend.py index 9ab24b5ac1c..c98b5c088ad 100644 --- a/openpype/hosts/blender/plugins/load/load_layout_blend.py +++ b/openpype/hosts/blender/plugins/load/load_layout_blend.py @@ -6,7 +6,6 @@ import bpy -from openpype import lib from openpype.pipeline import ( legacy_create, get_representation_path, @@ -415,13 +414,14 @@ def update(self, container: Dict, representation: Dict): str(representation.get("_id")), actions, anim_instances ) - for o in anim_instances.keys(): + # Link the new objects to the animation collection + for inst in anim_instances.keys(): try: - obj = bpy.data.objects[o] - bpy.data.collections[anim_instances[o]].objects.link(obj) + obj = bpy.data.objects[inst] + bpy.data.collections[anim_instances[inst]].objects.link(obj) except KeyError: - self.log.info(f"Object {o} does not exist anymore.") - coll = bpy.data.collections.get(anim_instances[o]) + self.log.info(f"Object {inst} does not exist anymore.") + coll = bpy.data.collections.get(anim_instances[inst]) if (coll): bpy.data.collections.remove(coll) From 6f2c16fc6d92833b7f9e7747dd38e7129c2ef945 Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Wed, 14 Dec 2022 11:09:08 +0000 Subject: [PATCH 126/130] Hound fixes --- openpype/hosts/blender/plugins/load/load_layout_blend.py | 2 +- 1 file changed, 
1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/blender/plugins/load/load_layout_blend.py b/openpype/hosts/blender/plugins/load/load_layout_blend.py index c98b5c088ad..f95d17a17f6 100644 --- a/openpype/hosts/blender/plugins/load/load_layout_blend.py +++ b/openpype/hosts/blender/plugins/load/load_layout_blend.py @@ -176,7 +176,7 @@ def _process( name=local_obj.name.split(':')[-1] + "_animation", asset=asset, options={"useSelection": False, - "asset_group": local_obj}, + "asset_group": local_obj}, data={"dependencies": representation} ) From 4d20f025f63d8178aece5fe58d28a8a3f6997aeb Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Wed, 14 Dec 2022 16:34:40 +0100 Subject: [PATCH 127/130] :bug: fix empty ue folder bug --- openpype/hosts/unreal/lib.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/unreal/lib.py b/openpype/hosts/unreal/lib.py index d02c6de357f..095f5e414be 100644 --- a/openpype/hosts/unreal/lib.py +++ b/openpype/hosts/unreal/lib.py @@ -50,7 +50,10 @@ def get_engine_versions(env=None): # environment variable not set pass except OSError: - # specified directory doesn't exists + # specified directory doesn't exist + pass + except StopIteration: + # specified directory doesn't exist pass # if we've got something, terminate auto-detection process From 52e8601d44ab90dc7d6b57cfb2c4d30892340daa Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 15 Dec 2022 11:41:11 +0100 Subject: [PATCH 128/130] use string instead of custom type for signal parameter --- .../publisher/widgets/card_view_widgets.py | 26 ++++++------------- 1 file changed, 8 insertions(+), 18 deletions(-) diff --git a/openpype/tools/publisher/widgets/card_view_widgets.py b/openpype/tools/publisher/widgets/card_view_widgets.py index 09635d1a15e..57336f9304c 100644 --- a/openpype/tools/publisher/widgets/card_view_widgets.py +++ b/openpype/tools/publisher/widgets/card_view_widgets.py @@ -43,24 +43,14 @@ ) -class SelectionType: - def __init__(self, name): - self.name = name - - def __eq__(self, other): - if isinstance(other, SelectionType): - other = other.name - return self.name == other - - class SelectionTypes: - clear = SelectionType("clear") - extend = SelectionType("extend") - extend_to = SelectionType("extend_to") + clear = "clear" + extend = "extend" + extend_to = "extend_to" class BaseGroupWidget(QtWidgets.QWidget): - selected = QtCore.Signal(str, str, SelectionType) + selected = QtCore.Signal(str, str, str) removed_selected = QtCore.Signal() def __init__(self, group_name, parent): @@ -269,7 +259,7 @@ def update_instances(self, instances): class CardWidget(BaseClickableFrame): """Clickable card used as bigger button.""" - selected = QtCore.Signal(str, str, SelectionType) + selected = QtCore.Signal(str, str, str) # Group identifier of card # - this must be set because if send when mouse is released with card id _group_identifier = None @@ -755,11 +745,11 @@ def _on_widget_selection(self, instance_id, group_name, selection_type): group_widget = self._widgets_by_group[group_name] new_widget = group_widget.get_widget_by_item_id(instance_id) - if selection_type is SelectionTypes.clear: + if selection_type == SelectionTypes.clear: self._select_item_clear(instance_id, group_name, new_widget) - elif selection_type is SelectionTypes.extend: + elif selection_type == SelectionTypes.extend: self._select_item_extend(instance_id, group_name, new_widget) - elif selection_type is SelectionTypes.extend_to: + elif selection_type == SelectionTypes.extend_to: 
self._select_item_extend_to(instance_id, group_name, new_widget) self.selection_changed.emit() From 74972bec80b523e4457627801e6bfacf8d53fb8b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 15 Dec 2022 11:41:28 +0100 Subject: [PATCH 129/130] added all constants to __all__ --- openpype/tools/publisher/constants.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/openpype/tools/publisher/constants.py b/openpype/tools/publisher/constants.py index 74337ea1abb..96f74a5a5c3 100644 --- a/openpype/tools/publisher/constants.py +++ b/openpype/tools/publisher/constants.py @@ -35,5 +35,8 @@ "SORT_VALUE_ROLE", "IS_GROUP_ROLE", "CREATOR_IDENTIFIER_ROLE", - "FAMILY_ROLE" + "CREATOR_THUMBNAIL_ENABLED_ROLE", + "FAMILY_ROLE", + "GROUP_ROLE", + "CONVERTER_IDENTIFIER_ROLE", ) From c07bcb2b213cefaf2ae621d60a243c486d6ca80a Mon Sep 17 00:00:00 2001 From: Simone Barbieri Date: Thu, 15 Dec 2022 12:51:43 +0000 Subject: [PATCH 130/130] Fix problem when updating or removing layout with non loaded objects --- openpype/hosts/blender/plugins/load/load_layout_blend.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/openpype/hosts/blender/plugins/load/load_layout_blend.py b/openpype/hosts/blender/plugins/load/load_layout_blend.py index f95d17a17f6..7d2fd234441 100644 --- a/openpype/hosts/blender/plugins/load/load_layout_blend.py +++ b/openpype/hosts/blender/plugins/load/load_layout_blend.py @@ -48,8 +48,14 @@ def _remove(self, asset_group): bpy.data.objects.remove(obj) def _remove_asset_and_library(self, asset_group): + if not asset_group.get(AVALON_PROPERTY): + return + libpath = asset_group.get(AVALON_PROPERTY).get('libpath') + if not libpath: + return + # Check how many assets use the same library count = 0 for obj in bpy.data.collections.get(AVALON_CONTAINERS).all_objects:
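
Taken together, the Qt-to-qtpy commits above settle on a single import convention: modules import from qtpy instead of the Qt.py shim, and enum values such as DisplayRole, UserRole and AscendingOrder are reached through QtCore.Qt rather than a bare "from Qt.QtCore import Qt". Below is a minimal sketch of that convention, assuming a qtpy-compatible binding (for example PySide2) is installed; the dialog and model calls are illustrative only and are not code from the repository:

    from qtpy import QtCore, QtWidgets

    class ExampleDialog(QtWidgets.QDialog):
        """Sketch of the post-migration import style (hypothetical widget)."""

        def __init__(self, model, parent=None):
            super().__init__(parent)
            self._view = QtWidgets.QTableView(self)
            self._view.setModel(model)
            # Enum access goes through QtCore.Qt, mirroring the headerData()
            # and setSortIndicator() changes in the sync_server tray patches.
            self._view.horizontalHeader().setSortIndicator(
                -1, QtCore.Qt.AscendingOrder)
            layout = QtWidgets.QVBoxLayout(self)
            layout.addWidget(self._view)

        def selected_id(self, index):
            # Custom item identifiers keep using QtCore.Qt.UserRole.
            return self._view.model().data(index, QtCore.Qt.UserRole)

The same pattern runs unchanged on any binding qtpy supports (PySide2, PySide6, PyQt5, PyQt6), which is presumably the motivation for the switch.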