From 1b184a09f9d3c9ac656f45a2bacded0125399795 Mon Sep 17 00:00:00 2001 From: "clement.hector" Date: Fri, 18 Feb 2022 10:31:09 +0100 Subject: [PATCH 01/52] add root keys and project keys --- openpype/lib/path_tools.py | 90 ++++++++++++++++++++++++++------------ 1 file changed, 61 insertions(+), 29 deletions(-) diff --git a/openpype/lib/path_tools.py b/openpype/lib/path_tools.py index c0b78c57249..181417c38c2 100644 --- a/openpype/lib/path_tools.py +++ b/openpype/lib/path_tools.py @@ -6,11 +6,12 @@ import six from openpype.settings import get_project_settings -from openpype.settings.lib import get_site_local_overrides from .anatomy import Anatomy from .profiles_filtering import filter_profiles +import avalon.api + log = logging.getLogger(__name__) @@ -130,45 +131,75 @@ def get_last_version_from_path(path_dir, filter): return None -def compute_paths(basic_paths_items, project_root): +def concatenate_splitted_paths(split_paths, anatomy): pattern_array = re.compile(r"\[.*\]") - project_root_key = "__project_root__" output = [] - for path_items in basic_paths_items: + for path_items in split_paths: clean_items = [] + if isinstance(path_items, str): + path_items = [path_items] + for path_item in path_items: - matches = re.findall(pattern_array, path_item) - if len(matches) > 0: - path_item = path_item.replace(matches[0], "") - if path_item == project_root_key: - path_item = project_root + if not re.match(r"{.+}", path_item): + path_item = re.sub(pattern_array, "", path_item) clean_items.append(path_item) + + # backward compatibility + if "__project_root__" in path_items: + for root, root_path in anatomy.roots.items(): + if not os.path.exists(str(root_path)): + log.debug("Root {} path path {} not exist on \ + computer!".format(root, root_path)) + continue + clean_items = [f"{{root[{root}]}}", "{project[name]}"] \ + + clean_items[1:] + output.append(os.path.normpath(os.path.sep.join(clean_items))) + continue + output.append(os.path.normpath(os.path.sep.join(clean_items))) + return output +def get_format_data(anatomy): + dbcon = avalon.api.AvalonMongoDB() + dbcon.Session["AVALON_PROJECT"] = anatomy.project_name + project_doc = dbcon.find_one({"type": "project"}) + project_code = project_doc["data"]["code"] + + return { + "root": anatomy.roots, + "project": { + "name": anatomy.project_name, + "code": project_code + }, + } + + +def fill_paths(path_list, anatomy): + format_data = get_format_data(anatomy) + filled_paths = [] + + for path in path_list: + new_path = path.format(**format_data) + filled_paths.append(new_path) + + return filled_paths + + def create_project_folders(basic_paths, project_name): anatomy = Anatomy(project_name) - roots_paths = [] - if isinstance(anatomy.roots, dict): - for root in anatomy.roots.values(): - roots_paths.append(root.value) - else: - roots_paths.append(anatomy.roots.value) - - for root_path in roots_paths: - project_root = os.path.join(root_path, project_name) - full_paths = compute_paths(basic_paths, project_root) - # Create folders - for path in full_paths: - full_path = path.format(project_root=project_root) - if os.path.exists(full_path): - log.debug( - "Folder already exists: {}".format(full_path) - ) - else: - log.debug("Creating folder: {}".format(full_path)) - os.makedirs(full_path) + + concat_paths = concatenate_splitted_paths(basic_paths, anatomy) + filled_paths = fill_paths(concat_paths, anatomy) + + # Create folders + for path in filled_paths: + if os.path.exists(path): + log.debug("Folder already exists: {}".format(path)) + else: + log.debug("Creating 
folder: {}".format(path)) + os.makedirs(path) def _list_path_items(folder_structure): @@ -267,6 +298,7 @@ class HostDirmap: on_dirmap_enabled: run host code for enabling dirmap do_dirmap: run host code to do actual remapping """ + def __init__(self, host_name, project_settings, sync_module=None): self.host_name = host_name self.project_settings = project_settings From 075b80563b84e720d1bd18a070b0a194a322a9a8 Mon Sep 17 00:00:00 2001 From: BenoitConnan Date: Mon, 28 Feb 2022 15:09:16 +0100 Subject: [PATCH 02/52] add python 2 compatibility to path_tools --- openpype/lib/path_tools.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/lib/path_tools.py b/openpype/lib/path_tools.py index 181417c38c2..916e392eb2f 100644 --- a/openpype/lib/path_tools.py +++ b/openpype/lib/path_tools.py @@ -151,8 +151,8 @@ def concatenate_splitted_paths(split_paths, anatomy): log.debug("Root {} path path {} not exist on \ computer!".format(root, root_path)) continue - clean_items = [f"{{root[{root}]}}", "{project[name]}"] \ - + clean_items[1:] + clean_items = ["{{root[{}]}}".format(root), + r"{project[name]}"] + clean_items[1:] output.append(os.path.normpath(os.path.sep.join(clean_items))) continue From df26ddd1ebd77282cf0202f7cd4830fd49016165 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?G=C3=A1bor=20Marinov?= Date: Sun, 12 Jun 2022 00:18:07 +0200 Subject: [PATCH 03/52] Fix Nuke model loader to load full geo hierarchy by default --- openpype/hosts/nuke/plugins/load/load_model.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/openpype/hosts/nuke/plugins/load/load_model.py b/openpype/hosts/nuke/plugins/load/load_model.py index 2f54595cb0a..37670d8d4e7 100644 --- a/openpype/hosts/nuke/plugins/load/load_model.py +++ b/openpype/hosts/nuke/plugins/load/load_model.py @@ -60,6 +60,10 @@ def load(self, context, name, namespace, data): inpanel=False ) model_node.forceValidate() + + # workaround to load all geo nodes, not just top level ones + model_node.knob('scene_view').setAllItems(model_node.knob('scene_view').getAllItems(), True) + model_node["frame_rate"].setValue(float(fps)) # workaround because nuke's bug is not adding @@ -142,6 +146,9 @@ def update(self, container, representation): model_node["frame_rate"].setValue(float(fps)) model_node["file"].setValue(file) + # workaround to load all geo nodes, not just top level ones + model_node.knob('scene_view').setAllItems(model_node.knob('scene_view').getAllItems(), True) + # workaround because nuke's bug is # not adding animation keys properly xpos = model_node.xpos() From 41ad5e09007eba53e0e91579157f39e29b596587 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?G=C3=A1bor=20Marinov?= Date: Sun, 12 Jun 2022 01:04:38 +0200 Subject: [PATCH 04/52] PEP-8 --- openpype/hosts/nuke/plugins/load/load_model.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/nuke/plugins/load/load_model.py b/openpype/hosts/nuke/plugins/load/load_model.py index 37670d8d4e7..74f0b9731b6 100644 --- a/openpype/hosts/nuke/plugins/load/load_model.py +++ b/openpype/hosts/nuke/plugins/load/load_model.py @@ -62,7 +62,9 @@ def load(self, context, name, namespace, data): model_node.forceValidate() # workaround to load all geo nodes, not just top level ones - model_node.knob('scene_view').setAllItems(model_node.knob('scene_view').getAllItems(), True) + model_node.knob("scene_view").setAllItems( + model_node.knob("scene_view").getAllItems(), True + ) model_node["frame_rate"].setValue(float(fps)) @@ -147,7 +149,9 @@ def update(self, container, 
representation): model_node["file"].setValue(file) # workaround to load all geo nodes, not just top level ones - model_node.knob('scene_view').setAllItems(model_node.knob('scene_view').getAllItems(), True) + model_node.knob("scene_view").setAllItems( + model_node.knob("scene_view").getAllItems(), True + ) # workaround because nuke's bug is # not adding animation keys properly From 02f9b7729668d5dce9dfee1aebe8177771e67c78 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?G=C3=A1bor=20Marinov?= Date: Mon, 13 Jun 2022 10:37:43 +0200 Subject: [PATCH 05/52] More elegant solution, does not require preferences settings --- openpype/hosts/nuke/plugins/load/load_model.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/openpype/hosts/nuke/plugins/load/load_model.py b/openpype/hosts/nuke/plugins/load/load_model.py index 74f0b9731b6..44e8490d4cc 100644 --- a/openpype/hosts/nuke/plugins/load/load_model.py +++ b/openpype/hosts/nuke/plugins/load/load_model.py @@ -61,10 +61,10 @@ def load(self, context, name, namespace, data): ) model_node.forceValidate() - # workaround to load all geo nodes, not just top level ones - model_node.knob("scene_view").setAllItems( - model_node.knob("scene_view").getAllItems(), True - ) + # Ensure all items are imported and selected. + scene_view = model_node.knob('scene_view') + scene_view.setImportedItems(scene_view.getAllItems()) + scene_view.setSelectedItems(scene_view.getAllItems()) model_node["frame_rate"].setValue(float(fps)) @@ -148,10 +148,10 @@ def update(self, container, representation): model_node["frame_rate"].setValue(float(fps)) model_node["file"].setValue(file) - # workaround to load all geo nodes, not just top level ones - model_node.knob("scene_view").setAllItems( - model_node.knob("scene_view").getAllItems(), True - ) + # Ensure all items are imported and selected. 
+ scene_view = model_node.knob('scene_view') + scene_view.setImportedItems(scene_view.getAllItems()) + scene_view.setSelectedItems(scene_view.getAllItems()) # workaround because nuke's bug is # not adding animation keys properly From 238e3a03216f25141a02fe4637b44b6a09b4523f Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 13 Jun 2022 13:28:07 +0200 Subject: [PATCH 06/52] add run eventserver launcher to gitignore --- .gitignore | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.gitignore b/.gitignore index 28cfb4b1e9e..8b268b7f285 100644 --- a/.gitignore +++ b/.gitignore @@ -102,3 +102,5 @@ website/.docusaurus .poetry/ .python-version + +tools/run_eventserver.ps1 From 6033304282e7dc4a276ff7558161b66cfff0c3cb Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 13 Jun 2022 13:29:18 +0200 Subject: [PATCH 07/52] making wider extension filter for run eventserver gitignore --- .gitignore | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index 8b268b7f285..7eaef69873b 100644 --- a/.gitignore +++ b/.gitignore @@ -103,4 +103,4 @@ website/.docusaurus .poetry/ .python-version -tools/run_eventserver.ps1 +tools/run_eventserver.* From 09e92ebad87c09d0186f759ac738fc2389270ec3 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 15 Jun 2022 15:20:05 +0200 Subject: [PATCH 08/52] flame: make sure `representations` key is always on instance data --- .../hosts/flame/plugins/publish/collect_timeline_instances.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py index 0aca7c38d5c..aa19b78bf1c 100644 --- a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py +++ b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py @@ -125,7 +125,8 @@ def process(self, context): "flameAddTasks": self.add_tasks, "tasks": { task["name"]: {"type": task["type"]} - for task in self.add_tasks} + for task in self.add_tasks}, + "representations": [] }) self.log.debug("__ inst_data: {}".format(pformat(inst_data))) From be328e5396760f683c42ed21ca01385e42ec2cf0 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Wed, 15 Jun 2022 15:20:34 +0200 Subject: [PATCH 09/52] flame: implementing `keep_original_representation` switch --- .../hosts/flame/plugins/publish/extract_subset_resources.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py index 0bad3f7cfc0..255d57a8eec 100644 --- a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py +++ b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py @@ -22,6 +22,8 @@ class ExtractSubsetResources(openpype.api.Extractor): hosts = ["flame"] # plugin defaults + keep_original_representation = False + default_presets = { "thumbnail": { "active": True, @@ -44,7 +46,9 @@ class ExtractSubsetResources(openpype.api.Extractor): export_presets_mapping = {} def process(self, instance): - if "representations" not in instance.data: + + if not self.keep_original_representation: + # remove previeous representation if not needed instance.data["representations"] = [] # flame objects From 98646b9c0ad95aed664c6b5e10fe266cdba9040b Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 16 Jun 2022 15:32:05 +0200 Subject: [PATCH 10/52] standalone: adding rename shot switch and hierarchy switch --- 
.../plugins/publish/collect_hierarchy.py | 19 ++++++++++++------- .../project_settings/standalonepublisher.json | 6 ++++-- .../schema_project_standalonepublisher.json | 15 +++++++++++++-- 3 files changed, 29 insertions(+), 11 deletions(-) diff --git a/openpype/hosts/standalonepublisher/plugins/publish/collect_hierarchy.py b/openpype/hosts/standalonepublisher/plugins/publish/collect_hierarchy.py index 77163651c46..2452f77e567 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/collect_hierarchy.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/collect_hierarchy.py @@ -1,4 +1,5 @@ import os +from pprint import pformat import re from copy import deepcopy import pyblish.api @@ -21,6 +22,7 @@ class CollectHierarchyInstance(pyblish.api.ContextPlugin): families = ["shot"] # presets + shot_rename = True shot_rename_template = None shot_rename_search_patterns = None shot_add_hierarchy = None @@ -46,7 +48,7 @@ def rename_with_hierarchy(self, instance): parent_name = instance.context.data["assetEntity"]["name"] clip = instance.data["item"] clip_name = os.path.splitext(clip.name)[0].lower() - if self.shot_rename_search_patterns: + if self.shot_rename_search_patterns and self.shot_rename: search_text += parent_name + clip_name instance.data["anatomyData"].update({"clip_name": clip_name}) for type, pattern in self.shot_rename_search_patterns.items(): @@ -56,9 +58,9 @@ def rename_with_hierarchy(self, instance): continue instance.data["anatomyData"][type] = match[-1] - # format to new shot name - instance.data["asset"] = self.shot_rename_template.format( - **instance.data["anatomyData"]) + # format to new shot name + instance.data["asset"] = self.shot_rename_template.format( + **instance.data["anatomyData"]) def create_hierarchy(self, instance): parents = list() @@ -82,7 +84,7 @@ def create_hierarchy(self, instance): "entity_name": entity["name"] }) - if self.shot_add_hierarchy: + if self.shot_add_hierarchy.get("enabled"): parent_template_patern = re.compile(r"\{([a-z]*?)\}") # fill the parents parts from presets shot_add_hierarchy = self.shot_add_hierarchy.copy() @@ -126,8 +128,8 @@ def create_hierarchy(self, instance): instance.data["parents"] = parents # print - self.log.debug(f"Hierarchy: {hierarchy}") - self.log.debug(f"parents: {parents}") + self.log.warning(f"Hierarchy: {hierarchy}") + self.log.info(f"parents: {parents}") if self.shot_add_tasks: tasks_to_add = dict() @@ -161,6 +163,9 @@ def create_hierarchy(self, instance): }) def process(self, context): + self.log.info("self.shot_add_hierarchy: {}".format( + pformat(self.shot_add_hierarchy) + )) for instance in context: if instance.data["family"] in self.families: self.processing_instance(instance) diff --git a/openpype/settings/defaults/project_settings/standalonepublisher.json b/openpype/settings/defaults/project_settings/standalonepublisher.json index e36232d3f78..b6e2e056a1d 100644 --- a/openpype/settings/defaults/project_settings/standalonepublisher.json +++ b/openpype/settings/defaults/project_settings/standalonepublisher.json @@ -257,12 +257,14 @@ ] }, "CollectHierarchyInstance": { + "shot_rename": true, "shot_rename_template": "{project[code]}_{_sequence_}_{_shot_}", "shot_rename_search_patterns": { - "_sequence_": "(\\d{4})(?=_\\d{4})", - "_shot_": "(\\d{4})(?!_\\d{4})" + "_sequence_": "(sc\\d{3})", + "_shot_": "(sh\\d{3})" }, "shot_add_hierarchy": { + "enabled": true, "parents_path": "{project}/{folder}/{sequence}", "parents": { "project": "{project[name]}", diff --git 
a/openpype/settings/entities/schemas/projects_schema/schema_project_standalonepublisher.json b/openpype/settings/entities/schemas/projects_schema/schema_project_standalonepublisher.json index 37fcaac69fb..ae250076833 100644 --- a/openpype/settings/entities/schemas/projects_schema/schema_project_standalonepublisher.json +++ b/openpype/settings/entities/schemas/projects_schema/schema_project_standalonepublisher.json @@ -271,6 +271,11 @@ "label": "Collect Instance Hierarchy", "is_group": true, "children": [ + { + "type": "boolean", + "key": "shot_rename", + "label": "Shot Rename" + }, { "type": "text", "key": "shot_rename_template", @@ -289,7 +294,13 @@ "type": "dict", "key": "shot_add_hierarchy", "label": "Shot hierarchy", + "checkbox_key": "enabled", "children": [ + { + "type": "boolean", + "key": "enabled", + "label": "Enabled" + }, { "type": "text", "key": "parents_path", @@ -343,8 +354,8 @@ "type": "number", "key": "timeline_frame_start", "label": "Timeline start frame", - "default": 900000, - "minimum": 1, + "default": 90000, + "minimum": 0, "maximum": 10000000 }, { From 2da7261abff69322970ee6daafcc0068ce04216d Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Thu, 16 Jun 2022 15:32:37 +0200 Subject: [PATCH 11/52] standalone: ensure extension with dot at start --- .../plugins/publish/extract_trim_video_audio.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/standalonepublisher/plugins/publish/extract_trim_video_audio.py b/openpype/hosts/standalonepublisher/plugins/publish/extract_trim_video_audio.py index f327895b83d..51dc84e9a23 100644 --- a/openpype/hosts/standalonepublisher/plugins/publish/extract_trim_video_audio.py +++ b/openpype/hosts/standalonepublisher/plugins/publish/extract_trim_video_audio.py @@ -39,11 +39,14 @@ def process(self, instance): # Generate mov file. fps = instance.data["fps"] video_file_path = instance.data["editorialSourcePath"] - extensions = instance.data.get("extensions", [".mov"]) + extensions = instance.data.get("extensions", ["mov"]) for ext in extensions: self.log.info("Processing ext: `{}`".format(ext)) + if not ext.startswith("."): + ext = "." + ext + clip_trimed_path = os.path.join( staging_dir, instance.data["name"] + ext) # # check video file metadata From 3a1d9c9fcadab29f3b2962527dedea4da49abd4a Mon Sep 17 00:00:00 2001 From: Petr Kalis Date: Mon, 20 Jun 2022 13:11:01 +0200 Subject: [PATCH 12/52] Added far future value for null values for dates Null values were sorted as last, this keeps queued items together with last synched. --- openpype/modules/sync_server/tray/models.py | 60 +++++++++++++-------- 1 file changed, 37 insertions(+), 23 deletions(-) diff --git a/openpype/modules/sync_server/tray/models.py b/openpype/modules/sync_server/tray/models.py index c49edeafb90..6d1e85c17a2 100644 --- a/openpype/modules/sync_server/tray/models.py +++ b/openpype/modules/sync_server/tray/models.py @@ -1,6 +1,7 @@ import os import attr from bson.objectid import ObjectId +import datetime from Qt import QtCore from Qt.QtCore import Qt @@ -413,6 +414,23 @@ def get_index(self, id): return index return None + def _convert_date(self, date_value, current_date): + """Converts 'date_value' to string. + + Value of date_value might contain date in the future, used for nicely + sort queued items next to last downloaded. 
+ """ + try: + converted_date = None + # ignore date in the future - for sorting only + if date_value and date_value < current_date: + converted_date = date_value.strftime("%Y%m%dT%H%M%SZ") + except (AttributeError, TypeError): + # ignore unparseable values + pass + + return converted_date + class SyncRepresentationSummaryModel(_SyncRepresentationModel): """ @@ -560,7 +578,7 @@ def add_page_records(self, local_site, remote_site, representations): remote_provider = lib.translate_provider_for_icon(self.sync_server, self.project, remote_site) - + current_date = datetime.datetime.now() for repre in result.get("paginatedResults"): files = repre.get("files", []) if isinstance(files, dict): # aggregate returns dictionary @@ -570,14 +588,10 @@ def add_page_records(self, local_site, remote_site, representations): if not files: continue - local_updated = remote_updated = None - if repre.get('updated_dt_local'): - local_updated = \ - repre.get('updated_dt_local').strftime("%Y%m%dT%H%M%SZ") - - if repre.get('updated_dt_remote'): - remote_updated = \ - repre.get('updated_dt_remote').strftime("%Y%m%dT%H%M%SZ") + local_updated = self._convert_date(repre.get('updated_dt_local'), + current_date) + remote_updated = self._convert_date(repre.get('updated_dt_remote'), + current_date) avg_progress_remote = lib.convert_progress( repre.get('avg_progress_remote', '0')) @@ -645,6 +659,8 @@ def get_query(self, limit=0): if limit == 0: limit = SyncRepresentationSummaryModel.PAGE_SIZE + # replace null with value in the future for better sorting + dummy_max_date = datetime.datetime(2099, 1, 1) aggr = [ {"$match": self.get_match_part()}, {'$unwind': '$files'}, @@ -687,7 +703,7 @@ def get_query(self, limit=0): {'$cond': [ {'$size': "$order_remote.last_failed_dt"}, "$order_remote.last_failed_dt", - [] + [dummy_max_date] ]} ]}}, 'updated_dt_local': {'$first': { @@ -696,7 +712,7 @@ def get_query(self, limit=0): {'$cond': [ {'$size': "$order_local.last_failed_dt"}, "$order_local.last_failed_dt", - [] + [dummy_max_date] ]} ]}}, 'files_size': {'$ifNull': ["$files.size", 0]}, @@ -1039,6 +1055,7 @@ def add_page_records(self, local_site, remote_site, representations): self.project, remote_site) + current_date = datetime.datetime.now() for repre in result.get("paginatedResults"): # log.info("!!! 
repre:: {}".format(repre)) files = repre.get("files", []) @@ -1046,16 +1063,12 @@ def add_page_records(self, local_site, remote_site, representations): files = [files] for file in files: - local_updated = remote_updated = None - if repre.get('updated_dt_local'): - local_updated = \ - repre.get('updated_dt_local').strftime( - "%Y%m%dT%H%M%SZ") - - if repre.get('updated_dt_remote'): - remote_updated = \ - repre.get('updated_dt_remote').strftime( - "%Y%m%dT%H%M%SZ") + local_updated = self._convert_date( + repre.get('updated_dt_local'), + current_date) + remote_updated = self._convert_date( + repre.get('updated_dt_remote'), + current_date) remote_progress = lib.convert_progress( repre.get('progress_remote', '0')) @@ -1104,6 +1117,7 @@ def get_query(self, limit=0): if limit == 0: limit = SyncRepresentationSummaryModel.PAGE_SIZE + dummy_max_date = datetime.datetime(2099, 1, 1) aggr = [ {"$match": self.get_match_part()}, {"$unwind": "$files"}, @@ -1147,7 +1161,7 @@ def get_query(self, limit=0): '$cond': [ {'$size': "$order_remote.last_failed_dt"}, "$order_remote.last_failed_dt", - [] + [dummy_max_date] ] } ] @@ -1160,7 +1174,7 @@ def get_query(self, limit=0): '$cond': [ {'$size': "$order_local.last_failed_dt"}, "$order_local.last_failed_dt", - [] + [dummy_max_date] ] } ] From d3afa478b71974fff455669a394eaff3bca3468e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 20 Jun 2022 19:01:47 +0200 Subject: [PATCH 13/52] use query functions in celaction --- .../plugins/publish/collect_audio.py | 47 +++++++------------ 1 file changed, 17 insertions(+), 30 deletions(-) diff --git a/openpype/hosts/celaction/plugins/publish/collect_audio.py b/openpype/hosts/celaction/plugins/publish/collect_audio.py index 8acda5fc7cc..c6e3bf2c03a 100644 --- a/openpype/hosts/celaction/plugins/publish/collect_audio.py +++ b/openpype/hosts/celaction/plugins/publish/collect_audio.py @@ -4,6 +4,11 @@ import pyblish.api +from openpype.client import ( + get_subsets, + get_last_versions, + get_representations +) from openpype.pipeline import legacy_io @@ -60,10 +65,10 @@ def get_subsets(self, asset_doc, representations): """ # Query all subsets for asset - subset_docs = legacy_io.find({ - "type": "subset", - "parent": asset_doc["_id"] - }) + project_name = legacy_io.active_project() + subset_docs = get_subsets( + project_name, asset_ids=[asset_doc["_id"]], fields=["_id"] + ) # Collect all subset ids subset_ids = [ subset_doc["_id"] @@ -76,37 +81,19 @@ def get_subsets(self, asset_doc, representations): "Try this for start `r'.*'`: asset: `{}`" ).format(asset_doc["name"]) - # Last version aggregation - pipeline = [ - # Find all versions of those subsets - {"$match": { - "type": "version", - "parent": {"$in": subset_ids} - }}, - # Sorting versions all together - {"$sort": {"name": 1}}, - # Group them by "parent", but only take the last - {"$group": { - "_id": "$parent", - "_version_id": {"$last": "$_id"}, - "name": {"$last": "$name"} - }} - ] - last_versions_by_subset_id = dict() - for doc in legacy_io.aggregate(pipeline): - doc["parent"] = doc["_id"] - doc["_id"] = doc.pop("_version_id") - last_versions_by_subset_id[doc["parent"]] = doc + last_versions_by_subset_id = get_last_versions( + project_name, subset_ids, fields=["_id", "parent"] + ) version_docs_by_id = {} for version_doc in last_versions_by_subset_id.values(): version_docs_by_id[version_doc["_id"]] = version_doc - repre_docs = legacy_io.find({ - "type": "representation", - "parent": {"$in": list(version_docs_by_id.keys())}, - "name": {"$in": representations} - }) + 
repre_docs = get_representations( + project_name, + version_ids=version_docs_by_id.keys(), + representation_names=representations + ) repre_docs_by_version_id = collections.defaultdict(list) for repre_doc in repre_docs: version_id = repre_doc["parent"] From a93b978f354b6ed34034f0f4caaa98b8c637468e Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 20 Jun 2022 21:48:08 +0200 Subject: [PATCH 14/52] flame: fixing thumbnail duplication issue --- .../hosts/flame/plugins/publish/extract_subset_resources.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py index 5e0a5e344db..dd672ec3751 100644 --- a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py +++ b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py @@ -86,7 +86,11 @@ def process(self, instance): # add default preset type for thumbnail and reviewable video # update them with settings and override in case the same # are found in there - export_presets = deepcopy(self.default_presets) + _preset_keys = [k.split('_')[0] for k in self.export_presets_mapping] + export_presets = { + k: v for k, v in deepcopy(self.default_presets) + if k not in _preset_keys + } export_presets.update(self.export_presets_mapping) # loop all preset names and From 70d9b6fcb73c7ac3abb74d2218fcf2e53e845427 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 20 Jun 2022 22:00:41 +0200 Subject: [PATCH 15/52] flame: fixing dict iter with items --- .../hosts/flame/plugins/publish/extract_subset_resources.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py index dd672ec3751..1b6900e4058 100644 --- a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py +++ b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py @@ -88,7 +88,7 @@ def process(self, instance): # are found in there _preset_keys = [k.split('_')[0] for k in self.export_presets_mapping] export_presets = { - k: v for k, v in deepcopy(self.default_presets) + k: v for k, v in deepcopy(self.default_presets).items() if k not in _preset_keys } export_presets.update(self.export_presets_mapping) From 250f73656ac382e7d9bb441326bbd6f55118e0eb Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 20 Jun 2022 22:04:36 +0200 Subject: [PATCH 16/52] Flame: fixing NoneType in abs --- .../flame/plugins/publish/collect_timeline_instances.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py index aa19b78bf1c..b8489de7583 100644 --- a/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py +++ b/openpype/hosts/flame/plugins/publish/collect_timeline_instances.py @@ -1,4 +1,5 @@ import re +from types import NoneType import pyblish import openpype.hosts.flame.api as opfapi from openpype.hosts.flame.otio import flame_export @@ -75,6 +76,12 @@ def process(self, context): marker_data["handleEnd"] ) + # make sure there is not NoneType rather 0 + if isinstance(head, NoneType): + head = 0 + if isinstance(tail, NoneType): + tail = 0 + # make sure value is absolute if head != 0: head = abs(head) From a29a9af927c339acac56711351a3b995846e4111 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 20 Jun 2022 22:11:15 +0200 Subject: [PATCH 17/52] 
flame: unique name swapped with repre name unique name could be more than `thumbnail` --- .../flame/plugins/publish/extract_subset_resources.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py index 1b6900e4058..3ae87793983 100644 --- a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py +++ b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py @@ -228,7 +228,11 @@ def process(self, instance): # make sure only first segment is used if underscore in name # HACK: `ftrackreview_withLUT` will result only in `ftrackreview` - repr_name = unique_name.split("_")[0] + if ( + "thumbnail" in unique_name + or "ftrackreview" in unique_name + ): + repr_name = unique_name.split("_")[0] # create representation data representation_data = { @@ -267,7 +271,7 @@ def process(self, instance): if os.path.splitext(f)[-1] == ".mov" ] # then try if thumbnail is not in unique name - or unique_name == "thumbnail" + or repr_name == "thumbnail" ): representation_data["files"] = files.pop() else: From 035b202b58fc3c8a2e0f381ce4856a4f551e18a4 Mon Sep 17 00:00:00 2001 From: Jakub Jezek Date: Mon, 20 Jun 2022 22:19:05 +0200 Subject: [PATCH 18/52] Flame: fixing repr_name missing --- openpype/hosts/flame/plugins/publish/extract_subset_resources.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py index 3ae87793983..d34f5d58545 100644 --- a/openpype/hosts/flame/plugins/publish/extract_subset_resources.py +++ b/openpype/hosts/flame/plugins/publish/extract_subset_resources.py @@ -226,6 +226,7 @@ def process(self, instance): opfapi.export_clip( export_dir_path, exporting_clip, preset_path, **export_kwargs) + repr_name = unique_name # make sure only first segment is used if underscore in name # HACK: `ftrackreview_withLUT` will result only in `ftrackreview` if ( From ecda9a6099f6efe52d8ef3121d894c76277098a4 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Tue, 21 Jun 2022 18:18:36 +0200 Subject: [PATCH 19/52] remove unused functions --- openpype/hosts/nuke/api/__init__.py | 6 -- openpype/hosts/nuke/api/command.py | 114 ---------------------------- openpype/hosts/nuke/api/lib.py | 23 ------ 3 files changed, 143 deletions(-) diff --git a/openpype/hosts/nuke/api/__init__.py b/openpype/hosts/nuke/api/__init__.py index b571c4098c3..9e3ef1a3975 100644 --- a/openpype/hosts/nuke/api/__init__.py +++ b/openpype/hosts/nuke/api/__init__.py @@ -8,9 +8,6 @@ ) from .command import ( - reset_frame_range, - get_handles, - reset_resolution, viewer_update_and_undo_stop ) @@ -42,9 +39,6 @@ "current_file", "work_root", - "reset_frame_range", - "get_handles", - "reset_resolution", "viewer_update_and_undo_stop", "OpenPypeCreator", diff --git a/openpype/hosts/nuke/api/command.py b/openpype/hosts/nuke/api/command.py index c756c48a129..2f772469d86 100644 --- a/openpype/hosts/nuke/api/command.py +++ b/openpype/hosts/nuke/api/command.py @@ -1,124 +1,10 @@ import logging import contextlib import nuke -from bson.objectid import ObjectId - -from openpype.pipeline import legacy_io log = logging.getLogger(__name__) -def reset_frame_range(): - """ Set frame range to current asset - Also it will set a Viewer range with - displayed handles - """ - - fps = float(legacy_io.Session.get("AVALON_FPS", 25)) - - nuke.root()["fps"].setValue(fps) - name = 
legacy_io.Session["AVALON_ASSET"] - asset = legacy_io.find_one({"name": name, "type": "asset"}) - asset_data = asset["data"] - - handles = get_handles(asset) - - frame_start = int(asset_data.get( - "frameStart", - asset_data.get("edit_in"))) - - frame_end = int(asset_data.get( - "frameEnd", - asset_data.get("edit_out"))) - - if not all([frame_start, frame_end]): - missing = ", ".join(["frame_start", "frame_end"]) - msg = "'{}' are not set for asset '{}'!".format(missing, name) - log.warning(msg) - nuke.message(msg) - return - - frame_start -= handles - frame_end += handles - - nuke.root()["first_frame"].setValue(frame_start) - nuke.root()["last_frame"].setValue(frame_end) - - # setting active viewers - vv = nuke.activeViewer().node() - vv["frame_range_lock"].setValue(True) - vv["frame_range"].setValue("{0}-{1}".format( - int(asset_data["frameStart"]), - int(asset_data["frameEnd"])) - ) - - -def get_handles(asset): - """ Gets handles data - - Arguments: - asset (dict): avalon asset entity - - Returns: - handles (int) - """ - data = asset["data"] - if "handles" in data and data["handles"] is not None: - return int(data["handles"]) - - parent_asset = None - if "visualParent" in data: - vp = data["visualParent"] - if vp is not None: - parent_asset = legacy_io.find_one({"_id": ObjectId(vp)}) - - if parent_asset is None: - parent_asset = legacy_io.find_one({"_id": ObjectId(asset["parent"])}) - - if parent_asset is not None: - return get_handles(parent_asset) - else: - return 0 - - -def reset_resolution(): - """Set resolution to project resolution.""" - project = legacy_io.find_one({"type": "project"}) - p_data = project["data"] - - width = p_data.get("resolution_width", - p_data.get("resolutionWidth")) - height = p_data.get("resolution_height", - p_data.get("resolutionHeight")) - - if not all([width, height]): - missing = ", ".join(["width", "height"]) - msg = "No resolution information `{0}` found for '{1}'.".format( - missing, - project["name"]) - log.warning(msg) - nuke.message(msg) - return - - current_width = nuke.root()["format"].value().width() - current_height = nuke.root()["format"].value().height() - - if width != current_width or height != current_height: - - fmt = None - for f in nuke.formats(): - if f.width() == width and f.height() == height: - fmt = f.name() - - if not fmt: - nuke.addFormat( - "{0} {1} {2}".format(int(width), int(height), project["name"]) - ) - fmt = project["name"] - - nuke.root()["format"].setValue(fmt) - - @contextlib.contextmanager def viewer_update_and_undo_stop(): """Lock viewer from updating and stop recording undo steps""" diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index 505eb194197..7e44aaa7c5e 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -2151,29 +2151,6 @@ def set_favorites(self): set_context_favorites(favorite_items) -def get_hierarchical_attr(entity, attr, default=None): - attr_parts = attr.split('.') - value = entity - for part in attr_parts: - value = value.get(part) - if not value: - break - - if value or entity["type"].lower() == "project": - return value - - parent_id = entity["parent"] - if ( - entity["type"].lower() == "asset" - and entity.get("data", {}).get("visualParent") - ): - parent_id = entity["data"]["visualParent"] - - parent = legacy_io.find_one({"_id": parent_id}) - - return get_hierarchical_attr(parent, attr) - - def get_write_node_template_attr(node): ''' Gets all defined data from presets From 6d4f05e3da277f11fc7614c340f63024f1997036 Mon Sep 17 00:00:00 2001 
From: Jakub Trllo Date: Wed, 22 Jun 2022 14:12:05 +0200 Subject: [PATCH 20/52] added ability to pass asset name to 'get_last_version_by_subset_name' --- openpype/client/entities.py | 19 ++++++++++++++++--- 1 file changed, 16 insertions(+), 3 deletions(-) diff --git a/openpype/client/entities.py b/openpype/client/entities.py index 4b4a3729fe1..9864fee469f 100644 --- a/openpype/client/entities.py +++ b/openpype/client/entities.py @@ -789,14 +789,19 @@ def get_last_version_by_subset_id(project_name, subset_id, fields=None): def get_last_version_by_subset_name( - project_name, subset_name, asset_id, fields=None + project_name, subset_name, asset_id=None, asset_name=None, fields=None ): - """Last version for passed subset name under asset id. + """Last version for passed subset name under asset id/name. + + It is required to pass 'asset_id' or 'asset_name'. Asset id is recommended + if is available. Args: project_name (str): Name of project where to look for queried entities. subset_name (str): Name of subset. - asset_id (str|ObjectId): Asset id which is parnt of passed subset name. + asset_id (str|ObjectId): Asset id which is parent of passed + subset name. + asset_name (str): Asset name which is parent of passed subset name. fields (list[str]): Fields that should be returned. All fields are returned if 'None' is passed. @@ -805,6 +810,14 @@ def get_last_version_by_subset_name( Dict: Version document which can be reduced to specified 'fields'. """ + if not asset_id and not asset_name: + return None + + if not asset_id: + asset_doc = get_asset_by_name(project_name, asset_name, fields=["_id"]) + if not asset_doc: + return None + asset_id = asset_doc["_id"] subset_doc = get_subset_by_name( project_name, subset_name, asset_id, fields=["_id"] ) From 4e59ae973e055953cbf77f0f9c8932120e0d19b2 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 22 Jun 2022 14:19:15 +0200 Subject: [PATCH 21/52] use query functions in loaders --- .../hosts/nuke/plugins/load/load_backdrop.py | 35 ++++++++------- .../nuke/plugins/load/load_camera_abc.py | 38 ++++++++-------- openpype/hosts/nuke/plugins/load/load_clip.py | 37 +++++++-------- .../hosts/nuke/plugins/load/load_effects.py | 35 ++++++++------- .../nuke/plugins/load/load_effects_ip.py | 33 +++++++------- .../hosts/nuke/plugins/load/load_gizmo.py | 34 +++++++------- .../hosts/nuke/plugins/load/load_gizmo_ip.py | 34 +++++++------- .../hosts/nuke/plugins/load/load_image.py | 34 +++++++------- .../hosts/nuke/plugins/load/load_model.py | 38 ++++++++-------- .../nuke/plugins/load/load_script_precomp.py | 45 +++++++++---------- 10 files changed, 183 insertions(+), 180 deletions(-) diff --git a/openpype/hosts/nuke/plugins/load/load_backdrop.py b/openpype/hosts/nuke/plugins/load/load_backdrop.py index 143fdf1f303..164ab6f9f44 100644 --- a/openpype/hosts/nuke/plugins/load/load_backdrop.py +++ b/openpype/hosts/nuke/plugins/load/load_backdrop.py @@ -1,6 +1,10 @@ import nuke import nukescripts +from openpype.client import ( + get_version_by_id, + get_last_version_by_subset_id, +) from openpype.pipeline import ( legacy_io, load, @@ -188,18 +192,17 @@ def update(self, container, representation): # get main variables # Get version from io - version = legacy_io.find_one({ - "type": "version", - "_id": representation["parent"] - }) + project_name = legacy_io.active_project() + version_doc = get_version_by_id(project_name, representation["parent"]) + # get corresponding node GN = nuke.toNode(container['objectName']) file = get_representation_path(representation).replace("\\", 
"/") - context = representation["context"] + name = container['name'] - version_data = version.get("data", {}) - vname = version.get("name", None) + version_data = version_doc.get("data", {}) + vname = version_doc.get("name", None) first = version_data.get("frameStart", None) last = version_data.get("frameEnd", None) namespace = container['namespace'] @@ -237,20 +240,18 @@ def update(self, container, representation): GN["name"].setValue(object_name) # get all versions in list - versions = legacy_io.find({ - "type": "version", - "parent": version["parent"] - }).distinct('name') - - max_version = max(versions) + last_version_doc = get_last_version_by_subset_id( + project_name, version_doc["parent"], fields=["_id"] + ) # change color of node - if version.get("name") not in [max_version]: - GN["tile_color"].setValue(int("0xd88467ff", 16)) + if version_doc["_id"] == last_version_doc["_id"]: + color_value = self.node_color else: - GN["tile_color"].setValue(int(self.node_color, 16)) + color_value = "0xd88467ff" + GN["tile_color"].setValue(int(color_value, 16)) - self.log.info("updated to version: {}".format(version.get("name"))) + self.log.info("updated to version: {}".format(version_doc.get("name"))) return update_container(GN, data_imprint) diff --git a/openpype/hosts/nuke/plugins/load/load_camera_abc.py b/openpype/hosts/nuke/plugins/load/load_camera_abc.py index 964ca5ec901..f5dfc8c0ab9 100644 --- a/openpype/hosts/nuke/plugins/load/load_camera_abc.py +++ b/openpype/hosts/nuke/plugins/load/load_camera_abc.py @@ -1,5 +1,9 @@ import nuke +from openpype.client import ( + get_version_by_id, + get_last_version_by_subset_id +) from openpype.pipeline import ( legacy_io, load, @@ -102,17 +106,16 @@ def update(self, container, representation): None """ # Get version from io - version = legacy_io.find_one({ - "type": "version", - "_id": representation["parent"] - }) + project_name = legacy_io.active_project() + version_doc = get_version_by_id(project_name, representation["parent"]) + object_name = container['objectName'] # get corresponding node camera_node = nuke.toNode(object_name) # get main variables - version_data = version.get("data", {}) - vname = version.get("name", None) + version_data = version_doc.get("data", {}) + vname = version_doc.get("name", None) first = version_data.get("frameStart", None) last = version_data.get("frameEnd", None) fps = version_data.get("fps") or nuke.root()["fps"].getValue() @@ -165,28 +168,27 @@ def update(self, container, representation): d.setInput(index, camera_node) # color node by correct color by actual version - self.node_version_color(version, camera_node) + self.node_version_color(version_doc, camera_node) - self.log.info("updated to version: {}".format(version.get("name"))) + self.log.info("updated to version: {}".format(version_doc.get("name"))) return update_container(camera_node, data_imprint) - def node_version_color(self, version, node): + def node_version_color(self, version_doc, node): """ Coloring a node by correct color by actual version """ # get all versions in list - versions = legacy_io.find({ - "type": "version", - "parent": version["parent"] - }).distinct('name') - - max_version = max(versions) + project_name = legacy_io.active_project() + last_version_doc = get_last_version_by_subset_id( + project_name, version_doc["parent"], fields=["_id"] + ) # change color of node - if version.get("name") not in [max_version]: - node["tile_color"].setValue(int("0xd88467ff", 16)) + if version_doc["_id"] == last_version_doc["_id"]: + color_value = 
self.node_color else: - node["tile_color"].setValue(int(self.node_color, 16)) + color_value = "0xd88467ff" + node["tile_color"].setValue(int(color_value, 16)) def switch(self, container, representation): self.update(container, representation) diff --git a/openpype/hosts/nuke/plugins/load/load_clip.py b/openpype/hosts/nuke/plugins/load/load_clip.py index 681561e3037..d177e6ba768 100644 --- a/openpype/hosts/nuke/plugins/load/load_clip.py +++ b/openpype/hosts/nuke/plugins/load/load_clip.py @@ -1,6 +1,10 @@ import nuke import qargparse +from openpype.client import ( + get_version_by_id, + get_last_version_by_subset_id, +) from openpype.pipeline import ( legacy_io, get_representation_path, @@ -196,11 +200,10 @@ def update(self, container, representation): start_at_workfile = bool("start at" in read_node['frame_mode'].value()) - version = legacy_io.find_one({ - "type": "version", - "_id": representation["parent"] - }) - version_data = version.get("data", {}) + project_name = legacy_io.active_project() + version_doc = get_version_by_id(project_name, representation["parent"]) + + version_data = version_doc.get("data", {}) repre_id = representation["_id"] repre_cont = representation["context"] @@ -251,7 +254,7 @@ def update(self, container, representation): "representation": str(representation["_id"]), "frameStart": str(first), "frameEnd": str(last), - "version": str(version.get("name")), + "version": str(version_doc.get("name")), "db_colorspace": colorspace, "source": version_data.get("source"), "handleStart": str(self.handle_start), @@ -264,26 +267,24 @@ def update(self, container, representation): if used_colorspace: updated_dict["used_colorspace"] = used_colorspace + last_version_doc = get_last_version_by_subset_id( + project_name, version_doc["parent"], fields=["_id"] + ) # change color of read_node - # get all versions in list - versions = legacy_io.find({ - "type": "version", - "parent": version["parent"] - }).distinct('name') - - max_version = max(versions) - - if version.get("name") not in [max_version]: - read_node["tile_color"].setValue(int("0xd84f20ff", 16)) + if version_doc["_id"] == last_version_doc["_id"]: + color_value = "0x4ecd25ff" else: - read_node["tile_color"].setValue(int("0x4ecd25ff", 16)) + color_value = "0xd84f20ff" + read_node["tile_color"].setValue(int(color_value, 16)) # Update the imprinted representation update_container( read_node, updated_dict ) - self.log.info("updated to version: {}".format(version.get("name"))) + self.log.info( + "updated to version: {}".format(version_doc.get("name")) + ) if version_data.get("retime", None): self._make_retimes(read_node, version_data) diff --git a/openpype/hosts/nuke/plugins/load/load_effects.py b/openpype/hosts/nuke/plugins/load/load_effects.py index 6a30330ed04..d164e0604c7 100644 --- a/openpype/hosts/nuke/plugins/load/load_effects.py +++ b/openpype/hosts/nuke/plugins/load/load_effects.py @@ -3,6 +3,10 @@ import nuke import six +from openpype.client import ( + get_version_by_id, + get_last_version_by_subset_id, +) from openpype.pipeline import ( legacy_io, load, @@ -148,17 +152,16 @@ def update(self, container, representation): """ # get main variables # Get version from io - version = legacy_io.find_one({ - "type": "version", - "_id": representation["parent"] - }) + project_name = legacy_io.active_project() + version_doc = get_version_by_id(project_name, representation["parent"]) + # get corresponding node GN = nuke.toNode(container['objectName']) file = get_representation_path(representation).replace("\\", "/") name = 
container['name'] - version_data = version.get("data", {}) - vname = version.get("name", None) + version_data = version_doc.get("data", {}) + vname = version_doc.get("name", None) first = version_data.get("frameStart", None) last = version_data.get("frameEnd", None) workfile_first_frame = int(nuke.root()["first_frame"].getValue()) @@ -243,21 +246,19 @@ def update(self, container, representation): # try to find parent read node self.connect_read_node(GN, namespace, json_f["assignTo"]) - # get all versions in list - versions = legacy_io.find({ - "type": "version", - "parent": version["parent"] - }).distinct('name') - - max_version = max(versions) + last_version_doc = get_last_version_by_subset_id( + project_name, version_doc["parent"], fields=["_id"] + ) # change color of node - if version.get("name") not in [max_version]: - GN["tile_color"].setValue(int("0xd84f20ff", 16)) + if version_doc["_id"] == last_version_doc["_id"]: + color_value = "0x3469ffff" else: - GN["tile_color"].setValue(int("0x3469ffff", 16)) + color_value = "0xd84f20ff" + + GN["tile_color"].setValue(int(color_value, 16)) - self.log.info("updated to version: {}".format(version.get("name"))) + self.log.info("updated to version: {}".format(version_doc.get("name"))) def connect_read_node(self, group_node, asset, subset): """ diff --git a/openpype/hosts/nuke/plugins/load/load_effects_ip.py b/openpype/hosts/nuke/plugins/load/load_effects_ip.py index eaf151b3b86..44565c139d0 100644 --- a/openpype/hosts/nuke/plugins/load/load_effects_ip.py +++ b/openpype/hosts/nuke/plugins/load/load_effects_ip.py @@ -3,6 +3,10 @@ import six import nuke +from openpype.client import ( + get_version_by_id, + get_last_version_by_subset_id, +) from openpype.pipeline import ( legacy_io, load, @@ -153,17 +157,16 @@ def update(self, container, representation): # get main variables # Get version from io - version = legacy_io.find_one({ - "type": "version", - "_id": representation["parent"] - }) + project_name = legacy_io.active_project() + version_doc = get_version_by_id(project_name, representation["parent"]) + # get corresponding node GN = nuke.toNode(container['objectName']) file = get_representation_path(representation).replace("\\", "/") name = container['name'] - version_data = version.get("data", {}) - vname = version.get("name", None) + version_data = version_doc.get("data", {}) + vname = version_doc.get("name", None) first = version_data.get("frameStart", None) last = version_data.get("frameEnd", None) workfile_first_frame = int(nuke.root()["first_frame"].getValue()) @@ -251,20 +254,18 @@ def update(self, container, representation): # return # get all versions in list - versions = legacy_io.find({ - "type": "version", - "parent": version["parent"] - }).distinct('name') - - max_version = max(versions) + last_version_doc = get_last_version_by_subset_id( + project_name, version_doc["parent"], fields=["_id"] + ) # change color of node - if version.get("name") not in [max_version]: - GN["tile_color"].setValue(int("0xd84f20ff", 16)) + if version_doc["_id"] == last_version_doc["_id"]: + color_value = "0x3469ffff" else: - GN["tile_color"].setValue(int("0x3469ffff", 16)) + color_value = "0xd84f20ff" + GN["tile_color"].setValue(int(color_value, 16)) - self.log.info("updated to version: {}".format(version.get("name"))) + self.log.info("updated to version: {}".format(version_doc.get("name"))) def connect_active_viewer(self, group_node): """ diff --git a/openpype/hosts/nuke/plugins/load/load_gizmo.py b/openpype/hosts/nuke/plugins/load/load_gizmo.py index 
4ea9d64d7df..9a18eeef5cb 100644 --- a/openpype/hosts/nuke/plugins/load/load_gizmo.py +++ b/openpype/hosts/nuke/plugins/load/load_gizmo.py @@ -1,5 +1,9 @@ import nuke +from openpype.client import ( + get_version_by_id, + get_last_version_by_subset_id, +) from openpype.pipeline import ( legacy_io, load, @@ -101,17 +105,16 @@ def update(self, container, representation): # get main variables # Get version from io - version = legacy_io.find_one({ - "type": "version", - "_id": representation["parent"] - }) + project_name = legacy_io.active_project() + version_doc = get_version_by_id(project_name, representation["parent"]) + # get corresponding node GN = nuke.toNode(container['objectName']) file = get_representation_path(representation).replace("\\", "/") name = container['name'] - version_data = version.get("data", {}) - vname = version.get("name", None) + version_data = version_doc.get("data", {}) + vname = version_doc.get("name", None) first = version_data.get("frameStart", None) last = version_data.get("frameEnd", None) namespace = container['namespace'] @@ -148,21 +151,18 @@ def update(self, container, representation): GN.setXYpos(xpos, ypos) GN["name"].setValue(object_name) - # get all versions in list - versions = legacy_io.find({ - "type": "version", - "parent": version["parent"] - }).distinct('name') - - max_version = max(versions) + last_version_doc = get_last_version_by_subset_id( + project_name, version_doc["parent"], fields=["_id"] + ) # change color of node - if version.get("name") not in [max_version]: - GN["tile_color"].setValue(int("0xd88467ff", 16)) + if version_doc["_id"] == last_version_doc["_id"]: + color_value = self.node_color else: - GN["tile_color"].setValue(int(self.node_color, 16)) + color_value = "0xd88467ff" + GN["tile_color"].setValue(int(color_value, 16)) - self.log.info("updated to version: {}".format(version.get("name"))) + self.log.info("updated to version: {}".format(version_doc.get("name"))) return update_container(GN, data_imprint) diff --git a/openpype/hosts/nuke/plugins/load/load_gizmo_ip.py b/openpype/hosts/nuke/plugins/load/load_gizmo_ip.py index 38dd70935e3..2890dbfd2c1 100644 --- a/openpype/hosts/nuke/plugins/load/load_gizmo_ip.py +++ b/openpype/hosts/nuke/plugins/load/load_gizmo_ip.py @@ -1,6 +1,10 @@ import nuke import six +from openpype.client import ( + get_version_by_id, + get_last_version_by_subset_id, +) from openpype.pipeline import ( legacy_io, load, @@ -108,17 +112,16 @@ def update(self, container, representation): # get main variables # Get version from io - version = legacy_io.find_one({ - "type": "version", - "_id": representation["parent"] - }) + project_name = legacy_io.active_project() + version_doc = get_version_by_id(project_name, representation["parent"]) + # get corresponding node GN = nuke.toNode(container['objectName']) file = get_representation_path(representation).replace("\\", "/") name = container['name'] - version_data = version.get("data", {}) - vname = version.get("name", None) + version_data = version_doc.get("data", {}) + vname = version_doc.get("name", None) first = version_data.get("frameStart", None) last = version_data.get("frameEnd", None) namespace = container['namespace'] @@ -155,21 +158,18 @@ def update(self, container, representation): GN.setXYpos(xpos, ypos) GN["name"].setValue(object_name) - # get all versions in list - versions = legacy_io.find({ - "type": "version", - "parent": version["parent"] - }).distinct('name') - - max_version = max(versions) + last_version_doc = get_last_version_by_subset_id( + 
project_name, version_doc["parent"], fields=["_id"] + ) # change color of node - if version.get("name") not in [max_version]: - GN["tile_color"].setValue(int("0xd88467ff", 16)) + if version_doc["_id"] == last_version_doc["_id"]: + color_value = self.node_color else: - GN["tile_color"].setValue(int(self.node_color, 16)) + color_value = "0xd88467ff" + GN["tile_color"].setValue(int(color_value, 16)) - self.log.info("updated to version: {}".format(version.get("name"))) + self.log.info("updated to version: {}".format(version_doc.get("name"))) return update_container(GN, data_imprint) diff --git a/openpype/hosts/nuke/plugins/load/load_image.py b/openpype/hosts/nuke/plugins/load/load_image.py index 6df286a4f74..3e81ef999b5 100644 --- a/openpype/hosts/nuke/plugins/load/load_image.py +++ b/openpype/hosts/nuke/plugins/load/load_image.py @@ -2,6 +2,10 @@ import qargparse +from openpype.client import ( + get_version_by_id, + get_last_version_by_subset_id, +) from openpype.pipeline import ( legacy_io, load, @@ -186,20 +190,13 @@ def update(self, container, representation): format(frame_number, "0{}".format(padding))) # Get start frame from version data - version = legacy_io.find_one({ - "type": "version", - "_id": representation["parent"] - }) - - # get all versions in list - versions = legacy_io.find({ - "type": "version", - "parent": version["parent"] - }).distinct('name') - - max_version = max(versions) + project_name = legacy_io.active_project() + version_doc = get_version_by_id(project_name, representation["parent"]) + last_version_doc = get_last_version_by_subset_id( + project_name, version_doc["parent"], fields=["_id"] + ) - version_data = version.get("data", {}) + version_data = version_doc.get("data", {}) last = first = int(frame_number) @@ -215,7 +212,7 @@ def update(self, container, representation): "representation": str(representation["_id"]), "frameStart": str(first), "frameEnd": str(last), - "version": str(version.get("name")), + "version": str(version_doc.get("name")), "colorspace": version_data.get("colorspace"), "source": version_data.get("source"), "fps": str(version_data.get("fps")), @@ -223,17 +220,18 @@ def update(self, container, representation): }) # change color of node - if version.get("name") not in [max_version]: - node["tile_color"].setValue(int("0xd84f20ff", 16)) + if version_doc["_id"] == last_version_doc["_id"]: + color_value = "0x4ecd25ff" else: - node["tile_color"].setValue(int("0x4ecd25ff", 16)) + color_value = "0xd84f20ff" + node["tile_color"].setValue(int(color_value, 16)) # Update the imprinted representation update_container( node, updated_dict ) - self.log.info("updated to version: {}".format(version.get("name"))) + self.log.info("updated to version: {}".format(version_doc.get("name"))) def remove(self, container): node = nuke.toNode(container['objectName']) diff --git a/openpype/hosts/nuke/plugins/load/load_model.py b/openpype/hosts/nuke/plugins/load/load_model.py index 2f54595cb0a..c317b154501 100644 --- a/openpype/hosts/nuke/plugins/load/load_model.py +++ b/openpype/hosts/nuke/plugins/load/load_model.py @@ -1,5 +1,9 @@ import nuke +from openpype.client import ( + get_version_by_id, + get_last_version_by_subset_id, +) from openpype.pipeline import ( legacy_io, load, @@ -100,17 +104,15 @@ def update(self, container, representation): None """ # Get version from io - version = legacy_io.find_one({ - "type": "version", - "_id": representation["parent"] - }) + project_name = legacy_io.active_project() + version_doc = get_version_by_id(project_name, 
representation["parent"]) object_name = container['objectName'] # get corresponding node model_node = nuke.toNode(object_name) # get main variables - version_data = version.get("data", {}) - vname = version.get("name", None) + version_data = version_doc.get("data", {}) + vname = version_doc.get("name", None) first = version_data.get("frameStart", None) last = version_data.get("frameEnd", None) fps = version_data.get("fps") or nuke.root()["fps"].getValue() @@ -163,28 +165,26 @@ def update(self, container, representation): d.setInput(index, model_node) # color node by correct color by actual version - self.node_version_color(version, model_node) + self.node_version_color(version_doc, model_node) - self.log.info("updated to version: {}".format(version.get("name"))) + self.log.info("updated to version: {}".format(version_doc.get("name"))) return update_container(model_node, data_imprint) def node_version_color(self, version, node): - """ Coloring a node by correct color by actual version - """ - # get all versions in list - versions = legacy_io.find({ - "type": "version", - "parent": version["parent"] - }).distinct('name') + """ Coloring a node by correct color by actual version""" - max_version = max(versions) + project_name = legacy_io.active_project() + last_version_doc = get_last_version_by_subset_id( + project_name, version["parent"], fields=["_id"] + ) # change color of node - if version.get("name") not in [max_version]: - node["tile_color"].setValue(int("0xd88467ff", 16)) + if version["_id"] == last_version_doc["_id"]: + color_value = self.node_color else: - node["tile_color"].setValue(int(self.node_color, 16)) + color_value = "0xd88467ff" + node["tile_color"].setValue(int(color_value, 16)) def switch(self, container, representation): self.update(container, representation) diff --git a/openpype/hosts/nuke/plugins/load/load_script_precomp.py b/openpype/hosts/nuke/plugins/load/load_script_precomp.py index bd351ad7858..21e384b538e 100644 --- a/openpype/hosts/nuke/plugins/load/load_script_precomp.py +++ b/openpype/hosts/nuke/plugins/load/load_script_precomp.py @@ -1,5 +1,9 @@ import nuke +from openpype.client import ( + get_version_by_id, + get_last_version_by_subset_id, +) from openpype.pipeline import ( legacy_io, load, @@ -116,29 +120,23 @@ def update(self, container, representation): root = get_representation_path(representation).replace("\\", "/") # Get start frame from version data - version = legacy_io.find_one({ - "type": "version", - "_id": representation["parent"] - }) - - # get all versions in list - versions = legacy_io.find({ - "type": "version", - "parent": version["parent"] - }).distinct('name') - - max_version = max(versions) + project_name = legacy_io.active_project() + version_doc = get_version_by_id(project_name, representation["parent"]) + last_version_doc = get_last_version_by_subset_id( + project_name, version_doc["parent"], fields=["_id"] + ) updated_dict = {} + version_data = version_doc["data"] updated_dict.update({ "representation": str(representation["_id"]), - "frameEnd": version["data"].get("frameEnd"), - "version": version.get("name"), - "colorspace": version["data"].get("colorspace"), - "source": version["data"].get("source"), - "handles": version["data"].get("handles"), - "fps": version["data"].get("fps"), - "author": version["data"].get("author") + "frameEnd": version_data.get("frameEnd"), + "version": version_doc.get("name"), + "colorspace": version_data.get("colorspace"), + "source": version_data.get("source"), + "handles": version_data.get("handles"), + 
"fps": version_data.get("fps"), + "author": version_data.get("author") }) # Update the imprinted representation @@ -150,12 +148,13 @@ def update(self, container, representation): node["file"].setValue(root) # change color of node - if version.get("name") not in [max_version]: - node["tile_color"].setValue(int("0xd84f20ff", 16)) + if version_doc["_id"] == last_version_doc["_id"]: + color_value = "0xff0ff0ff" else: - node["tile_color"].setValue(int("0xff0ff0ff", 16)) + color_value = "0xd84f20ff" + node["tile_color"].setValue(int(color_value, 16)) - self.log.info("updated to version: {}".format(version.get("name"))) + self.log.info("updated to version: {}".format(version_doc.get("name"))) def remove(self, container): node = nuke.toNode(container['objectName']) From f919c24989739e40e46028b3dfdd82642e38507c Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 22 Jun 2022 14:20:05 +0200 Subject: [PATCH 22/52] remove unnecessary query of asset document --- .../hosts/nuke/plugins/publish/precollect_instances.py | 7 ------- 1 file changed, 7 deletions(-) diff --git a/openpype/hosts/nuke/plugins/publish/precollect_instances.py b/openpype/hosts/nuke/plugins/publish/precollect_instances.py index 8bf7280ceaa..4b3b70fa12c 100644 --- a/openpype/hosts/nuke/plugins/publish/precollect_instances.py +++ b/openpype/hosts/nuke/plugins/publish/precollect_instances.py @@ -1,7 +1,6 @@ import nuke import pyblish.api -from openpype.pipeline import legacy_io from openpype.hosts.nuke.api.lib import ( add_publish_knob, get_avalon_knob_data @@ -20,12 +19,6 @@ class PreCollectNukeInstances(pyblish.api.ContextPlugin): sync_workfile_version_on_families = [] def process(self, context): - asset_data = legacy_io.find_one({ - "type": "asset", - "name": legacy_io.Session["AVALON_ASSET"] - }) - - self.log.debug("asset_data: {}".format(asset_data["data"])) instances = [] root = nuke.root() From 2ce3200fa5579fc18963f0e25023156179c3996d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 22 Jun 2022 14:21:22 +0200 Subject: [PATCH 23/52] use query functions in validate script plugin --- .../nuke/plugins/publish/validate_script.py | 54 +++++++++++++++---- 1 file changed, 43 insertions(+), 11 deletions(-) diff --git a/openpype/hosts/nuke/plugins/publish/validate_script.py b/openpype/hosts/nuke/plugins/publish/validate_script.py index 10c9e93f8b6..9bda0da85e6 100644 --- a/openpype/hosts/nuke/plugins/publish/validate_script.py +++ b/openpype/hosts/nuke/plugins/publish/validate_script.py @@ -1,5 +1,6 @@ import pyblish.api +from openpype.client import get_project, get_asset_by_id from openpype import lib from openpype.pipeline import legacy_io @@ -19,6 +20,7 @@ def process(self, instance): asset_name = ctx_data["asset"] asset = lib.get_asset(asset_name) asset_data = asset["data"] + project_name = legacy_io.active_project() # These attributes will be checked attributes = [ @@ -48,12 +50,19 @@ def process(self, instance): asset_attributes[attr] = asset_data[attr] elif attr in hierarchical_attributes: - # Try to find fps on parent - parent = asset['parent'] + # TODO this should be probably removed + # Hierarchical attributes is not a thing since Pype 2? 
+ + # Try to find attribute on parent + parent_id = asset['parent'] + parent_type = "project" if asset_data['visualParent'] is not None: - parent = asset_data['visualParent'] + parent_type = "asset" + parent_id = asset_data['visualParent'] - value = self.check_parent_hierarchical(parent, attr) + value = self.check_parent_hierarchical( + project_name, parent_type, parent_id, attr + ) if value is None: missing_attributes.append(attr) else: @@ -113,12 +122,35 @@ def process(self, instance): message = msg.format(", ".join(not_matching)) raise ValueError(message) - def check_parent_hierarchical(self, entityId, attr): - if entityId is None: + def check_parent_hierarchical( + self, project_name, parent_type, parent_id, attr + ): + if parent_id is None: return None - entity = legacy_io.find_one({"_id": entityId}) - if attr in entity['data']: + + doc = None + if parent_type == "project": + doc = get_project(project_name) + elif parent_type == "asset": + doc = get_asset_by_id(project_name, parent_id) + + if not doc: + return None + + doc_data = doc["data"] + if attr in doc_data: self.log.info(attr) - return entity['data'][attr] - else: - return self.check_parent_hierarchical(entity['parent'], attr) + return doc_data[attr] + + if parent_type == "project": + return None + + parent_id = doc_data.get("visualParent") + new_parent_type = "asset" + if parent_id is None: + parent_id = doc["parent"] + new_parent_type = "project" + + return self.check_parent_hierarchical( + project_name, new_parent_type, parent_id, attr + ) From 796f32bccdfd171dc8f768227bcdff4d41c53e8d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 22 Jun 2022 14:23:35 +0200 Subject: [PATCH 24/52] use query functions in publish plugins --- .../nuke/plugins/publish/collect_reads.py | 12 ++++----- .../nuke/plugins/publish/precollect_writes.py | 26 ++++++++++++++----- 2 files changed, 25 insertions(+), 13 deletions(-) diff --git a/openpype/hosts/nuke/plugins/publish/collect_reads.py b/openpype/hosts/nuke/plugins/publish/collect_reads.py index 4d6944f5230..b79d9646d59 100644 --- a/openpype/hosts/nuke/plugins/publish/collect_reads.py +++ b/openpype/hosts/nuke/plugins/publish/collect_reads.py @@ -3,6 +3,7 @@ import nuke import pyblish.api +from openpype.client import get_asset_by_name from openpype.pipeline import legacy_io @@ -16,12 +17,11 @@ class CollectNukeReads(pyblish.api.InstancePlugin): families = ["source"] def process(self, instance): - asset_data = legacy_io.find_one({ - "type": "asset", - "name": legacy_io.Session["AVALON_ASSET"] - }) + project_name = legacy_io.active_project() + asset_name = legacy_io.Session["AVALON_ASSET"] + asset_doc = get_asset_by_name(project_name, asset_name) - self.log.debug("asset_data: {}".format(asset_data["data"])) + self.log.debug("asset_doc: {}".format(asset_doc["data"])) self.log.debug("checking instance: {}".format(instance)) @@ -127,7 +127,7 @@ def process(self, instance): "frameStart": first_frame, "frameEnd": last_frame, "colorspace": colorspace, - "handles": int(asset_data["data"].get("handles", 0)), + "handles": int(asset_doc["data"].get("handles", 0)), "step": 1, "fps": int(nuke.root()['fps'].value()) }) diff --git a/openpype/hosts/nuke/plugins/publish/precollect_writes.py b/openpype/hosts/nuke/plugins/publish/precollect_writes.py index 7e50679ed59..a7c07975e2c 100644 --- a/openpype/hosts/nuke/plugins/publish/precollect_writes.py +++ b/openpype/hosts/nuke/plugins/publish/precollect_writes.py @@ -4,7 +4,10 @@ import nuke import pyblish.api -import openpype.api as pype +from openpype.client 
import ( + get_last_version_by_subset_name, + get_representations, +) from openpype.pipeline import ( legacy_io, get_representation_path, @@ -180,17 +183,26 @@ def process(self, instance): if not instance.data["review"]: instance.data["useSequenceForReview"] = False + project_name = legacy_io.active_project() + asset_name = instance.data["asset"] # * Add audio to instance if exists. # Find latest versions document - version_doc = pype.get_latest_version( - instance.data["asset"], "audioMain" + last_version_doc = get_last_version_by_subset_name( + project_name, "audioMain", asset_name=asset_name, fields=["_id"] ) + repre_doc = None - if version_doc: + if last_version_doc: # Try to find it's representation (Expected there is only one) - repre_doc = legacy_io.find_one( - {"type": "representation", "parent": version_doc["_id"]} - ) + repre_docs = list(get_representations( + project_name, version_ids=[last_version_doc["_id"]] + )) + if not repre_docs: + self.log.warning( + "Version document does not contain any representations" + ) + else: + repre_doc = repre_docs[0] # Add audio to instance if representation was found if repre_doc: From 20bee203523e2979df32f010a305024d94a4310e Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 22 Jun 2022 14:25:23 +0200 Subject: [PATCH 25/52] use query functions in nuke lib --- openpype/hosts/nuke/api/lib.py | 135 ++++++++++++++++++++++----------- 1 file changed, 89 insertions(+), 46 deletions(-) diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index 7e44aaa7c5e..57a81f7909f 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -7,10 +7,16 @@ from collections import OrderedDict import clique -from bson.objectid import ObjectId import nuke +from openpype.client import ( + get_project, + get_asset_by_name, + get_versions, + get_last_versions, + get_representations, +) from openpype.api import ( Logger, Anatomy, @@ -734,47 +740,84 @@ def check_inventory_versions(): from .pipeline import parse_container # get all Loader nodes by avalon attribute metadata - for each in nuke.allNodes(): - container = parse_container(each) + node_with_repre_id = [] + repre_ids = set() + # Find all containers and collect it's node and representation ids + for node in nuke.allNodes(): + container = parse_container(node) if container: node = nuke.toNode(container["objectName"]) avalon_knob_data = read_avalon_data(node) + repre_id = avalon_knob_data["representation"] - # get representation from io - representation = legacy_io.find_one({ - "type": "representation", - "_id": ObjectId(avalon_knob_data["representation"]) - }) + repre_ids.add(repre_id) + node_with_repre_id.append((node, repre_id)) - # Failsafe for not finding the representation. 
- if not representation: - log.warning( - "Could not find the representation on " - "node \"{}\"".format(node.name()) - ) - continue + # Skip if nothing was found + if not repre_ids: + return - # Get start frame from version data - version = legacy_io.find_one({ - "type": "version", - "_id": representation["parent"] - }) + project_name = legacy_io.active_project() + # Find representations based on found containers + repre_docs = get_representations( + project_name, + repre_ids=repre_ids, + fields=["_id", "parent"] + ) + # Store representations by id and collect version ids + repre_docs_by_id = {} + version_ids = set() + for repre_doc in repre_docs: + # Use stringed representation id to match value in containers + repre_id = str(repre_doc["_id"]) + repre_docs_by_id[repre_id] = repre_doc + version_ids.add(repre_doc["parent"]) + + version_docs = get_versions( + project_name, version_ids, fields=["_id", "name", "parent"] + ) + # Store versions by id and collect subset ids + version_docs_by_id = {} + subset_ids = set() + for version_doc in version_docs: + version_docs_by_id[version_doc["_id"]] = version_doc + subset_ids.add(version_doc["parent"]) + + # Query last versions based on subset ids + last_versions_by_subset_id = get_last_versions( + project_name, subset_ids=subset_ids, fields=["_id", "parent"] + ) - # get all versions in list - versions = legacy_io.find({ - "type": "version", - "parent": version["parent"] - }).distinct("name") + # Loop through collected container nodes and their representation ids + for item in node_with_repre_id: + # Some python versions of nuke can't unfold tuple in for loop + node, repre_id = item + repre_doc = repre_docs_by_id.get(repre_id) + # Failsafe for not finding the representation. + if not repre_doc: + log.warning(( + "Could not find the representation on node \"{}\"" + ).format(node.name())) + continue - max_version = max(versions) + version_id = repre_doc["parent"] + version_doc = version_docs_by_id.get(version_id) + if not version_doc: + log.warning(( + "Could not find the version on node \"{}\"" + ).format(node.name())) + continue - # check the available version and do match - # change color of node if not max version - if version.get("name") not in [max_version]: - node["tile_color"].setValue(int("0xd84f20ff", 16)) - else: - node["tile_color"].setValue(int("0x4ecd25ff", 16)) + # Get last version based on subset id + subset_id = version_doc["parent"] + last_version = last_versions_by_subset_id[subset_id] + # Check if last version is same as current version + if last_version["_id"] == version_doc["_id"]: + color_value = "0x4ecd25ff" + else: + color_value = "0xd84f20ff" + node["tile_color"].setValue(int(color_value, 16)) def writes_version_sync(): @@ -899,11 +942,9 @@ def format_anatomy(data): file = script_name() data["version"] = get_version_from_path(file) - project_doc = legacy_io.find_one({"type": "project"}) - asset_doc = legacy_io.find_one({ - "type": "asset", - "name": data["avalon"]["asset"] - }) + project_name = anatomy.project_name + project_doc = get_project(project_name) + asset_doc = get_asset_by_name(project_name, data["avalon"]["asset"]) task_name = os.environ["AVALON_TASK"] host_name = os.environ["AVALON_APP"] context_data = get_workdir_data( @@ -1692,12 +1733,13 @@ class WorkfileSettings(object): """ - def __init__(self, - root_node=None, - nodes=None, - **kwargs): - Context._project_doc = kwargs.get( - "project") or legacy_io.find_one({"type": "project"}) + def __init__(self, root_node=None, nodes=None, **kwargs): + project_doc = 
kwargs.get("project") + if project_doc is None: + project_name = legacy_io.active_project() + project_doc = get_project(project_name) + + Context._project_doc = project_doc self._asset = ( kwargs.get("asset_name") or legacy_io.Session["AVALON_ASSET"] @@ -2047,9 +2089,10 @@ def reset_frame_range_handles(self): def reset_resolution(self): """Set resolution to project resolution.""" log.info("Resetting resolution") - project = legacy_io.find_one({"type": "project"}) - asset = legacy_io.Session["AVALON_ASSET"] - asset = legacy_io.find_one({"name": asset, "type": "asset"}) + project_name = legacy_io.active_project() + project = get_project(project_name) + asset_name = legacy_io.Session["AVALON_ASSET"] + asset = get_asset_by_name(project_name, asset_name) asset_data = asset.get('data', {}) data = { From bcecebf9ff016d5fd8b8480769783a71e8cb6d1a Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 22 Jun 2022 16:58:11 +0200 Subject: [PATCH 26/52] husdoutputprocessors is using project anatomy and query functions --- .../avalon_uri_processor.py | 31 +++++-------------- 1 file changed, 7 insertions(+), 24 deletions(-) diff --git a/openpype/hosts/houdini/vendor/husdoutputprocessors/avalon_uri_processor.py b/openpype/hosts/houdini/vendor/husdoutputprocessors/avalon_uri_processor.py index 01a29472e78..202287f1c39 100644 --- a/openpype/hosts/houdini/vendor/husdoutputprocessors/avalon_uri_processor.py +++ b/openpype/hosts/houdini/vendor/husdoutputprocessors/avalon_uri_processor.py @@ -4,19 +4,9 @@ import colorbleed.usdlib as usdlib -from openpype.pipeline import ( - legacy_io, - registered_root, -) - - -def _get_project_publish_template(): - """Return publish template from database for current project""" - project = legacy_io.find_one( - {"type": "project"}, - projection={"config.template.publish": True} - ) - return project["config"]["template"]["publish"] +from openpype.client import get_asset_by_name +from openpype.api import Anatomy +from openpype.pipeline import legacy_io class AvalonURIOutputProcessor(base.OutputProcessorBase): @@ -35,7 +25,6 @@ def __init__(self): ever created in a Houdini session. Therefore be very careful about what data gets put in this object. 
""" - self._template = None self._use_publish_paths = False self._cache = dict() @@ -60,14 +49,11 @@ def parameters(self): return self._parameters def beginSave(self, config_node, t): - self._template = _get_project_publish_template() - parm = self._parms["use_publish_paths"] self._use_publish_paths = config_node.parm(parm).evalAtTime(t) self._cache.clear() def endSave(self): - self._template = None self._use_publish_paths = None self._cache.clear() @@ -138,22 +124,19 @@ def _get_usd_master_path(self, """ PROJECT = legacy_io.Session["AVALON_PROJECT"] - asset_doc = legacy_io.find_one({ - "name": asset, - "type": "asset" - }) + anatomy = Anatomy(PROJECT) + asset_doc = get_asset_by_name(PROJECT, asset) if not asset_doc: raise RuntimeError("Invalid asset name: '%s'" % asset) - root = registered_root() - path = self._template.format(**{ - "root": root, + formatted_anatomy = anatomy.format({ "project": PROJECT, "asset": asset_doc["name"], "subset": subset, "representation": ext, "version": 0 # stub version zero }) + path = formatted_anatomy["publish"]["path"] # Remove the version folder subset_folder = os.path.dirname(os.path.dirname(path)) From a4b09cccc21c1ade1d170fc23cd112e8efc9f4e4 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Wed, 22 Jun 2022 17:39:30 +0200 Subject: [PATCH 27/52] use query functions in houdini --- openpype/hosts/houdini/api/lib.py | 35 ++++++++-------- openpype/hosts/houdini/api/usd.py | 7 ++-- .../houdini/plugins/create/create_hda.py | 24 +++++------ .../plugins/publish/collect_usd_bootstrap.py | 22 +++++----- .../plugins/publish/extract_usd_layered.py | 41 ++++++++++--------- .../validate_usd_shade_model_exists.py | 25 ++++------- 6 files changed, 74 insertions(+), 80 deletions(-) diff --git a/openpype/hosts/houdini/api/lib.py b/openpype/hosts/houdini/api/lib.py index 96ca019f8fb..dd8a5ba4732 100644 --- a/openpype/hosts/houdini/api/lib.py +++ b/openpype/hosts/houdini/api/lib.py @@ -4,6 +4,7 @@ import six +from openpype.client import get_asset_by_name from openpype.api import get_asset from openpype.pipeline import legacy_io @@ -74,16 +75,13 @@ def generate_ids(nodes, asset_id=None): """ if asset_id is None: + project_name = legacy_io.active_project() + asset_name = legacy_io.Session["AVALON_ASSET"] # Get the asset ID from the database for the asset of current context - asset_data = legacy_io.find_one( - { - "type": "asset", - "name": legacy_io.Session["AVALON_ASSET"] - }, - projection={"_id": True} - ) - assert asset_data, "No current asset found in Session" - asset_id = asset_data['_id'] + asset_doc = get_asset_by_name(project_name, asset_name, fields=["_id"]) + + assert asset_doc, "No current asset found in Session" + asset_id = asset_doc['_id'] node_ids = [] for node in nodes: @@ -430,26 +428,29 @@ def maintained_selection(): def reset_framerange(): """Set frame range to current asset""" + project_name = legacy_io.active_project() asset_name = legacy_io.Session["AVALON_ASSET"] - asset = legacy_io.find_one({"name": asset_name, "type": "asset"}) + # Get the asset ID from the database for the asset of current context + asset_doc = get_asset_by_name(project_name, asset_name) + asset_data = asset_doc["data"] - frame_start = asset["data"].get("frameStart") - frame_end = asset["data"].get("frameEnd") + frame_start = asset_data.get("frameStart") + frame_end = asset_data.get("frameEnd") # Backwards compatibility if frame_start is None or frame_end is None: - frame_start = asset["data"].get("edit_in") - frame_end = asset["data"].get("edit_out") + frame_start = 
asset_data.get("edit_in") + frame_end = asset_data.get("edit_out") if frame_start is None or frame_end is None: log.warning("No edit information found for %s" % asset_name) return - handles = asset["data"].get("handles") or 0 - handle_start = asset["data"].get("handleStart") + handles = asset_data.get("handles") or 0 + handle_start = asset_data.get("handleStart") if handle_start is None: handle_start = handles - handle_end = asset["data"].get("handleEnd") + handle_end = asset_data.get("handleEnd") if handle_end is None: handle_end = handles diff --git a/openpype/hosts/houdini/api/usd.py b/openpype/hosts/houdini/api/usd.py index e9991e38ecc..4f4a3d8e6fb 100644 --- a/openpype/hosts/houdini/api/usd.py +++ b/openpype/hosts/houdini/api/usd.py @@ -6,6 +6,7 @@ from Qt import QtWidgets, QtCore, QtGui from openpype import style +from openpype.client import get_asset_by_name from openpype.pipeline import legacy_io from openpype.tools.utils.assets_widget import SingleSelectAssetsWidget @@ -46,10 +47,8 @@ def _on_show(self): select_id = None name = self._parm.eval() if name: - db_asset = legacy_io.find_one( - {"name": name, "type": "asset"}, - {"_id": True} - ) + project_name = legacy_io.active_project() + db_asset = get_asset_by_name(project_name, name, fields=["_id"]) if db_asset: select_id = db_asset["_id"] diff --git a/openpype/hosts/houdini/plugins/create/create_hda.py b/openpype/hosts/houdini/plugins/create/create_hda.py index 5fc78c75397..d15d5bcd297 100644 --- a/openpype/hosts/houdini/plugins/create/create_hda.py +++ b/openpype/hosts/houdini/plugins/create/create_hda.py @@ -1,6 +1,10 @@ # -*- coding: utf-8 -*- import hou +from openpye.client import ( + get_asset_by_name, + get_subsets, +) from openpype.pipeline import legacy_io from openpype.hosts.houdini.api import lib from openpype.hosts.houdini.api import plugin @@ -23,20 +27,16 @@ def _check_existing(self, subset_name): # type: (str) -> bool """Check if existing subset name versions already exists.""" # Get all subsets of the current asset - asset_id = legacy_io.find_one( - {"name": self.data["asset"], "type": "asset"}, - projection={"_id": True} - )['_id'] - subset_docs = legacy_io.find( - { - "type": "subset", - "parent": asset_id - }, - {"name": 1} + project_name = legacy_io.active_project() + asset_doc = get_asset_by_name( + project_name, self.data["asset"], fields=["_id"] + ) + subset_docs = get_subsets( + project_name, asset_ids=[asset_doc["_id"]], fields=["name"] ) - existing_subset_names = set(subset_docs.distinct("name")) existing_subset_names_low = { - _name.lower() for _name in existing_subset_names + subset_doc["name"].lower() + for subset_doc in subset_docs } return subset_name.lower() in existing_subset_names_low diff --git a/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py b/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py index 3f0d10e0ba0..cf8d61cda32 100644 --- a/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py +++ b/openpype/hosts/houdini/plugins/publish/collect_usd_bootstrap.py @@ -1,5 +1,6 @@ import pyblish.api +from openyppe.client import get_subset_by_name, get_asset_by_name from openpype.pipeline import legacy_io import openpype.lib.usdlib as usdlib @@ -50,10 +51,8 @@ def add_bootstrap(self, instance, bootstrap): self.log.debug("Add bootstrap for: %s" % bootstrap) - asset = legacy_io.find_one({ - "name": instance.data["asset"], - "type": "asset" - }) + project_name = legacy_io.active_project() + asset = get_asset_by_name(project_name, instance.data["asset"]) assert asset, 
"Asset must exist: %s" % asset # Check which are not about to be created and don't exist yet @@ -70,7 +69,7 @@ def add_bootstrap(self, instance, bootstrap): self.log.debug("Checking required bootstrap: %s" % required) for subset in required: - if self._subset_exists(instance, subset, asset): + if self._subset_exists(project_name, instance, subset, asset): continue self.log.debug( @@ -93,7 +92,7 @@ def add_bootstrap(self, instance, bootstrap): for key in ["asset"]: new.data[key] = instance.data[key] - def _subset_exists(self, instance, subset, asset): + def _subset_exists(self, project_name, instance, subset, asset): """Return whether subset exists in current context or in database.""" # Allow it to be created during this publish session context = instance.context @@ -106,9 +105,8 @@ def _subset_exists(self, instance, subset, asset): # Or, if they already exist in the database we can # skip them too. - return bool( - legacy_io.find_one( - {"name": subset, "type": "subset", "parent": asset["_id"]}, - {"_id": True} - ) - ) + if get_subset_by_name( + project_name, subset, asset["_id"], fields=["_id"] + ): + return True + return False diff --git a/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py b/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py index bfcd93c1cb3..80919c023ba 100644 --- a/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py +++ b/openpype/hosts/houdini/plugins/publish/extract_usd_layered.py @@ -7,6 +7,12 @@ import pyblish.api import openpype.api +from openpype.client import ( + get_asset_by_name, + get_subset_by_name, + get_last_version_by_subset_id, + get_representation_by_name, +) from openpype.pipeline import ( get_representation_path, legacy_io, @@ -244,11 +250,14 @@ def process(self, instance): # Set up the dependency for publish if they have new content # compared to previous publishes + project_name = legacy_io.active_project() for dependency in active_dependencies: dependency_fname = dependency.data["usdFilename"] filepath = os.path.join(staging_dir, dependency_fname) - similar = self._compare_with_latest_publish(dependency, filepath) + similar = self._compare_with_latest_publish( + project_name, dependency, filepath + ) if similar: # Deactivate this dependency self.log.debug( @@ -268,7 +277,7 @@ def process(self, instance): instance.data["files"] = [] instance.data["files"].append(fname) - def _compare_with_latest_publish(self, dependency, new_file): + def _compare_with_latest_publish(self, project_name, dependency, new_file): import filecmp _, ext = os.path.splitext(new_file) @@ -276,35 +285,29 @@ def _compare_with_latest_publish(self, dependency, new_file): # Compare this dependency with the latest published version # to detect whether we should make this into a new publish # version. If not, skip it. - asset = legacy_io.find_one( - {"name": dependency.data["asset"], "type": "asset"} + asset = get_asset_by_name( + project_name, dependency.data["asset"], fields=["_id"] ) - subset = legacy_io.find_one( - { - "name": dependency.data["subset"], - "type": "subset", - "parent": asset["_id"], - } + subset = get_subset_by_name( + project_name, + dependency.data["subset"], + asset["_id"], + fields=["_id"] ) if not subset: # Subset doesn't exist yet. 
Definitely new file self.log.debug("No existing subset..") return False - version = legacy_io.find_one( - {"type": "version", "parent": subset["_id"], }, - sort=[("name", -1)] + version = get_last_version_by_subset_id( + project_name, subset["_id"], fields=["_id"] ) if not version: self.log.debug("No existing version..") return False - representation = legacy_io.find_one( - { - "name": ext.lstrip("."), - "type": "representation", - "parent": version["_id"], - } + representation = get_representation_by_name( + project_name, ext.lstrip("."), version["_id"] ) if not representation: self.log.debug("No existing representation..") diff --git a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py index 44719ae4883..b979b87d847 100644 --- a/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py +++ b/openpype/hosts/houdini/plugins/publish/validate_usd_shade_model_exists.py @@ -2,6 +2,7 @@ import pyblish.api +from openpype.client import get_subset_by_name import openpype.api from openpype.pipeline import legacy_io @@ -15,31 +16,23 @@ class ValidateUSDShadeModelExists(pyblish.api.InstancePlugin): label = "USD Shade model exists" def process(self, instance): - - asset = instance.data["asset"] + project_name = legacy_io.active_project() + asset_name = instance.data["asset"] subset = instance.data["subset"] # Assume shading variation starts after a dot separator shade_subset = subset.split(".", 1)[0] model_subset = re.sub("^usdShade", "usdModel", shade_subset) - asset_doc = legacy_io.find_one( - {"name": asset, "type": "asset"}, - {"_id": True} - ) + asset_doc = instance.data.get("assetEntity") if not asset_doc: - raise RuntimeError("Asset does not exist: %s" % asset) - - subset_doc = legacy_io.find_one( - { - "name": model_subset, - "type": "subset", - "parent": asset_doc["_id"], - }, - {"_id": True} + raise RuntimeError("Asset document is not filled on instance.") + + subset_doc = get_subset_by_name( + project_name, model_subset, asset_doc["_id"], fields=["_id"] ) if not subset_doc: raise RuntimeError( "USD Model subset not found: " - "%s (%s)" % (model_subset, asset) + "%s (%s)" % (model_subset, asset_name) ) From 25b7f98022e7ac3a9e85352713130ba1121a05bd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?F=C3=A9lix=20David?= Date: Thu, 23 Jun 2022 12:59:01 +0200 Subject: [PATCH 28/52] Renaming: Kitsu Plural func sync_all_projects --- openpype/modules/kitsu/kitsu_module.py | 4 ++-- openpype/modules/kitsu/utils/update_op_with_zou.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/modules/kitsu/kitsu_module.py b/openpype/modules/kitsu/kitsu_module.py index 8e7ab6f78c3..d19d14dda77 100644 --- a/openpype/modules/kitsu/kitsu_module.py +++ b/openpype/modules/kitsu/kitsu_module.py @@ -129,8 +129,8 @@ def sync_service(login, password): login (str): Kitsu user login password (str): Kitsu user password """ - from .utils.update_op_with_zou import sync_all_project + from .utils.update_op_with_zou import sync_all_projects from .utils.sync_service import start_listeners - sync_all_project(login, password) + sync_all_projects(login, password) start_listeners(login, password) diff --git a/openpype/modules/kitsu/utils/update_op_with_zou.py b/openpype/modules/kitsu/utils/update_op_with_zou.py index 08e50d959b2..cd98c0d2042 100644 --- a/openpype/modules/kitsu/utils/update_op_with_zou.py +++ b/openpype/modules/kitsu/utils/update_op_with_zou.py @@ -250,7 +250,7 @@ def 
write_project_to_op(project: dict, dbcon: AvalonMongoDB) -> UpdateOne: ) -def sync_all_project(login: str, password: str): +def sync_all_projects(login: str, password: str): """Update all OP projects in DB with Zou data. Args: From 5325b6eb508d695ea29be31c3ff73347ada7346b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 23 Jun 2022 13:57:57 +0200 Subject: [PATCH 29/52] :package: update OIIO for linux --- pyproject.toml | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 4b297fe0422..5bf4f18db80 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -114,6 +114,8 @@ build-backend = "poetry.core.masonry.api" # https://pip.pypa.io/en/stable/cli/pip_install/#requirement-specifiers version = "==5.15.2" +# TODO: we will need to handle different linux flavours here and +# also different macos versions too. [openpype.thirdparty.ffmpeg.windows] url = "https://distribute.openpype.io/thirdparty/ffmpeg-4.4-windows.zip" hash = "dd51ba29d64ee238e7c4c3c7301b19754c3f0ee2e2a729c20a0e2789e72db925" @@ -131,8 +133,8 @@ url = "https://distribute.openpype.io/thirdparty/oiio_tools-2.3.10-windows.zip" hash = "b9950f5d2fa3720b52b8be55bacf5f56d33f9e029d38ee86534995f3d8d253d2" [openpype.thirdparty.oiio.linux] -url = "https://distribute.openpype.io/thirdparty/oiio_tools-2.2.12-linux.tgz" -hash = "de63a8bf7f6c45ff59ecafeba13123f710c2cbc1783ec9e0b938e980d4f5c37f" +url = "https://distribute.openpype.io/thirdparty/oiio_tools-2.2.20-linux-centos7" +hash = "BE1ABF8A50E9DA5913298447421AF0A17829D83ED6252AE1D40DA7FA36A78787" [openpype.thirdparty.oiio.darwin] url = "https://distribute.openpype.io/thirdparty/oiio-2.2.0-darwin.tgz" From 68ee05bd36312f7bb5f476125dee26bf7241f239 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 23 Jun 2022 14:00:18 +0200 Subject: [PATCH 30/52] filter representations before integration starts --- openpype/plugins/publish/integrate_new.py | 62 +++++++++++++++-------- 1 file changed, 41 insertions(+), 21 deletions(-) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index 24711052509..918ca4ba949 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -145,9 +145,43 @@ def process(self, instance): if instance.data.get("farm"): return + # Prepare repsentations that should be integrated + repres = instance.data.get("representations") + # Raise error if instance don't have any representations + if not repres: + raise ValueError( + "Instance {} has no files to transfer".format( + instance.data["family"] + ) + ) + + # Validate type of stored representations + if not isinstance(repres, (list, tuple)): + raise TypeError( + "Instance 'files' must be a list, got: {0} {1}".format( + str(type(repres)), str(repres) + ) + ) + + # Filter representations + filtered_repres = [] + for repre in repres: + if "delete" in repre.get("tags", []): + continue + filtered_repres.append(repre) + + # Skip instance if there are not representations to integrate + # all representations should not be integrated + if not filtered_repres: + self.log.warning(( + "Skipping, there are no representations" + " to integrate for instance {}" + ).format(instance.data["family"])) + return + self.integrated_file_sizes = {} try: - self.register(instance) + self.register(instance, filtered_repres) self.log.info("Integrated Asset in to the database ...") self.log.info("instance.data: {}".format(instance.data)) 
self.handle_destination_files(self.integrated_file_sizes, @@ -158,7 +192,7 @@ def process(self, instance): self.handle_destination_files(self.integrated_file_sizes, 'remove') six.reraise(*sys.exc_info()) - def register(self, instance): + def register(self, instance, repres): # Required environment variables anatomy_data = instance.data["anatomyData"] @@ -236,18 +270,6 @@ def register(self, instance): "Establishing staging directory @ {0}".format(stagingdir) ) - # Ensure at least one file is set up for transfer in staging dir. - repres = instance.data.get("representations") - repres = instance.data.get("representations") - msg = "Instance {} has no files to transfer".format( - instance.data["family"]) - assert repres, msg - assert isinstance(repres, (list, tuple)), ( - "Instance 'files' must be a list, got: {0} {1}".format( - str(type(repres)), str(repres) - ) - ) - subset = self.get_subset(asset_entity, instance) instance.data["subsetEntity"] = subset @@ -270,7 +292,10 @@ def register(self, instance): self.log.debug("Creating version ...") - new_repre_names_low = [_repre["name"].lower() for _repre in repres] + new_repre_names_low = [ + _repre["name"].lower() + for _repre in repres + ] existing_version = legacy_io.find_one({ 'type': 'version', @@ -373,18 +398,13 @@ def register(self, instance): if profile: template_name = profile["template_name"] - - published_representations = {} - for idx, repre in enumerate(instance.data["representations"]): + for idx, repre in enumerate(repres): # reset transfers for next representation # instance.data['transfers'] is used as a global variable # in current codebase instance.data['transfers'] = list(orig_transfers) - if "delete" in repre.get("tags", []): - continue - published_files = [] # create template data for Anatomy From 7cb38127658e2ba43780e0a537350f407229f5d1 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Thu, 23 Jun 2022 14:09:47 +0200 Subject: [PATCH 31/52] moved reset of tranfers to the end of repre iteration --- openpype/plugins/publish/integrate_new.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/openpype/plugins/publish/integrate_new.py b/openpype/plugins/publish/integrate_new.py index 918ca4ba949..4c14c17dae8 100644 --- a/openpype/plugins/publish/integrate_new.py +++ b/openpype/plugins/publish/integrate_new.py @@ -400,11 +400,6 @@ def register(self, instance, repres): published_representations = {} for idx, repre in enumerate(repres): - # reset transfers for next representation - # instance.data['transfers'] is used as a global variable - # in current codebase - instance.data['transfers'] = list(orig_transfers) - published_files = [] # create template data for Anatomy @@ -682,6 +677,10 @@ def register(self, instance, repres): "published_files": published_files } self.log.debug("__ representations: {}".format(representations)) + # reset transfers for next representation + # instance.data['transfers'] is used as a global variable + # in current codebase + instance.data['transfers'] = list(orig_transfers) # Remove old representations if there are any (before insertion of new) if existing_repres: From 0a20a69e0e37a656d2004abf17259029c1750d6e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 23 Jun 2022 14:46:06 +0200 Subject: [PATCH 32/52] :bug: fix extension --- pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 5bf4f18db80..3a5acb84907 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -133,8 +133,8 @@ url = 
"https://distribute.openpype.io/thirdparty/oiio_tools-2.3.10-windows.zip" hash = "b9950f5d2fa3720b52b8be55bacf5f56d33f9e029d38ee86534995f3d8d253d2" [openpype.thirdparty.oiio.linux] -url = "https://distribute.openpype.io/thirdparty/oiio_tools-2.2.20-linux-centos7" -hash = "BE1ABF8A50E9DA5913298447421AF0A17829D83ED6252AE1D40DA7FA36A78787" +url = "https://distribute.openpype.io/thirdparty/oiio_tools-2.2.20-linux-centos7.tgz" +hash = "be1abf8a50e9da5913298447421af0a17829d83ed6252ae1d40da7fa36a78787" [openpype.thirdparty.oiio.darwin] url = "https://distribute.openpype.io/thirdparty/oiio-2.2.0-darwin.tgz" From 1d6fff34746c34e76775d59118c1748a75ff1031 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= Date: Thu, 23 Jun 2022 15:11:18 +0200 Subject: [PATCH 33/52] :memo: update requirements for oiio on centos 7 --- website/docs/dev_requirements.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/website/docs/dev_requirements.md b/website/docs/dev_requirements.md index a10aea78651..eb4b1322977 100644 --- a/website/docs/dev_requirements.md +++ b/website/docs/dev_requirements.md @@ -87,9 +87,11 @@ This can also be hosted on the cloud in fully distributed deployments. - [**Avalon**](https://github.com/getavalon) - [**Pyblish**](https://github.com/pyblish) - [**OpenTimelineIO**](https://github.com/PixarAnimationStudios/OpenTimelineIO) -- [**OpenImageIO**](https://github.com/OpenImageIO/oiio) +- [**OpenImageIO**](https://github.com/OpenImageIO/oiio) [^centos7] - [**FFmpeg**](https://github.com/FFmpeg/FFmpeg) +[^centos7]: On Centos 7 you need to install additional libraries to support OIIO there - mainly boost +and libraw (`sudo yum install boost-1.53.0` and `sudo yum install LibRaw`) ### Python modules we use and their licenses From 59fee838079b1cbc887d36b1021360235cb09f72 Mon Sep 17 00:00:00 2001 From: Petr Dvorak Date: Fri, 24 Jun 2022 11:35:39 +0200 Subject: [PATCH 34/52] Changes in schema settings for AE and Harmony --- .../system_settings/applications.json | 18 ++------- .../host_settings/schema_aftereffects.json | 38 +++++++------------ .../host_settings/schema_harmony.json | 38 +++++++------------ 3 files changed, 31 insertions(+), 63 deletions(-) diff --git a/openpype/settings/defaults/system_settings/applications.json b/openpype/settings/defaults/system_settings/applications.json index 6c90a996610..b70b59b95bc 100644 --- a/openpype/settings/defaults/system_settings/applications.json +++ b/openpype/settings/defaults/system_settings/applications.json @@ -986,8 +986,6 @@ }, "variants": { "21": { - "enabled": true, - "variant_label": "21", "executables": { "windows": [ "c:\\Program Files (x86)\\Toon Boom Animation\\Toon Boom Harmony 21 Premium\\win64\\bin\\HarmonyPremium.exe" @@ -1005,8 +1003,6 @@ "environment": {} }, "20": { - "enabled": true, - "variant_label": "20", "executables": { "windows": [ "c:\\Program Files (x86)\\Toon Boom Animation\\Toon Boom Harmony 20 Premium\\win64\\bin\\HarmonyPremium.exe" @@ -1024,8 +1020,6 @@ "environment": {} }, "17": { - "enabled": true, - "variant_label": "17", "executables": { "windows": [ "c:\\Program Files (x86)\\Toon Boom Animation\\Toon Boom Harmony 17 Premium\\win64\\bin\\HarmonyPremium.exe" @@ -1155,11 +1149,9 @@ }, "variants": { "2020": { - "enabled": true, - "variant_label": "2020", "executables": { "windows": [ - "" + "C:\\Program Files\\Adobe\\Adobe After Effects 2020\\Support Files\\Photoshop.exe" ], "darwin": [], "linux": [] @@ -1172,11 +1164,9 @@ "environment": {} }, "2021": { - "enabled": true, - "variant_label": 
"2021", "executables": { "windows": [ - "C:\\Program Files\\Adobe\\Adobe After Effects 2021\\Support Files\\AfterFX.exe" + "C:\\Program Files\\Adobe\\Adobe After Effects 2021\\Support Files\\Photoshop.exe" ], "darwin": [], "linux": [] @@ -1189,11 +1179,9 @@ "environment": {} }, "2022": { - "enabled": true, - "variant_label": "2022", "executables": { "windows": [ - "C:\\Program Files\\Adobe\\Adobe After Effects 2022\\Support Files\\AfterFX.exe" + "C:\\Program Files\\Adobe\\Adobe After Effects 2022\\Support Files\\Photoshop.exe" ], "darwin": [], "linux": [] diff --git a/openpype/settings/entities/schemas/system_schema/host_settings/schema_aftereffects.json b/openpype/settings/entities/schemas/system_schema/host_settings/schema_aftereffects.json index 334c9aa235c..b92a2edf85f 100644 --- a/openpype/settings/entities/schemas/system_schema/host_settings/schema_aftereffects.json +++ b/openpype/settings/entities/schemas/system_schema/host_settings/schema_aftereffects.json @@ -20,31 +20,21 @@ "type": "raw-json" }, { - "type": "dict", + "type": "dict-modifiable", "key": "variants", - "children": [ - { - "type": "schema_template", - "name": "template_host_variant", - "template_data": [ - { - "app_variant_label": "2020", - "app_variant": "2020", - "variant_skip_paths": ["use_python_2"] - }, - { - "app_variant_label": "2021", - "app_variant": "2021", - "variant_skip_paths": ["use_python_2"] - }, - { - "app_variant_label": "2022", - "app_variant": "2022", - "variant_skip_paths": ["use_python_2"] - } - ] - } - ] + "collapsible_key": true, + "use_label_wrap": false, + "object_type": { + "type": "dict", + "collapsible": true, + "children": [ + { + "type": "schema_template", + "name": "template_host_variant_items", + "skip_paths": ["use_python_2"] + } + ] + } } ] } diff --git a/openpype/settings/entities/schemas/system_schema/host_settings/schema_harmony.json b/openpype/settings/entities/schemas/system_schema/host_settings/schema_harmony.json index 69ce7735e88..d5d041d0c22 100644 --- a/openpype/settings/entities/schemas/system_schema/host_settings/schema_harmony.json +++ b/openpype/settings/entities/schemas/system_schema/host_settings/schema_harmony.json @@ -20,31 +20,21 @@ "type": "raw-json" }, { - "type": "dict", + "type": "dict-modifiable", "key": "variants", - "children": [ - { - "type": "schema_template", - "name": "template_host_variant", - "template_data": [ - { - "app_variant_label": "21", - "app_variant": "21", - "variant_skip_paths": ["use_python_2"] - }, - { - "app_variant_label": "20", - "app_variant": "20", - "variant_skip_paths": ["use_python_2"] - }, - { - "app_variant_label": "17", - "app_variant": "17", - "variant_skip_paths": ["use_python_2"] - } - ] - } - ] + "collapsible_key": true, + "use_label_wrap": false, + "object_type": { + "type": "dict", + "collapsible": true, + "children": [ + { + "type": "schema_template", + "name": "template_host_variant_items", + "skip_paths": ["use_python_2"] + } + ] + } } ] } From a50f73ab7f495d901fdecfa5b4ca1b3f6589160d Mon Sep 17 00:00:00 2001 From: 64qam Date: Fri, 24 Jun 2022 13:43:08 +0200 Subject: [PATCH 35/52] Apply suggestions from code review Co-authored-by: Jakub Trllo <43494761+iLLiCiTiT@users.noreply.github.com> --- .../settings/defaults/system_settings/applications.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/openpype/settings/defaults/system_settings/applications.json b/openpype/settings/defaults/system_settings/applications.json index b70b59b95bc..30b0a5cbe3e 100644 --- 
a/openpype/settings/defaults/system_settings/applications.json +++ b/openpype/settings/defaults/system_settings/applications.json @@ -1151,7 +1151,7 @@ "2020": { "executables": { "windows": [ - "C:\\Program Files\\Adobe\\Adobe After Effects 2020\\Support Files\\Photoshop.exe" + "C:\\Program Files\\Adobe\\Adobe After Effects 2020\\Support Files\\AfterFX.exe" ], "darwin": [], "linux": [] @@ -1166,7 +1166,7 @@ "2021": { "executables": { "windows": [ - "C:\\Program Files\\Adobe\\Adobe After Effects 2021\\Support Files\\Photoshop.exe" + "C:\\Program Files\\Adobe\\Adobe After Effects 2021\\Support Files\\AfterFX.exe" ], "darwin": [], "linux": [] @@ -1181,7 +1181,7 @@ "2022": { "executables": { "windows": [ - "C:\\Program Files\\Adobe\\Adobe After Effects 2022\\Support Files\\Photoshop.exe" + "C:\\Program Files\\Adobe\\Adobe After Effects 2022\\Support Files\\AfterFX.exe" ], "darwin": [], "linux": [] From 87a71842cacc409007e789fcb4dbf889869ed83d Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 24 Jun 2022 14:50:29 +0200 Subject: [PATCH 36/52] use nuke api to get expected output files instead of guessing based on files in output directory --- .../plugins/publish/extract_render_local.py | 27 ++++++++++++------- .../nuke/plugins/publish/precollect_writes.py | 25 +++++++++++++---- 2 files changed, 38 insertions(+), 14 deletions(-) diff --git a/openpype/hosts/nuke/plugins/publish/extract_render_local.py b/openpype/hosts/nuke/plugins/publish/extract_render_local.py index 057bca11ac1..1595fe03fb0 100644 --- a/openpype/hosts/nuke/plugins/publish/extract_render_local.py +++ b/openpype/hosts/nuke/plugins/publish/extract_render_local.py @@ -42,12 +42,22 @@ def process(self, instance): self.log.info("Start frame: {}".format(first_frame)) self.log.info("End frame: {}".format(last_frame)) - # write node url might contain nuke's ctl expressin - # as [python ...]/path... - path = node["file"].evaluate() + node_file = node["file"] + # Collecte expected filepaths for each frame + # - for cases that output is still image is first created set of + # paths which is then sorted and converted to list + expected_paths = list(sorted({ + node_file.evaluate(frame) + for frame in range(first_frame, last_frame + 1) + })) + # Extract only filenames for representation + filenames = [ + os.path.basename(filepath) + for filepath in expected_paths + ] # Ensure output directory exists. 
- out_dir = os.path.dirname(path) + out_dir = os.path.dirname(expected_paths[0]) if not os.path.exists(out_dir): os.makedirs(out_dir) @@ -67,12 +77,11 @@ def process(self, instance): if "representations" not in instance.data: instance.data["representations"] = [] - collected_frames = os.listdir(out_dir) - if len(collected_frames) == 1: + if len(filenames) == 1: repre = { 'name': ext, 'ext': ext, - 'files': collected_frames.pop(), + 'files': filenames[0], "stagingDir": out_dir } else: @@ -81,7 +90,7 @@ def process(self, instance): 'ext': ext, 'frameStart': "%0{}d".format( len(str(last_frame))) % first_frame, - 'files': collected_frames, + 'files': filenames, "stagingDir": out_dir } instance.data["representations"].append(repre) @@ -105,7 +114,7 @@ def process(self, instance): families.remove('still.local') instance.data["families"] = families - collections, remainder = clique.assemble(collected_frames) + collections, remainder = clique.assemble(filenames) self.log.info('collections: {}'.format(str(collections))) if collections: diff --git a/openpype/hosts/nuke/plugins/publish/precollect_writes.py b/openpype/hosts/nuke/plugins/publish/precollect_writes.py index a7c07975e2c..a267652f11d 100644 --- a/openpype/hosts/nuke/plugins/publish/precollect_writes.py +++ b/openpype/hosts/nuke/plugins/publish/precollect_writes.py @@ -56,9 +56,21 @@ def process(self, instance): first_frame = int(node["first"].getValue()) last_frame = int(node["last"].getValue()) - # get path - path = nuke.filename(node) - output_dir = os.path.dirname(path) + # Prepare expected output paths by evaluating each frame of write node + # - paths are first collected to set to avoid duplicated paths, then + # sorted and converted to list + node_file = node["file"] + expected_paths = list(sorted({ + node_file.evaluate(frame) + for frame in range(first_frame, last_frame + 1) + })) + expected_filenames = [ + os.path.basename(filepath) + for filepath in expected_paths + ] + + output_dir = os.path.dirname(expected_paths[0]) + self.log.debug('output dir: {}'.format(output_dir)) # create label @@ -83,8 +95,11 @@ def process(self, instance): } try: - collected_frames = [f for f in os.listdir(output_dir) - if ext in f] + collected_frames = [ + filename + for filename in os.listdir(output_dir) + if filename in expected_filenames + ] if collected_frames: collected_frames_len = len(collected_frames) frame_start_str = "%0{}d".format( From 4c6677e3ce80ef55e08a13099f3918314f891abe Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 24 Jun 2022 15:01:16 +0200 Subject: [PATCH 37/52] fix missing path variable --- openpype/hosts/nuke/plugins/publish/precollect_writes.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/hosts/nuke/plugins/publish/precollect_writes.py b/openpype/hosts/nuke/plugins/publish/precollect_writes.py index a267652f11d..049958bd07d 100644 --- a/openpype/hosts/nuke/plugins/publish/precollect_writes.py +++ b/openpype/hosts/nuke/plugins/publish/precollect_writes.py @@ -68,8 +68,8 @@ def process(self, instance): os.path.basename(filepath) for filepath in expected_paths ] - - output_dir = os.path.dirname(expected_paths[0]) + path = nuke.filename(node) + output_dir = os.path.dirname(path) self.log.debug('output dir: {}'.format(output_dir)) From ca38d5d484f217ecaaf71888bf67c44707850d4c Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Fri, 24 Jun 2022 15:19:03 +0200 Subject: [PATCH 38/52] fix typo --- openpype/hosts/houdini/plugins/create/create_hda.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) 
diff --git a/openpype/hosts/houdini/plugins/create/create_hda.py b/openpype/hosts/houdini/plugins/create/create_hda.py index d15d5bcd297..b98da8b8bb1 100644 --- a/openpype/hosts/houdini/plugins/create/create_hda.py +++ b/openpype/hosts/houdini/plugins/create/create_hda.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- import hou -from openpye.client import ( +from openpype.client import ( get_asset_by_name, get_subsets, ) From ae0427bbad4db877472799b207cf1db206eadd76 Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 24 Jun 2022 16:49:37 +0200 Subject: [PATCH 39/52] :bug: fix loading and updating vbd/bgeo sequences --- .../hosts/houdini/plugins/load/load_bgeo.py | 6 ++--- .../hosts/houdini/plugins/load/load_vdb.py | 27 +++++++------------ 2 files changed, 12 insertions(+), 21 deletions(-) diff --git a/openpype/hosts/houdini/plugins/load/load_bgeo.py b/openpype/hosts/houdini/plugins/load/load_bgeo.py index a463d513832..1c0cb81bee0 100644 --- a/openpype/hosts/houdini/plugins/load/load_bgeo.py +++ b/openpype/hosts/houdini/plugins/load/load_bgeo.py @@ -70,7 +70,6 @@ def format_path(path, is_sequence): # The path is either a single file or sequence in a folder. if not is_sequence: filename = path - print("single") else: filename = re.sub(r"(.*)\.(\d+)\.(bgeo.*)", "\\1.$F4.\\3", path) @@ -94,9 +93,10 @@ def update(self, container, representation): # Update the file path file_path = get_representation_path(representation) - file_path = self.format_path(file_path) + is_sequence = bool(representation["context"].get("frame")) + file_path = self.format_path(file_path, is_sequence) - file_node.setParms({"fileName": file_path}) + file_node.setParms({"file": file_path}) # Update attribute node.setParms({"representation": str(representation["_id"])}) diff --git a/openpype/hosts/houdini/plugins/load/load_vdb.py b/openpype/hosts/houdini/plugins/load/load_vdb.py index 9455b76b89f..efbac334ab6 100644 --- a/openpype/hosts/houdini/plugins/load/load_vdb.py +++ b/openpype/hosts/houdini/plugins/load/load_vdb.py @@ -31,6 +31,7 @@ def load(self, context, name=None, namespace=None, data=None): # Create a new geo node container = obj.createNode("geo", node_name=node_name) + is_sequence = bool(context["representation"]["context"].get("frame")) # Remove the file node, it only loads static meshes # Houdini 17 has removed the file node from the geo node @@ -40,7 +41,7 @@ def load(self, context, name=None, namespace=None, data=None): # Explicitly create a file node file_node = container.createNode("file", node_name=node_name) - file_node.setParms({"file": self.format_path(self.fname)}) + file_node.setParms({"file": self.format_path(self.fname, is_sequence)}) # Set display on last node file_node.setDisplayFlag(True) @@ -57,30 +58,19 @@ def load(self, context, name=None, namespace=None, data=None): suffix="", ) - def format_path(self, path): + @staticmethod + def format_path(path, is_sequence): """Format file path correctly for single vdb or vdb sequence.""" if not os.path.exists(path): raise RuntimeError("Path does not exist: %s" % path) # The path is either a single file or sequence in a folder. 
- is_single_file = os.path.isfile(path) - if is_single_file: + if not is_sequence: filename = path else: - # The path points to the publish .vdb sequence folder so we - # find the first file in there that ends with .vdb - files = sorted(os.listdir(path)) - first = next((x for x in files if x.endswith(".vdb")), None) - if first is None: - raise RuntimeError( - "Couldn't find first .vdb file of " - "sequence in: %s" % path - ) + filename = re.sub(r"(.*)\.(\d+)\.vdb$", "\\1.$F4.vdb", path) - # Set .vdb to $F.vdb - first = re.sub(r"\.(\d+)\.vdb$", ".$F.vdb", first) - - filename = os.path.join(path, first) + filename = os.path.join(path, filename) filename = os.path.normpath(filename) filename = filename.replace("\\", "/") @@ -100,7 +90,8 @@ def update(self, container, representation): # Update the file path file_path = get_representation_path(representation) - file_path = self.format_path(file_path) + is_sequence = bool(representation["context"].get("frame")) + file_path = self.format_path(file_path, is_sequence) file_node.setParms({"file": file_path}) From 8412fca0b1d7786417623770f3bb866a732154be Mon Sep 17 00:00:00 2001 From: Ondrej Samohel Date: Fri, 24 Jun 2022 17:41:44 +0200 Subject: [PATCH 40/52] :recycle: refactor format function --- openpype/hosts/houdini/plugins/load/load_bgeo.py | 9 +++++---- openpype/hosts/houdini/plugins/load/load_vdb.py | 10 +++++----- 2 files changed, 10 insertions(+), 9 deletions(-) diff --git a/openpype/hosts/houdini/plugins/load/load_bgeo.py b/openpype/hosts/houdini/plugins/load/load_bgeo.py index 1c0cb81bee0..b298d423bc4 100644 --- a/openpype/hosts/houdini/plugins/load/load_bgeo.py +++ b/openpype/hosts/houdini/plugins/load/load_bgeo.py @@ -44,7 +44,8 @@ def load(self, context, name=None, namespace=None, data=None): # Explicitly create a file node file_node = container.createNode("file", node_name=node_name) - file_node.setParms({"file": self.format_path(self.fname, is_sequence)}) + file_node.setParms( + {"file": self.format_path(self.fname, context["representation"])}) # Set display on last node file_node.setDisplayFlag(True) @@ -62,11 +63,12 @@ def load(self, context, name=None, namespace=None, data=None): ) @staticmethod - def format_path(path, is_sequence): + def format_path(path, representation): """Format file path correctly for single bgeo or bgeo sequence.""" if not os.path.exists(path): raise RuntimeError("Path does not exist: %s" % path) + is_sequence = bool(representation["context"].get("frame")) # The path is either a single file or sequence in a folder. 
if not is_sequence: filename = path @@ -93,8 +95,7 @@ def update(self, container, representation): # Update the file path file_path = get_representation_path(representation) - is_sequence = bool(representation["context"].get("frame")) - file_path = self.format_path(file_path, is_sequence) + file_path = self.format_path(file_path, representation) file_node.setParms({"file": file_path}) diff --git a/openpype/hosts/houdini/plugins/load/load_vdb.py b/openpype/hosts/houdini/plugins/load/load_vdb.py index efbac334ab6..c558a7a0e7a 100644 --- a/openpype/hosts/houdini/plugins/load/load_vdb.py +++ b/openpype/hosts/houdini/plugins/load/load_vdb.py @@ -31,7 +31,6 @@ def load(self, context, name=None, namespace=None, data=None): # Create a new geo node container = obj.createNode("geo", node_name=node_name) - is_sequence = bool(context["representation"]["context"].get("frame")) # Remove the file node, it only loads static meshes # Houdini 17 has removed the file node from the geo node @@ -41,7 +40,8 @@ def load(self, context, name=None, namespace=None, data=None): # Explicitly create a file node file_node = container.createNode("file", node_name=node_name) - file_node.setParms({"file": self.format_path(self.fname, is_sequence)}) + file_node.setParms( + {"file": self.format_path(self.fname, context["representation"])}) # Set display on last node file_node.setDisplayFlag(True) @@ -59,11 +59,12 @@ def load(self, context, name=None, namespace=None, data=None): ) @staticmethod - def format_path(path, is_sequence): + def format_path(path, representation): """Format file path correctly for single vdb or vdb sequence.""" if not os.path.exists(path): raise RuntimeError("Path does not exist: %s" % path) + is_sequence = bool(representation["context"].get("frame")) # The path is either a single file or sequence in a folder. 
if not is_sequence: filename = path @@ -90,8 +91,7 @@ def update(self, container, representation): # Update the file path file_path = get_representation_path(representation) - is_sequence = bool(representation["context"].get("frame")) - file_path = self.format_path(file_path, is_sequence) + file_path = self.format_path(file_path, representation) file_node.setParms({"file": file_path}) From b4817f70a6b2058aff705ebc5dcae67ff2965c15 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 24 Jun 2022 19:11:21 +0200 Subject: [PATCH 41/52] show what is allowed to drop in the files widget --- openpype/style/style.css | 3 + .../widgets/attribute_defs/files_widget.py | 125 ++++++++++++++++-- 2 files changed, 117 insertions(+), 11 deletions(-) diff --git a/openpype/style/style.css b/openpype/style/style.css index d76d833be14..72d12a92309 100644 --- a/openpype/style/style.css +++ b/openpype/style/style.css @@ -1418,3 +1418,6 @@ InViewButton, InViewButton:disabled { InViewButton:hover { background: rgba(255, 255, 255, 37); } +SupportLabel { + color: {color:font-disabled}; +} diff --git a/openpype/widgets/attribute_defs/files_widget.py b/openpype/widgets/attribute_defs/files_widget.py index 23cf8342b16..24e3f4bb251 100644 --- a/openpype/widgets/attribute_defs/files_widget.py +++ b/openpype/widgets/attribute_defs/files_widget.py @@ -26,26 +26,122 @@ EXT_ROLE = QtCore.Qt.UserRole + 8 +class SupportLabel(QtWidgets.QLabel): + pass + + class DropEmpty(QtWidgets.QWidget): - _drop_enabled_text = "Drag & Drop\n(drop files here)" + _empty_extensions = "Any file" - def __init__(self, parent): + def __init__(self, single_item, allow_sequences, parent): super(DropEmpty, self).__init__(parent) - label_widget = QtWidgets.QLabel(self._drop_enabled_text, self) - label_widget.setAlignment(QtCore.Qt.AlignCenter) - label_widget.setAttribute(QtCore.Qt.WA_TranslucentBackground) + drop_label_widget = QtWidgets.QLabel("Drag & Drop files here", self) - layout = QtWidgets.QHBoxLayout(self) + detail_widget = QtWidgets.QWidget(self) + items_label_widget = SupportLabel(detail_widget) + extensions_label_widget = SupportLabel(detail_widget) + extensions_label_widget.setWordWrap(True) + + detail_layout = QtWidgets.QVBoxLayout(detail_widget) + detail_layout.setContentsMargins(0, 0, 0, 0) + detail_layout.addStretch(1) + detail_layout.addWidget( + items_label_widget, 0, alignment=QtCore.Qt.AlignCenter + ) + detail_layout.addWidget( + extensions_label_widget, 0, alignment=QtCore.Qt.AlignCenter + ) + + layout = QtWidgets.QVBoxLayout(self) layout.setContentsMargins(0, 0, 0, 0) layout.addSpacing(10) layout.addWidget( - label_widget, - alignment=QtCore.Qt.AlignCenter + drop_label_widget, 0, alignment=QtCore.Qt.AlignCenter ) + layout.addWidget(detail_widget, 1) layout.addSpacing(10) - self._label_widget = label_widget + for widget in ( + detail_widget, + drop_label_widget, + items_label_widget, + extensions_label_widget, + ): + if isinstance(widget, QtWidgets.QLabel): + widget.setAlignment(QtCore.Qt.AlignCenter) + widget.setAttribute(QtCore.Qt.WA_TranslucentBackground) + + self._single_item = single_item + self._allow_sequences = allow_sequences + self._allowed_extensions = set() + self._allow_folders = None + + self._drop_label_widget = drop_label_widget + self._items_label_widget = items_label_widget + self._extensions_label_widget = extensions_label_widget + + self.set_allow_folders(False) + + def set_extensions(self, extensions): + if extensions: + extensions = { + ext.replace(".", "") + for ext in extensions + } + if extensions == 
self._allowed_extensions: + return + self._allowed_extensions = extensions + + self._update_items_label() + + def set_allow_folders(self, allowed): + if self._allow_folders == allowed: + return + + self._allow_folders = allowed + self._update_items_label() + + def _update_items_label(self): + extensions_label = "" + if self._allowed_extensions: + extensions_label = ", ".join(sorted(self._allowed_extensions)) + + allowed_items = [] + if self._allow_folders: + allowed_items.append("folder") + + if extensions_label: + allowed_items.append("file") + if self._allow_sequences: + allowed_items.append("sequence") + + num_label = "Single" + if not self._single_item: + num_label = "Multiple" + allowed_items = [item + "s" for item in allowed_items] + + if not allowed_items: + allowed_items_label = "" + elif len(allowed_items) == 1: + allowed_items_label = allowed_items[0] + elif len(allowed_items) == 2: + allowed_items_label = " or ".join(allowed_items) + else: + last_item = allowed_items.pop(-1) + new_last_item = " or ".join(last_item, allowed_items.pop(-1)) + allowed_items.append(new_last_item) + allowed_items_label = ", ".join(allowed_items) + + if allowed_items_label: + items_label = "{} {}".format(num_label, allowed_items_label) + if extensions_label: + items_label += " of" + else: + items_label = "It is not allowed to add anything here!" + + self._items_label_widget.setText(items_label) + self._extensions_label_widget.setText(extensions_label) def paintEvent(self, event): super(DropEmpty, self).paintEvent(event) @@ -188,7 +284,12 @@ def set_allow_folders(self, allow=None): def set_allowed_extensions(self, extensions=None): if extensions is not None: - extensions = set(extensions) + _extensions = set() + for ext in set(extensions): + if not ext.startswith("."): + ext = ".{}".format(ext) + _extensions.add(ext.lower()) + extensions = _extensions if self._allowed_extensions != extensions: self._allowed_extensions = extensions @@ -444,7 +545,7 @@ def __init__(self, single_item, allow_sequences, parent): super(FilesWidget, self).__init__(parent) self.setAcceptDrops(True) - empty_widget = DropEmpty(self) + empty_widget = DropEmpty(single_item, allow_sequences, self) files_model = FilesModel(single_item, allow_sequences) files_proxy_model = FilesProxyModel() @@ -519,6 +620,8 @@ def current_value(self): def set_filters(self, folders_allowed, exts_filter): self._files_proxy_model.set_allow_folders(folders_allowed) self._files_proxy_model.set_allowed_extensions(exts_filter) + self._empty_widget.set_extensions(exts_filter) + self._empty_widget.set_allow_folders(folders_allowed) def _on_rows_inserted(self, parent_index, start_row, end_row): for row in range(start_row, end_row + 1): From 462807c2726b0cff2e351db8edc759114fb52cc3 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Fri, 24 Jun 2022 19:19:25 +0200 Subject: [PATCH 42/52] removed unnecessary widgets --- .../widgets/attribute_defs/files_widget.py | 59 ++++++++----------- 1 file changed, 23 insertions(+), 36 deletions(-) diff --git a/openpype/widgets/attribute_defs/files_widget.py b/openpype/widgets/attribute_defs/files_widget.py index 24e3f4bb251..af5a1d130b2 100644 --- a/openpype/widgets/attribute_defs/files_widget.py +++ b/openpype/widgets/attribute_defs/files_widget.py @@ -38,20 +38,8 @@ def __init__(self, single_item, allow_sequences, parent): drop_label_widget = QtWidgets.QLabel("Drag & Drop files here", self) - detail_widget = QtWidgets.QWidget(self) - items_label_widget = SupportLabel(detail_widget) - extensions_label_widget = 
SupportLabel(detail_widget) - extensions_label_widget.setWordWrap(True) - - detail_layout = QtWidgets.QVBoxLayout(detail_widget) - detail_layout.setContentsMargins(0, 0, 0, 0) - detail_layout.addStretch(1) - detail_layout.addWidget( - items_label_widget, 0, alignment=QtCore.Qt.AlignCenter - ) - detail_layout.addWidget( - extensions_label_widget, 0, alignment=QtCore.Qt.AlignCenter - ) + items_label_widget = SupportLabel(self) + items_label_widget.setWordWrap(True) layout = QtWidgets.QVBoxLayout(self) layout.setContentsMargins(0, 0, 0, 0) @@ -59,17 +47,17 @@ def __init__(self, single_item, allow_sequences, parent): layout.addWidget( drop_label_widget, 0, alignment=QtCore.Qt.AlignCenter ) - layout.addWidget(detail_widget, 1) + layout.addStretch(1) + layout.addWidget( + items_label_widget, 0, alignment=QtCore.Qt.AlignCenter + ) layout.addSpacing(10) for widget in ( - detail_widget, drop_label_widget, items_label_widget, - extensions_label_widget, ): - if isinstance(widget, QtWidgets.QLabel): - widget.setAlignment(QtCore.Qt.AlignCenter) + widget.setAlignment(QtCore.Qt.AlignCenter) widget.setAttribute(QtCore.Qt.WA_TranslucentBackground) self._single_item = single_item @@ -79,7 +67,6 @@ def __init__(self, single_item, allow_sequences, parent): self._drop_label_widget = drop_label_widget self._items_label_widget = items_label_widget - self._extensions_label_widget = extensions_label_widget self.set_allow_folders(False) @@ -103,27 +90,29 @@ def set_allow_folders(self, allowed): self._update_items_label() def _update_items_label(self): - extensions_label = "" - if self._allowed_extensions: - extensions_label = ", ".join(sorted(self._allowed_extensions)) - allowed_items = [] if self._allow_folders: allowed_items.append("folder") - if extensions_label: + if self._allowed_extensions: allowed_items.append("file") if self._allow_sequences: allowed_items.append("sequence") - num_label = "Single" if not self._single_item: - num_label = "Multiple" allowed_items = [item + "s" for item in allowed_items] if not allowed_items: - allowed_items_label = "" - elif len(allowed_items) == 1: + self._items_label_widget.setText( + "It is not allowed to add anything here!" + ) + return + + items_label = "Multiple " + if self._single_item: + items_label = "Single " + + if len(allowed_items) == 1: allowed_items_label = allowed_items[0] elif len(allowed_items) == 2: allowed_items_label = " or ".join(allowed_items) @@ -133,15 +122,13 @@ def _update_items_label(self): allowed_items.append(new_last_item) allowed_items_label = ", ".join(allowed_items) - if allowed_items_label: - items_label = "{} {}".format(num_label, allowed_items_label) - if extensions_label: - items_label += " of" - else: - items_label = "It is not allowed to add anything here!" 
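        # Worked example, using hypothetical inputs rather than values from the diff:
        # with single_item=False, folders disallowed, sequences allowed and the
        # extension set {"exr", "jpg"}, the assembly below produces
        #   "Multiple files or sequences of\nexr, jpg"
        # which is shown beneath the "Drag & Drop files here" hint.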
+ items_label += allowed_items_label + if self._allowed_extensions: + items_label += " of\n{}".format( + ", ".join(sorted(self._allowed_extensions)) + ) self._items_label_widget.setText(items_label) - self._extensions_label_widget.setText(extensions_label) def paintEvent(self, event): super(DropEmpty, self).paintEvent(event) From 3e6856b9ba01856309088df81c7dd14e1e72bf98 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Sat, 25 Jun 2022 03:51:29 +0000 Subject: [PATCH 43/52] [Automated] Bump version --- CHANGELOG.md | 38 ++++++++++++++------------------------ openpype/version.py | 2 +- pyproject.toml | 2 +- 3 files changed, 16 insertions(+), 26 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index f0a9a9651de..aa720137a3c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,29 +1,39 @@ # Changelog -## [3.11.2-nightly.1](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.12.0-nightly.1](https://github.com/pypeclub/OpenPype/tree/HEAD) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.11.1...HEAD) ### 📖 Documentation +- Linux: update OIIO package [\#3401](https://github.com/pypeclub/OpenPype/pull/3401) - General: Add ability to change user value for templates [\#3366](https://github.com/pypeclub/OpenPype/pull/3366) - Feature/multiverse [\#3350](https://github.com/pypeclub/OpenPype/pull/3350) **🚀 Enhancements** - Hosts: More options for in-host callbacks [\#3357](https://github.com/pypeclub/OpenPype/pull/3357) -- TVPaint: Extractor use mark in/out range to render [\#3308](https://github.com/pypeclub/OpenPype/pull/3308) - Maya: Allow more data to be published along camera 🎥 [\#3304](https://github.com/pypeclub/OpenPype/pull/3304) **🐛 Bug fixes** +- Nuke: Collect representation files based on Write [\#3407](https://github.com/pypeclub/OpenPype/pull/3407) +- General: Filter representations before integration start [\#3398](https://github.com/pypeclub/OpenPype/pull/3398) +- Maya: look collector typo [\#3392](https://github.com/pypeclub/OpenPype/pull/3392) - TVPaint: Make sure exit code is set to not None [\#3382](https://github.com/pypeclub/OpenPype/pull/3382) - Maya: vray device aspect ratio fix [\#3381](https://github.com/pypeclub/OpenPype/pull/3381) - Harmony: added unc path to zifile command in Harmony [\#3372](https://github.com/pypeclub/OpenPype/pull/3372) +- Standalone: settings improvements [\#3355](https://github.com/pypeclub/OpenPype/pull/3355) +- Nuke: Load full model hierarchy by default [\#3328](https://github.com/pypeclub/OpenPype/pull/3328) **🔀 Refactored code** +- Kitsu: renaming to plural func sync\_all\_projects [\#3397](https://github.com/pypeclub/OpenPype/pull/3397) +- Hiero: Use client query functions [\#3393](https://github.com/pypeclub/OpenPype/pull/3393) +- Nuke: Use client query functions [\#3391](https://github.com/pypeclub/OpenPype/pull/3391) +- Maya: Use client query functions [\#3385](https://github.com/pypeclub/OpenPype/pull/3385) - Harmony: Use client query functions [\#3378](https://github.com/pypeclub/OpenPype/pull/3378) +- Celaction: Use client query functions [\#3376](https://github.com/pypeclub/OpenPype/pull/3376) - Photoshop: Use client query functions [\#3375](https://github.com/pypeclub/OpenPype/pull/3375) - AfterEffects: Use client query functions [\#3374](https://github.com/pypeclub/OpenPype/pull/3374) - TVPaint: Use client query functions [\#3340](https://github.com/pypeclub/OpenPype/pull/3340) @@ -49,7 +59,6 @@ - Ftrack: Removed requirement of pypeclub role from default settings [\#3354](https://github.com/pypeclub/OpenPype/pull/3354) - 
Kitsu: Prevent crash on missing frames information [\#3352](https://github.com/pypeclub/OpenPype/pull/3352) - Ftrack: Open browser from tray [\#3320](https://github.com/pypeclub/OpenPype/pull/3320) -- Enhancement: More control over thumbnail processing. [\#3259](https://github.com/pypeclub/OpenPype/pull/3259) **🐛 Bug fixes** @@ -63,6 +72,7 @@ - nuke: adding extract thumbnail settings 3.10 [\#3347](https://github.com/pypeclub/OpenPype/pull/3347) - General: Fix last version function [\#3345](https://github.com/pypeclub/OpenPype/pull/3345) - Deadline: added OPENPYPE\_MONGO to filter [\#3336](https://github.com/pypeclub/OpenPype/pull/3336) +- Nuke: fixing farm publishing if review is disabled [\#3306](https://github.com/pypeclub/OpenPype/pull/3306) **🔀 Refactored code** @@ -90,15 +100,13 @@ - General: Updated windows oiio tool [\#3268](https://github.com/pypeclub/OpenPype/pull/3268) - Unreal: add support for skeletalMesh and staticMesh to loaders [\#3267](https://github.com/pypeclub/OpenPype/pull/3267) - Maya: reference loaders could store placeholder in referenced url [\#3264](https://github.com/pypeclub/OpenPype/pull/3264) -- TVPaint: Init file for TVPaint worker also handle guideline images [\#3250](https://github.com/pypeclub/OpenPype/pull/3250) -- Nuke: Change default icon path in settings [\#3247](https://github.com/pypeclub/OpenPype/pull/3247) **🐛 Bug fixes** +- General: Handle empty source key on instance [\#3342](https://github.com/pypeclub/OpenPype/pull/3342) - Houdini: Fix Houdini VDB manage update wrong file attribute name [\#3322](https://github.com/pypeclub/OpenPype/pull/3322) - Nuke: anatomy compatibility issue hacks [\#3321](https://github.com/pypeclub/OpenPype/pull/3321) - hiero: otio p3 compatibility issue - metadata on effect use update 3.11 [\#3314](https://github.com/pypeclub/OpenPype/pull/3314) -- Nuke: fixing farm publishing if review is disabled [\#3306](https://github.com/pypeclub/OpenPype/pull/3306) - General: Vendorized modules for Python 2 and update poetry lock [\#3305](https://github.com/pypeclub/OpenPype/pull/3305) - Fix - added local targets to install host [\#3303](https://github.com/pypeclub/OpenPype/pull/3303) - Settings: Add missing default settings for nuke gizmo [\#3301](https://github.com/pypeclub/OpenPype/pull/3301) @@ -109,9 +117,6 @@ - Hiero: add support for task tags 3.10.x [\#3279](https://github.com/pypeclub/OpenPype/pull/3279) - General: Fix Oiio tool path resolving [\#3278](https://github.com/pypeclub/OpenPype/pull/3278) - Maya: Fix udim support for e.g. 
uppercase \ tag [\#3266](https://github.com/pypeclub/OpenPype/pull/3266) -- Nuke: bake reformat was failing on string type [\#3261](https://github.com/pypeclub/OpenPype/pull/3261) -- Maya: hotfix Pxr multitexture in looks [\#3260](https://github.com/pypeclub/OpenPype/pull/3260) -- Unreal: Fix Camera Loading if Layout is missing [\#3255](https://github.com/pypeclub/OpenPype/pull/3255) **🔀 Refactored code** @@ -127,21 +132,6 @@ [Full Changelog](https://github.com/pypeclub/OpenPype/compare/CI/3.10.0-nightly.6...3.10.0) -**🚀 Enhancements** - -- Maya: FBX camera export [\#3253](https://github.com/pypeclub/OpenPype/pull/3253) -- General: updating common vendor `scriptmenu` to 1.5.2 [\#3246](https://github.com/pypeclub/OpenPype/pull/3246) - -**🐛 Bug fixes** - -- nuke: use framerange issue [\#3254](https://github.com/pypeclub/OpenPype/pull/3254) -- Ftrack: Chunk sizes for queries has minimal condition [\#3244](https://github.com/pypeclub/OpenPype/pull/3244) - -**Merged pull requests:** - -- Harmony: message length in 21.1 [\#3257](https://github.com/pypeclub/OpenPype/pull/3257) -- Harmony: 21.1 fix [\#3249](https://github.com/pypeclub/OpenPype/pull/3249) - ## [3.9.8](https://github.com/pypeclub/OpenPype/tree/3.9.8) (2022-05-19) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.9.7...3.9.8) diff --git a/openpype/version.py b/openpype/version.py index 79e3b445f97..a30bca9f0fa 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.11.2-nightly.1" +__version__ = "3.12.0-nightly.1" diff --git a/pyproject.toml b/pyproject.toml index 3a5acb84907..47e6453551c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "OpenPype" -version = "3.11.2-nightly.1" # OpenPype +version = "3.12.0-nightly.1" # OpenPype description = "Open VFX and Animation pipeline with support." 
authors = ["OpenPype Team "] license = "MIT License" From 070e1fbe7eeae73372439d4a479960d6f5f11701 Mon Sep 17 00:00:00 2001 From: "clement.hector" Date: Sat, 25 Jun 2022 17:40:02 +0200 Subject: [PATCH 44/52] change default project_folder_structure --- openpype/settings/defaults/project_settings/global.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/settings/defaults/project_settings/global.json b/openpype/settings/defaults/project_settings/global.json index 9c0c6f69585..68a7b4966db 100644 --- a/openpype/settings/defaults/project_settings/global.json +++ b/openpype/settings/defaults/project_settings/global.json @@ -333,7 +333,7 @@ ] } }, - "project_folder_structure": "{\"__project_root__\": {\"prod\": {}, \"resources\": {\"footage\": {\"plates\": {}, \"offline\": {}}, \"audio\": {}, \"art_dept\": {}}, \"editorial\": {}, \"assets[ftrack.Library]\": {\"characters[ftrack]\": {}, \"locations[ftrack]\": {}}, \"shots[ftrack.Sequence]\": {\"scripts\": {}, \"editorial[ftrack.Folder]\": {}}}}", + "project_folder_structure": "{\"__project_root__\": {\"prod\": {}, \"resources\": {\"footage\": {\"plates\": {}, \"offline\": {}}, \"audio\": {}, \"art_dept\": {}}, \"editorial\": {}, \"assets\": {\"characters\": {}, \"locations\": {}}, \"shots\": {}}}", "sync_server": { "enabled": false, "config": { From 37c98c045e7047a6e35ee807ba476a916c04a84b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 27 Jun 2022 10:14:43 +0200 Subject: [PATCH 45/52] added spacing --- openpype/widgets/attribute_defs/files_widget.py | 1 + 1 file changed, 1 insertion(+) diff --git a/openpype/widgets/attribute_defs/files_widget.py b/openpype/widgets/attribute_defs/files_widget.py index af5a1d130b2..3135da66917 100644 --- a/openpype/widgets/attribute_defs/files_widget.py +++ b/openpype/widgets/attribute_defs/files_widget.py @@ -47,6 +47,7 @@ def __init__(self, single_item, allow_sequences, parent): layout.addWidget( drop_label_widget, 0, alignment=QtCore.Qt.AlignCenter ) + layout.addSpacing(10) layout.addStretch(1) layout.addWidget( items_label_widget, 0, alignment=QtCore.Qt.AlignCenter From 798734fdd8906fc1f2c9fe67e131bc8f8b85ff25 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 27 Jun 2022 10:37:33 +0200 Subject: [PATCH 46/52] fix keyword argument --- openpype/hosts/nuke/api/lib.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openpype/hosts/nuke/api/lib.py b/openpype/hosts/nuke/api/lib.py index 4b697472758..45a1e727039 100644 --- a/openpype/hosts/nuke/api/lib.py +++ b/openpype/hosts/nuke/api/lib.py @@ -777,7 +777,7 @@ def check_inventory_versions(): # Find representations based on found containers repre_docs = get_representations( project_name, - repre_ids=repre_ids, + representation_ids=repre_ids, fields=["_id", "parent"] ) # Store representations by id and collect version ids From bd07eaf262ddd017eb9fc80541df60b06e048a50 Mon Sep 17 00:00:00 2001 From: OpenPype Date: Mon, 27 Jun 2022 09:46:14 +0000 Subject: [PATCH 47/52] [Automated] Bump version --- CHANGELOG.md | 5 +++-- openpype/version.py | 2 +- pyproject.toml | 2 +- 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index aa720137a3c..d118bbff546 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,6 @@ # Changelog -## [3.12.0-nightly.1](https://github.com/pypeclub/OpenPype/tree/HEAD) +## [3.12.0-nightly.2](https://github.com/pypeclub/OpenPype/tree/HEAD) [Full Changelog](https://github.com/pypeclub/OpenPype/compare/3.11.1...HEAD) @@ -8,7 +8,7 @@ - Linux: update OIIO package 
[\#3401](https://github.com/pypeclub/OpenPype/pull/3401) - General: Add ability to change user value for templates [\#3366](https://github.com/pypeclub/OpenPype/pull/3366) -- Feature/multiverse [\#3350](https://github.com/pypeclub/OpenPype/pull/3350) +- Multiverse: expose some settings to GUI [\#3350](https://github.com/pypeclub/OpenPype/pull/3350) **🚀 Enhancements** @@ -17,6 +17,7 @@ **🐛 Bug fixes** +- Nuke: Fix keyword argument in query function [\#3414](https://github.com/pypeclub/OpenPype/pull/3414) - Nuke: Collect representation files based on Write [\#3407](https://github.com/pypeclub/OpenPype/pull/3407) - General: Filter representations before integration start [\#3398](https://github.com/pypeclub/OpenPype/pull/3398) - Maya: look collector typo [\#3392](https://github.com/pypeclub/OpenPype/pull/3392) diff --git a/openpype/version.py b/openpype/version.py index a30bca9f0fa..02f928d83c8 100644 --- a/openpype/version.py +++ b/openpype/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- """Package declaring Pype version.""" -__version__ = "3.12.0-nightly.1" +__version__ = "3.12.0-nightly.2" diff --git a/pyproject.toml b/pyproject.toml index 47e6453551c..a159559763d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "OpenPype" -version = "3.12.0-nightly.1" # OpenPype +version = "3.12.0-nightly.2" # OpenPype description = "Open VFX and Animation pipeline with support." authors = ["OpenPype Team "] license = "MIT License" From c3ffa95eceb7023c9c57853ffe5eb86e95896cbe Mon Sep 17 00:00:00 2001 From: "Allan I. A" <76656700+Allan-I@users.noreply.github.com> Date: Mon, 27 Jun 2022 13:16:35 +0300 Subject: [PATCH 48/52] Fix pyenv typo --- website/docs/dev_build.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/website/docs/dev_build.md b/website/docs/dev_build.md index c797326ce65..4e80f6e19d5 100644 --- a/website/docs/dev_build.md +++ b/website/docs/dev_build.md @@ -214,7 +214,7 @@ $ brew install cmake 3) Install [pyenv](https://github.com/pyenv/pyenv): ```shell $ brew install pyenv -$ echo 'eval "$(pypenv init -)"' >> ~/.zshrc +$ echo 'eval "$(pyenv init -)"' >> ~/.zshrc $ pyenv init $ exec "$SHELL" $ PATH=$(pyenv root)/shims:$PATH From 8aa5770c0cb8e72ad95d16169080284b025f4510 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ond=C5=99ej=20Samohel?= <33513211+antirotor@users.noreply.github.com> Date: Mon, 27 Jun 2022 14:09:23 +0200 Subject: [PATCH 49/52] :bug: fix resurfacing of avalon import --- openpype/lib/path_tools.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/lib/path_tools.py b/openpype/lib/path_tools.py index a016aa5c25d..795866756a0 100644 --- a/openpype/lib/path_tools.py +++ b/openpype/lib/path_tools.py @@ -11,7 +11,7 @@ from .anatomy import Anatomy from .profiles_filtering import filter_profiles -import avalon.api +from openpype.pipeline import AvalonMongoDB log = logging.getLogger(__name__) @@ -204,7 +204,7 @@ def concatenate_splitted_paths(split_paths, anatomy): def get_format_data(anatomy): - dbcon = avalon.api.AvalonMongoDB() + dbcon = AvalonMongoDB() dbcon.Session["AVALON_PROJECT"] = anatomy.project_name project_doc = dbcon.find_one({"type": "project"}) project_code = project_doc["data"]["code"] From aa9183b2c20ff07485b3987e8fa84504833e13c1 Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 27 Jun 2022 14:23:42 +0200 Subject: [PATCH 50/52] use client query function 'get_project' --- openpype/lib/path_tools.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git 
a/openpype/lib/path_tools.py b/openpype/lib/path_tools.py index 795866756a0..caad20f4d64 100644 --- a/openpype/lib/path_tools.py +++ b/openpype/lib/path_tools.py @@ -6,13 +6,12 @@ import six import platform +from openpype.client import get_project from openpype.settings import get_project_settings from .anatomy import Anatomy from .profiles_filtering import filter_profiles -from openpype.pipeline import AvalonMongoDB - log = logging.getLogger(__name__) @@ -204,9 +203,7 @@ def concatenate_splitted_paths(split_paths, anatomy): def get_format_data(anatomy): - dbcon = AvalonMongoDB() - dbcon.Session["AVALON_PROJECT"] = anatomy.project_name - project_doc = dbcon.find_one({"type": "project"}) + project_doc = get_project(anatomy.project_name, fields=["data.code"]) project_code = project_doc["data"]["code"] return { From 1da6eef8d3501644cfb9751e45082aba85899e3b Mon Sep 17 00:00:00 2001 From: Jakub Trllo Date: Mon, 27 Jun 2022 14:54:20 +0200 Subject: [PATCH 51/52] fix subset name change on change of creator plugin --- openpype/tools/publisher/widgets/create_dialog.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/openpype/tools/publisher/widgets/create_dialog.py b/openpype/tools/publisher/widgets/create_dialog.py index 53bbef8b755..3a68835dc73 100644 --- a/openpype/tools/publisher/widgets/create_dialog.py +++ b/openpype/tools/publisher/widgets/create_dialog.py @@ -977,7 +977,12 @@ def _set_creator(self, creator): elif variant: self.variant_hints_menu.addAction(variant) - self.variant_input.setText(default_variant or "Main") + variant_text = default_variant or "Main" + # Make sure subset name is updated to new plugin + if variant_text == self.variant_input.text(): + self._on_variant_change() + else: + self.variant_input.setText(variant_text) def _on_variant_widget_resize(self): self.variant_hints_btn.setFixedHeight(self.variant_input.height()) From 0c674fcc61a636ebfc5e888e0f0f782dffa366f1 Mon Sep 17 00:00:00 2001 From: Milan Kolar Date: Tue, 28 Jun 2022 09:15:09 +0200 Subject: [PATCH 52/52] expand spacing of the drop zone --- openpype/widgets/attribute_defs/files_widget.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/openpype/widgets/attribute_defs/files_widget.py b/openpype/widgets/attribute_defs/files_widget.py index 3135da66917..698a91a1a57 100644 --- a/openpype/widgets/attribute_defs/files_widget.py +++ b/openpype/widgets/attribute_defs/files_widget.py @@ -43,11 +43,11 @@ def __init__(self, single_item, allow_sequences, parent): layout = QtWidgets.QVBoxLayout(self) layout.setContentsMargins(0, 0, 0, 0) - layout.addSpacing(10) + layout.addSpacing(20) layout.addWidget( drop_label_widget, 0, alignment=QtCore.Qt.AlignCenter ) - layout.addSpacing(10) + layout.addSpacing(30) layout.addStretch(1) layout.addWidget( items_label_widget, 0, alignment=QtCore.Qt.AlignCenter
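A minimal standalone sketch of the Qt behaviour behind the create-dialog fix above (the PySide2 import, widget name and print call are assumptions made for illustration; the dialog's actual wiring may differ): QLineEdit.setText() emits textChanged only when the value really changes, so switching creator plugins while keeping the same variant text never refreshes the subset name unless the handler is invoked directly, which is what the patch does.

    from PySide2 import QtWidgets

    app = QtWidgets.QApplication([])
    variant_input = QtWidgets.QLineEdit("Main")
    # Stand-in for the subset-name rebuild that listens on the variant field.
    variant_input.textChanged.connect(
        lambda text: print("rebuilding subset name for variant:", text))

    variant_input.setText("Main")   # unchanged value -> no textChanged emitted
    variant_input.setText("Hero")   # changed value -> the handler runs once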