diff --git a/client/ayon_core/tools/publisher/models/publish.py b/client/ayon_core/tools/publisher/models/publish.py
index da7b64ceae..ef207bfb79 100644
--- a/client/ayon_core/tools/publisher/models/publish.py
+++ b/client/ayon_core/tools/publisher/models/publish.py
@@ -4,7 +4,7 @@
 import traceback
 import collections
 from functools import partial
-from typing import Optional, Dict, List, Union, Any, Iterable, Literal
+from typing import Optional, Dict, List, Union, Any, Iterable
 
 import arrow
 import pyblish.plugin
@@ -22,15 +22,6 @@
 # Define constant for plugin orders offset
 PLUGIN_ORDER_OFFSET = 0.5
 
-ActionFilterType = Literal[
-    "all",
-    "notProcessed",
-    "processed",
-    "failed",
-    "warning",
-    "failedOrWarning",
-    "succeeded"
-]
 
 
 class PublishReportMaker:
@@ -318,8 +309,10 @@ class PublishPluginActionItem:
         action_id (str): Action id.
         plugin_id (str): Plugin id.
         active (bool): Action is active.
-        on_filter (ActionFilterType): Actions have 'on' attribute which define
-            when can be action triggered (e.g. 'all', 'failed', ...).
+        on_filter (Literal["all", "notProcessed", "processed", "failed",
+            "warning", "failedOrWarning", "succeeded"]): Actions have an 'on'
+            attribute which defines when the action can be triggered
+            (e.g. 'all', 'failed', ...).
         label (str): Action's label.
         icon (Optional[str]) Action's icon.
     """
@@ -329,14 +322,14 @@ def __init__(
         self,
         action_id: str,
         plugin_id: str,
         active: bool,
-        on_filter: ActionFilterType,
+        on_filter: str,
         label: str,
         icon: Optional[str],
     ):
         self.action_id: str = action_id
         self.plugin_id: str = plugin_id
         self.active: bool = active
-        self.on_filter: ActionFilterType = on_filter
+        self.on_filter: str = on_filter
         self.label: str = label
         self.icon: Optional[str] = icon
diff --git a/server_addon/nuke/client/ayon_nuke/__init__.py b/server_addon/nuke/client/ayon_nuke/__init__.py
deleted file mode 100644
index 29ea039739..0000000000
--- a/server_addon/nuke/client/ayon_nuke/__init__.py
+++ /dev/null
@@ -1,13 +0,0 @@
-from .version import __version__
-from .addon import (
-    NUKE_ROOT_DIR,
-    NukeAddon,
-)
-
-
-__all__ = (
-    "__version__",
-
-    "NUKE_ROOT_DIR",
-    "NukeAddon",
-)
diff --git a/server_addon/nuke/client/ayon_nuke/addon.py b/server_addon/nuke/client/ayon_nuke/addon.py
deleted file mode 100644
index ccb7379c0f..0000000000
--- a/server_addon/nuke/client/ayon_nuke/addon.py
+++ /dev/null
@@ -1,74 +0,0 @@
-import os
-import platform
-from ayon_core.addon import AYONAddon, IHostAddon
-
-from .version import __version__
-
-NUKE_ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
-
-
-class NukeAddon(AYONAddon, IHostAddon):
-    name = "nuke"
-    version = __version__
-    host_name = "nuke"
-
-    def add_implementation_envs(self, env, _app):
-        # Add requirements to NUKE_PATH
-        new_nuke_paths = [
-            os.path.join(NUKE_ROOT_DIR, "startup")
-        ]
-        old_nuke_path = env.get("NUKE_PATH") or ""
-        for path in old_nuke_path.split(os.pathsep):
-            if not path:
-                continue
-
-            norm_path = os.path.normpath(path)
-            if norm_path not in new_nuke_paths:
-                new_nuke_paths.append(norm_path)
-
-        env["NUKE_PATH"] = os.pathsep.join(new_nuke_paths)
-        # Remove auto screen scale factor for Qt
-        # - let Nuke decide its value
-        env.pop("QT_AUTO_SCREEN_SCALE_FACTOR", None)
-        # Remove tkinter library paths if they are set
-        env.pop("TK_LIBRARY", None)
-        env.pop("TCL_LIBRARY", None)
-
-        # Add vendor to PYTHONPATH
-        python_path = env["PYTHONPATH"]
-        python_path_parts = []
-        if python_path:
-            python_path_parts = python_path.split(os.pathsep)
-        vendor_path = os.path.join(NUKE_ROOT_DIR, "vendor")
- 
python_path_parts.insert(0, vendor_path) - env["PYTHONPATH"] = os.pathsep.join(python_path_parts) - - # Set default values if are not already set via settings - defaults = { - "LOGLEVEL": "DEBUG" - } - for key, value in defaults.items(): - if not env.get(key): - env[key] = value - - # Try to add QuickTime to PATH - quick_time_path = "C:/Program Files (x86)/QuickTime/QTSystem" - if platform.system() == "windows" and os.path.exists(quick_time_path): - path_value = env.get("PATH") or "" - path_paths = [ - path - for path in path_value.split(os.pathsep) - if path - ] - path_paths.append(quick_time_path) - env["PATH"] = os.pathsep.join(path_paths) - - def get_launch_hook_paths(self, app): - if app.host_name != self.host_name: - return [] - return [ - os.path.join(NUKE_ROOT_DIR, "hooks") - ] - - def get_workfile_extensions(self): - return [".nk"] diff --git a/server_addon/nuke/client/ayon_nuke/api/__init__.py b/server_addon/nuke/client/ayon_nuke/api/__init__.py deleted file mode 100644 index caefba766f..0000000000 --- a/server_addon/nuke/client/ayon_nuke/api/__init__.py +++ /dev/null @@ -1,104 +0,0 @@ -from .workio import ( - file_extensions, - has_unsaved_changes, - save_file, - open_file, - current_file, - work_root, -) -from .command import ( - viewer_update_and_undo_stop -) -from .plugin import ( - NukeCreator, - NukeWriteCreator, - NukeCreatorError, - get_instance_group_node_childs, - get_colorspace_from_node -) -from .pipeline import ( - NukeHost, - - ls, - - list_instances, - remove_instance, - select_instance, - - containerise, - parse_container, - update_container, - -) -from .lib import ( - INSTANCE_DATA_KNOB, - ROOT_DATA_KNOB, - maintained_selection, - reset_selection, - select_nodes, - get_view_process_node, - duplicate_node, - convert_knob_value_to_correct_type, - get_node_data, - set_node_data, - update_node_data, - create_write_node, - link_knobs -) -from .utils import ( - colorspace_exists_on_node, - get_colorspace_list -) - -from .actions import ( - SelectInvalidAction, - SelectInstanceNodeAction -) - -__all__ = ( - "file_extensions", - "has_unsaved_changes", - "save_file", - "open_file", - "current_file", - "work_root", - - "viewer_update_and_undo_stop", - - "NukeCreator", - "NukeWriteCreator", - "NukeCreatorError", - "NukeHost", - "get_instance_group_node_childs", - "get_colorspace_from_node", - - "ls", - - "list_instances", - "remove_instance", - "select_instance", - - "containerise", - "parse_container", - "update_container", - - "INSTANCE_DATA_KNOB", - "ROOT_DATA_KNOB", - "maintained_selection", - "reset_selection", - "select_nodes", - "get_view_process_node", - "duplicate_node", - "convert_knob_value_to_correct_type", - "get_node_data", - "set_node_data", - "update_node_data", - "create_write_node", - "link_knobs", - - "colorspace_exists_on_node", - "get_colorspace_list", - - "SelectInvalidAction", - "SelectInstanceNodeAction" -) diff --git a/server_addon/nuke/client/ayon_nuke/api/actions.py b/server_addon/nuke/client/ayon_nuke/api/actions.py deleted file mode 100644 index a7bcb5b44f..0000000000 --- a/server_addon/nuke/client/ayon_nuke/api/actions.py +++ /dev/null @@ -1,77 +0,0 @@ -import pyblish.api - -from ayon_core.pipeline.publish import get_errored_instances_from_context -from .lib import ( - reset_selection, - select_nodes -) - - -class SelectInvalidAction(pyblish.api.Action): - """Select invalid nodes in Nuke when plug-in failed. - - To retrieve the invalid nodes this assumes a static `get_invalid()` - method is available on the plugin. 
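
# --- Editor's example (not part of the original patch) ---------------------
# A minimal sketch of the contract described above: SelectInvalidAction
# expects the failed plug-in to expose a static or class method
# `get_invalid(instance)` returning the offending nuke nodes. The validator
# below is hypothetical; it assumes `pyblish` is importable and that
# `SelectInvalidAction` (defined in this module) is in scope.
import pyblish.api


class ValidateNodeNameHasNoSpaces(pyblish.api.InstancePlugin):
    """Hypothetical validator whose failures SelectInvalidAction can select."""

    order = pyblish.api.ValidatorOrder
    label = "Validate Node Name"
    actions = [SelectInvalidAction]

    @classmethod
    def get_invalid(cls, instance):
        # Return the nodes that caused the failure so the action can
        # select them; an empty list means nothing is selectable.
        node = instance.data.get("transientData", {}).get("node")
        if node and " " in node.name():
            return [node]
        return []

    def process(self, instance):
        if self.get_invalid(instance):
            raise RuntimeError("Node names must not contain spaces.")
# ---------------------------------------------------------------------------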
- - """ - label = "Select invalid nodes" - on = "failed" # This action is only available on a failed plug-in - icon = "search" # Icon from Awesome Icon - - def process(self, context, plugin): - - errored_instances = get_errored_instances_from_context(context, - plugin=plugin) - - # Get the invalid nodes for the plug-ins - self.log.info("Finding invalid nodes..") - invalid = set() - for instance in errored_instances: - invalid_nodes = plugin.get_invalid(instance) - - if invalid_nodes: - if isinstance(invalid_nodes, (list, tuple)): - invalid.update(invalid_nodes) - else: - self.log.warning("Plug-in returned to be invalid, " - "but has no selectable nodes.") - - if invalid: - self.log.info("Selecting invalid nodes: {}".format(invalid)) - reset_selection() - select_nodes(invalid) - else: - self.log.info("No invalid nodes found.") - - -class SelectInstanceNodeAction(pyblish.api.Action): - """Select instance node for failed plugin.""" - label = "Select instance node" - on = "failed" # This action is only available on a failed plug-in - icon = "mdi.cursor-default-click" - - def process(self, context, plugin): - - # Get the errored instances for the plug-in - errored_instances = get_errored_instances_from_context( - context, plugin) - - # Get the invalid nodes for the plug-ins - self.log.info("Finding instance nodes..") - nodes = set() - for instance in errored_instances: - instance_node = instance.data.get("transientData", {}).get("node") - if not instance_node: - raise RuntimeError( - "No transientData['node'] found on instance: {}".format( - instance - ) - ) - nodes.add(instance_node) - - if nodes: - self.log.info("Selecting instance nodes: {}".format(nodes)) - reset_selection() - select_nodes(nodes) - else: - self.log.info("No instance nodes found.") diff --git a/server_addon/nuke/client/ayon_nuke/api/command.py b/server_addon/nuke/client/ayon_nuke/api/command.py deleted file mode 100644 index 2f772469d8..0000000000 --- a/server_addon/nuke/client/ayon_nuke/api/command.py +++ /dev/null @@ -1,21 +0,0 @@ -import logging -import contextlib -import nuke - -log = logging.getLogger(__name__) - - -@contextlib.contextmanager -def viewer_update_and_undo_stop(): - """Lock viewer from updating and stop recording undo steps""" - try: - # stop active viewer to update any change - viewer = nuke.activeViewer() - if viewer: - viewer.stop() - else: - log.warning("No available active Viewer") - nuke.Undo.disable() - yield - finally: - nuke.Undo.enable() diff --git a/server_addon/nuke/client/ayon_nuke/api/constants.py b/server_addon/nuke/client/ayon_nuke/api/constants.py deleted file mode 100644 index 110199720f..0000000000 --- a/server_addon/nuke/client/ayon_nuke/api/constants.py +++ /dev/null @@ -1,4 +0,0 @@ -import os - - -ASSIST = bool(os.getenv("NUKEASSIST")) diff --git a/server_addon/nuke/client/ayon_nuke/api/gizmo_menu.py b/server_addon/nuke/client/ayon_nuke/api/gizmo_menu.py deleted file mode 100644 index 435e4a5806..0000000000 --- a/server_addon/nuke/client/ayon_nuke/api/gizmo_menu.py +++ /dev/null @@ -1,92 +0,0 @@ -import os -import re -import nuke - -from ayon_core.lib import Logger - -log = Logger.get_logger(__name__) - - -class GizmoMenu(): - def __init__(self, title, icon=None): - - self.toolbar = self._create_toolbar_menu( - title, - icon=icon - ) - - self._script_actions = [] - - def _create_toolbar_menu(self, name, icon=None): - nuke_node_menu = nuke.menu("Nodes") - return nuke_node_menu.addMenu( - name, - icon=icon - ) - - def _make_menu_path(self, path, icon=None): - parent = self.toolbar - for 
folder in re.split(r"/|\\", path): - if not folder: - continue - existing_menu = parent.findItem(folder) - if existing_menu: - parent = existing_menu - else: - parent = parent.addMenu(folder, icon=icon) - - return parent - - def build_from_configuration(self, configuration): - for menu in configuration: - # Construct parent path else parent is toolbar - parent = self.toolbar - gizmo_toolbar_path = menu.get("gizmo_toolbar_path") - if gizmo_toolbar_path: - parent = self._make_menu_path(gizmo_toolbar_path) - - for item in menu["sub_gizmo_list"]: - assert isinstance(item, dict), "Configuration is wrong!" - - if not item.get("title"): - continue - - item_type = item.get("sourcetype") - - if item_type == "python": - parent.addCommand( - item["title"], - command=str(item["command"]), - icon=item.get("icon"), - shortcut=item.get("shortcut") - ) - elif item_type == "file": - parent.addCommand( - item['title'], - "nuke.createNode('{}')".format(item.get('file_name')), - shortcut=item.get('shortcut') - ) - - # add separator - # Special behavior for separators - elif item_type == "separator": - parent.addSeparator() - - # add submenu - # items should hold a collection of submenu items (dict) - elif item_type == "menu": - # assert "items" in item, "Menu is missing 'items' key" - parent.addMenu( - item['title'], - icon=item.get('icon') - ) - - def add_gizmo_path(self, gizmo_paths): - for gizmo_path in gizmo_paths: - if os.path.isdir(gizmo_path): - for folder in os.listdir(gizmo_path): - if os.path.isdir(os.path.join(gizmo_path, folder)): - nuke.pluginAddPath(os.path.join(gizmo_path, folder)) - nuke.pluginAddPath(gizmo_path) - else: - log.warning("This path doesn't exist: {}".format(gizmo_path)) diff --git a/server_addon/nuke/client/ayon_nuke/api/lib.py b/server_addon/nuke/client/ayon_nuke/api/lib.py deleted file mode 100644 index 6caaed3801..0000000000 --- a/server_addon/nuke/client/ayon_nuke/api/lib.py +++ /dev/null @@ -1,2967 +0,0 @@ -import os -import re -import json -import six -import functools -import warnings -import platform -import tempfile -import contextlib -from collections import OrderedDict - -import nuke -from qtpy import QtCore, QtWidgets -import ayon_api - -from ayon_core.host import HostDirmap -from ayon_core.tools.utils import host_tools -from ayon_core.pipeline.workfile.workfile_template_builder import ( - TemplateProfileNotFound -) -from ayon_core.lib import ( - env_value_to_bool, - Logger, - get_version_from_path, - StringTemplate, -) - -from ayon_core.settings import ( - get_project_settings, - get_current_project_settings, -) -from ayon_core.addon import AddonsManager -from ayon_core.pipeline.template_data import get_template_data_with_names -from ayon_core.pipeline import ( - Anatomy, - get_current_host_name, - get_current_project_name, - get_current_folder_path, - get_current_task_name, - AYON_INSTANCE_ID, - AVALON_INSTANCE_ID, -) -from ayon_core.pipeline.context_tools import ( - get_current_context_custom_workfile_template -) -from ayon_core.pipeline.colorspace import ( - get_current_context_imageio_config_preset -) -from ayon_core.pipeline.workfile import BuildWorkfile -from . 
import gizmo_menu -from .constants import ASSIST - -from .workio import save_file -from .utils import get_node_outputs - -log = Logger.get_logger(__name__) - -MENU_LABEL = os.getenv("AYON_MENU_LABEL") or "AYON" -NODE_TAB_NAME = MENU_LABEL -DATA_GROUP_KEY = "{}DataGroup".format(MENU_LABEL.capitalize()) -EXCLUDED_KNOB_TYPE_ON_READ = ( - 20, # Tab Knob - 26, # Text Knob (But for backward compatibility, still be read - # if value is not an empty string.) -) -JSON_PREFIX = "JSON:::" -ROOT_DATA_KNOB = "publish_context" -INSTANCE_DATA_KNOB = "publish_instance" - - -class DeprecatedWarning(DeprecationWarning): - pass - - -def deprecated(new_destination): - """Mark functions as deprecated. - - It will result in a warning being emitted when the function is used. - """ - - func = None - if callable(new_destination): - func = new_destination - new_destination = None - - def _decorator(decorated_func): - if new_destination is None: - warning_message = ( - " Please check content of deprecated function to figure out" - " possible replacement." - ) - else: - warning_message = " Please replace your usage with '{}'.".format( - new_destination - ) - - @functools.wraps(decorated_func) - def wrapper(*args, **kwargs): - warnings.simplefilter("always", DeprecatedWarning) - warnings.warn( - ( - "Call to deprecated function '{}'" - "\nFunction was moved or removed.{}" - ).format(decorated_func.__name__, warning_message), - category=DeprecatedWarning, - stacklevel=4 - ) - return decorated_func(*args, **kwargs) - return wrapper - - if func is None: - return _decorator - return _decorator(func) - - -class Context: - main_window = None - context_action_item = None - project_name = os.getenv("AYON_PROJECT_NAME") - # Workfile related code - workfiles_launched = False - workfiles_tool_timer = None - - # Seems unused - _project_entity = None - - -def get_main_window(): - """Acquire Nuke's main window""" - if Context.main_window is None: - - top_widgets = QtWidgets.QApplication.topLevelWidgets() - name = "Foundry::UI::DockMainWindow" - for widget in top_widgets: - if ( - widget.inherits("QMainWindow") - and widget.metaObject().className() == name - ): - Context.main_window = widget - break - return Context.main_window - - -def set_node_data(node, knobname, data): - """Write data to node invisible knob - - Will create new in case it doesn't exists - or update the one already created. - - Args: - node (nuke.Node): node object - knobname (str): knob name - data (dict): data to be stored in knob - """ - # if exists then update data - if knobname in node.knobs(): - update_node_data(node, knobname, data) - return - - # else create new - knob_value = JSON_PREFIX + json.dumps(data) - knob = nuke.String_Knob(knobname) - knob.setValue(knob_value) - knob.setFlag(nuke.INVISIBLE) - node.addKnob(knob) - - -def get_node_data(node, knobname): - """Read data from node. - - Args: - node (nuke.Node): node object - knobname (str): knob name - - Returns: - dict: data stored in knob - """ - if knobname not in node.knobs(): - return - - rawdata = node[knobname].getValue() - if ( - isinstance(rawdata, six.string_types) - and rawdata.startswith(JSON_PREFIX) - ): - try: - return json.loads(rawdata[len(JSON_PREFIX):]) - except json.JSONDecodeError: - return - - -def update_node_data(node, knobname, data): - """Update already present data. 
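
# --- Editor's example (not part of the original patch) ---------------------
# A small usage sketch of the three knob-storage helpers above. Data is
# serialized as JSON behind the "JSON:::" prefix into an invisible
# String_Knob, so it round-trips through the saved .nk script. Runs only
# inside a Nuke session; the knob name reuses INSTANCE_DATA_KNOB from this
# module.
import nuke

demo_node = nuke.createNode("NoOp")
set_node_data(demo_node, INSTANCE_DATA_KNOB, {"productName": "renderMain"})
update_node_data(demo_node, INSTANCE_DATA_KNOB, {"active": True})
assert get_node_data(demo_node, INSTANCE_DATA_KNOB) == {
    "productName": "renderMain",
    "active": True,
}
# ---------------------------------------------------------------------------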
- - Args: - node (nuke.Node): node object - knobname (str): knob name - data (dict): data to update knob value - """ - knob = node[knobname] - node_data = get_node_data(node, knobname) or {} - node_data.update(data) - knob_value = JSON_PREFIX + json.dumps(node_data) - knob.setValue(knob_value) - - -class Knobby(object): - """[DEPRECATED] For creating knob which it's type isn't - mapped in `create_knobs` - - Args: - type (string): Nuke knob type name - value: Value to be set with `Knob.setValue`, put `None` if not required - flags (list, optional): Knob flags to be set with `Knob.setFlag` - *args: Args other than knob name for initializing knob class - - """ - - def __init__(self, type, value, flags=None, *args): - self.type = type - self.value = value - self.flags = flags or [] - self.args = args - - def create(self, name, nice=None): - knob_cls = getattr(nuke, self.type) - knob = knob_cls(name, nice, *self.args) - if self.value is not None: - knob.setValue(self.value) - for flag in self.flags: - knob.setFlag(flag) - return knob - - @staticmethod - def nice_naming(key): - """Convert camelCase name into UI Display Name""" - words = re.findall('[A-Z][^A-Z]*', key[0].upper() + key[1:]) - return " ".join(words) - - -def create_knobs(data, tab=None): - """Create knobs by data - - Depending on the type of each dict value and creates the correct Knob. - - Mapped types: - bool: nuke.Boolean_Knob - int: nuke.Int_Knob - float: nuke.Double_Knob - list: nuke.Enumeration_Knob - six.string_types: nuke.String_Knob - - dict: If it's a nested dict (all values are dict), will turn into - A tabs group. Or just a knobs group. - - Args: - data (dict): collection of attributes and their value - tab (string, optional): Knobs' tab name - - Returns: - list: A list of `nuke.Knob` objects - - """ - def nice_naming(key): - """Convert camelCase name into UI Display Name""" - words = re.findall('[A-Z][^A-Z]*', key[0].upper() + key[1:]) - return " ".join(words) - - # Turn key-value pairs into knobs - knobs = list() - - if tab: - knobs.append(nuke.Tab_Knob(tab)) - - for key, value in data.items(): - # Knob name - if isinstance(key, tuple): - name, nice = key - else: - name, nice = key, nice_naming(key) - - # Create knob by value type - if isinstance(value, Knobby): - knobby = value - knob = knobby.create(name, nice) - - elif isinstance(value, float): - knob = nuke.Double_Knob(name, nice) - knob.setValue(value) - - elif isinstance(value, bool): - knob = nuke.Boolean_Knob(name, nice) - knob.setValue(value) - knob.setFlag(nuke.STARTLINE) - - elif isinstance(value, int): - knob = nuke.Int_Knob(name, nice) - knob.setValue(value) - - elif isinstance(value, six.string_types): - knob = nuke.String_Knob(name, nice) - knob.setValue(value) - - elif isinstance(value, list): - knob = nuke.Enumeration_Knob(name, nice, value) - - elif isinstance(value, dict): - if all(isinstance(v, dict) for v in value.values()): - # Create a group of tabs - begain = nuke.BeginTabGroup_Knob() - end = nuke.EndTabGroup_Knob() - begain.setName(name) - end.setName(name + "_End") - knobs.append(begain) - for k, v in value.items(): - knobs += create_knobs(v, tab=k) - knobs.append(end) - else: - # Create a group of knobs - knobs.append(nuke.Tab_Knob( - name, nice, nuke.TABBEGINCLOSEDGROUP)) - knobs += create_knobs(value) - knobs.append( - nuke.Tab_Knob(name + "_End", nice, nuke.TABENDGROUP)) - continue - - else: - raise TypeError("Unsupported type: %r" % type(value)) - - knobs.append(knob) - - return knobs - - -def imprint(node, data, tab=None): - """Store 
attributes with value on node - - Parse user data into Node knobs. - Use `collections.OrderedDict` to ensure knob order. - - Args: - node(nuke.Node): node object from Nuke - data(dict): collection of attributes and their value - - Returns: - None - - Examples: - ``` - import nuke - from ayon_nuke.api import lib - - node = nuke.createNode("NoOp") - data = { - # Regular type of attributes - "myList": ["x", "y", "z"], - "myBool": True, - "myFloat": 0.1, - "myInt": 5, - - # Creating non-default imprint type of knob - "MyFilePath": lib.Knobby("File_Knob", "/file/path"), - "divider": lib.Knobby("Text_Knob", ""), - - # Manual nice knob naming - ("my_knob", "Nice Knob Name"): "some text", - - # dict type will be created as knob group - "KnobGroup": { - "knob1": 5, - "knob2": "hello", - "knob3": ["a", "b"], - }, - - # Nested dict will be created as tab group - "TabGroup": { - "tab1": {"count": 5}, - "tab2": {"isGood": True}, - "tab3": {"direction": ["Left", "Right"]}, - }, - } - lib.imprint(node, data, tab="Demo") - - ``` - - """ - for knob in create_knobs(data, tab): - # If knob name exists we set the value. Technically there could be - # multiple knobs with the same name, but the intent is not to have - # duplicated knobs so we do not account for that. - if knob.name() in node.knobs().keys(): - node[knob.name()].setValue(knob.value()) - else: - node.addKnob(knob) - - -@deprecated -def add_publish_knob(node): - """[DEPRECATED] Add Publish knob to node - - Arguments: - node (nuke.Node): nuke node to be processed - - Returns: - node (nuke.Node): processed nuke node - - """ - if "publish" not in node.knobs(): - body = OrderedDict() - body[("divd", "Publishing")] = Knobby("Text_Knob", '') - body["publish"] = True - imprint(node, body) - return node - - -@deprecated("ayon_nuke.api.lib.set_node_data") -def set_avalon_knob_data(node, data=None, prefix="avalon:"): - """[DEPRECATED] Sets data into nodes's avalon knob - - This function is still used but soon will be deprecated. - Use `set_node_data` instead. - - Arguments: - node (nuke.Node): Nuke node to imprint with data, - data (dict, optional): Data to be imprinted into AvalonTab - prefix (str, optional): filtering prefix - - Returns: - node (nuke.Node) - - Examples: - data = { - 'folderPath': 'sq020sh0280', - 'productType': 'render', - 'productName': 'productMain' - } - """ - data = data or dict() - create = OrderedDict() - - tab_name = NODE_TAB_NAME - editable = ["folderPath", "productName", "name", "namespace"] - - existed_knobs = node.knobs() - - for key, value in data.items(): - knob_name = prefix + key - gui_name = key - - if knob_name in existed_knobs: - # Set value - try: - node[knob_name].setValue(value) - except TypeError: - node[knob_name].setValue(str(value)) - else: - # New knob - name = (knob_name, gui_name) # Hide prefix on GUI - if key in editable: - create[name] = value - else: - create[name] = Knobby("String_Knob", - str(value), - flags=[nuke.READ_ONLY]) - if tab_name in existed_knobs: - tab_name = None - else: - tab = OrderedDict() - warn = Knobby("Text_Knob", "Warning! 
Do not change following data!") - divd = Knobby("Text_Knob", "") - head = [ - (("warn", ""), warn), - (("divd", ""), divd), - ] - tab[DATA_GROUP_KEY] = OrderedDict(head + list(create.items())) - create = tab - - imprint(node, create, tab=tab_name) - return node - - -@deprecated("ayon_nuke.api.lib.get_node_data") -def get_avalon_knob_data(node, prefix="avalon:", create=True): - """[DEPRECATED] Gets a data from nodes's avalon knob - - This function is still used but soon will be deprecated. - Use `get_node_data` instead. - - Arguments: - node (obj): Nuke node to search for data, - prefix (str, optional): filtering prefix - - Returns: - data (dict) - """ - - data = {} - if NODE_TAB_NAME not in node.knobs(): - return data - - # check if lists - if not isinstance(prefix, list): - prefix = [prefix] - - # loop prefix - for p in prefix: - # check if the node is avalon tracked - try: - # check if data available on the node - _ = node[DATA_GROUP_KEY].value() - except NameError: - # if it doesn't then create it - if create: - node = set_avalon_knob_data(node) - return get_avalon_knob_data(node) - return {} - - # get data from filtered knobs - data.update({k.replace(p, ''): node[k].value() - for k in node.knobs().keys() - if p in k}) - - return data - - -def add_write_node(name, file_path, knobs, **kwarg): - """Adding nuke write node - - Arguments: - name (str): nuke node name - kwarg (attrs): data for nuke knobs - - Returns: - node (obj): nuke write node - """ - use_range_limit = kwarg.get("use_range_limit", None) - - w = nuke.createNode( - "Write", - "name {}".format(name), - inpanel=False - ) - - w["file"].setValue(file_path) - - # finally add knob overrides - set_node_knobs_from_settings(w, knobs, **kwarg) - - if use_range_limit: - w["use_limit"].setValue(True) - w["first"].setValue(kwarg["frame_range"][0]) - w["last"].setValue(kwarg["frame_range"][1]) - - return w - - -def read_avalon_data(node): - """Return user-defined knobs from given `node` - - Args: - node (nuke.Node): Nuke node object - - Returns: - Dict[str, nuke.Knob]: A dictionary of knob name to nuke.Knob objects - - """ - def compat_prefixed(knob_name): - if knob_name.startswith("avalon:"): - return knob_name[len("avalon:"):] - elif knob_name.startswith("ak:"): - return knob_name[len("ak:"):] - - data = dict() - - pattern = ("(?<=addUserKnob {)" - "([0-9]*) (\\S*)" # Matching knob type and knob name - "(?=[ |}])") - tcl_script = node.writeKnobs(nuke.WRITE_USER_KNOB_DEFS) - result = re.search(pattern, tcl_script) - - if result: - first_user_knob = result.group(2) - # Collect user knobs from the end of the knob list - for knob in reversed(node.allKnobs()): - knob_name = knob.name() - if not knob_name: - # Ignore unnamed knob - continue - - knob_type = nuke.knob(knob.fullyQualifiedName(), type=True) - value = knob.value() - - if ( - knob_type not in EXCLUDED_KNOB_TYPE_ON_READ or - # For compating read-only string data that imprinted - # by `nuke.Text_Knob`. - (knob_type == 26 and value) - ): - key = compat_prefixed(knob_name) - if key is not None: - data[key] = value - - if knob_name == first_user_knob: - break - - return data - - -def get_node_path(path, padding=4): - """Get filename for the Nuke write with padded number as '#' - - Arguments: - path (str): The path to render to. 
- - Returns: - Tuple[str, int, str]: head, padding, tail (extension) - - Examples: - >>> get_frame_path("test.exr") - ('test', 4, '.exr') - - >>> get_frame_path("filename.#####.tif") - ('filename.', 5, '.tif') - - >>> get_frame_path("foobar##.tif") - ('foobar', 2, '.tif') - - >>> get_frame_path("foobar_%08d.tif") - ('foobar_', 8, '.tif') - """ - filename, ext = os.path.splitext(path) - - # Find a final number group - if '%' in filename: - match = re.match('.*?(%[0-9]+d)$', filename) - if match: - padding = int(match.group(1).replace('%', '').replace('d', '')) - # remove number from end since fusion - # will swap it with the frame number - filename = filename.replace(match.group(1), '') - elif '#' in filename: - match = re.match('.*?(#+)$', filename) - - if match: - padding = len(match.group(1)) - # remove number from end since fusion - # will swap it with the frame number - filename = filename.replace(match.group(1), '') - - return filename, padding, ext - - -def get_nuke_imageio_settings(): - return get_project_settings(Context.project_name)["nuke"]["imageio"] - - -def get_imageio_node_setting(node_class, plugin_name, product_name): - """Get preset data for dataflow (fileType, compression, bitDepth)""" - imageio_nodes = get_nuke_imageio_settings()["nodes"] - required_nodes = imageio_nodes["required_nodes"] - - imageio_node = None - for node in required_nodes: - log.info(node) - if ( - node_class in node["nuke_node_class"] - and plugin_name in node["plugins"] - ): - imageio_node = node - break - - if not imageio_node: - return - - # find overrides and update knobs with them - get_imageio_node_override_setting( - node_class, - plugin_name, - product_name, - imageio_node["knobs"] - ) - return imageio_node - - -def get_imageio_node_override_setting( - node_class, plugin_name, product_name, knobs_settings -): - """ Get imageio node overrides from settings - """ - imageio_nodes = get_nuke_imageio_settings()["nodes"] - override_nodes = imageio_nodes["override_nodes"] - - # find matching override node - override_imageio_node = None - for onode in override_nodes: - if node_class not in onode["nuke_node_class"]: - continue - - if plugin_name not in onode["plugins"]: - continue - - # TODO change 'subsets' to 'product_names' in settings - if ( - onode["subsets"] - and not any( - re.search(s.lower(), product_name.lower()) - for s in onode["subsets"] - ) - ): - continue - - override_imageio_node = onode - break - - # add overrides to imageio_node - if override_imageio_node: - # get all knob names in imageio_node - knob_names = [k["name"] for k in knobs_settings] - - for oknob in override_imageio_node["knobs"]: - oknob_name = oknob["name"] - oknob_type = oknob["type"] - oknob_value = oknob[oknob_type] - for knob in knobs_settings: - # add missing knobs into imageio_node - if oknob_name not in knob_names: - knobs_settings.append(oknob) - knob_names.append(oknob_name) - continue - - if oknob_name != knob["name"]: - continue - - knob_type = knob["type"] - # override matching knob name - if not oknob_value: - # remove original knob if no value found in oknob - knobs_settings.remove(knob) - else: - # override knob value with oknob's - knob[knob_type] = oknob_value - - return knobs_settings - - -def get_imageio_input_colorspace(filename): - """Get input file colorspace based on regex in settings.""" - imageio_regex_inputs = ( - get_nuke_imageio_settings()["regex_inputs"]["inputs"]) - - preset_clrsp = None - for regexInput in imageio_regex_inputs: - if bool(re.search(regexInput["regex"], filename)): - 
preset_clrsp = str(regexInput["colorspace"])
-
-    return preset_clrsp
-
-
-def get_view_process_node():
-    reset_selection()
-
-    ipn_node = None
-    for v_ in nuke.allNodes(filter="Viewer"):
-        ipn = v_['input_process_node'].getValue()
-        ipn_node = nuke.toNode(ipn)
-
-        # skip if no input node is set
-        if not ipn:
-            continue
-
-        if ipn == "VIEWER_INPUT" and not ipn_node:
-            # since it is set by default we can ignore it;
-            # nobody usually uses this, but honor it if
-            # it exists in the nodes
-            continue
-
-        if not ipn_node:
-            # in case a Viewer node is transferred from
-            # different workfile with old values
-            raise NameError((
-                "Input process node name '{}' set in "
-                "Viewer '{}' doesn't exist in the script's nodes"
-            ).format(ipn, v_.name()))
-
-        ipn_node.setSelected(True)
-
-    if ipn_node:
-        return duplicate_node(ipn_node)
-
-
-def on_script_load():
-    """Callback for ffmpeg support"""
-    if nuke.env["LINUX"]:
-        nuke.tcl('load ffmpegReader')
-        nuke.tcl('load ffmpegWriter')
-    else:
-        nuke.tcl('load movReader')
-        nuke.tcl('load movWriter')
-
-
-def check_inventory_versions():
-    """
-    Check loaded containers for outdated versions.
-
-    Any time this function is run it will check all nodes and filter only
-    Loader nodes to read their versions. It will get all versions from the
-    database and check if each node uses the latest version. If not, it will
-    color the node red.
-    """
-    from .pipeline import parse_container
-
-    # get all Loader nodes by avalon attribute metadata
-    node_with_repre_id = []
-    repre_ids = set()
-    # Find all containers and collect their node and representation ids
-    for node in nuke.allNodes():
-        container = parse_container(node)
-
-        if container:
-            node = nuke.toNode(container["objectName"])
-            avalon_knob_data = read_avalon_data(node)
-            repre_id = avalon_knob_data["representation"]
-
-            repre_ids.add(repre_id)
-            node_with_repre_id.append((node, repre_id))
-
-    # Skip if nothing was found
-    if not repre_ids:
-        return
-
-    project_name = get_current_project_name()
-    # Find representations based on found containers
-    repre_entities = ayon_api.get_representations(
-        project_name,
-        representation_ids=repre_ids,
-        fields={"id", "versionId"}
-    )
-    # Store representations by id and collect version ids
-    repre_entities_by_id = {}
-    version_ids = set()
-    for repre_entity in repre_entities:
-        # Use stringed representation id to match value in containers
-        repre_id = repre_entity["id"]
-        repre_entities_by_id[repre_id] = repre_entity
-        version_ids.add(repre_entity["versionId"])
-
-    version_entities = ayon_api.get_versions(
-        project_name,
-        version_ids=version_ids,
-        fields={"id", "version", "productId"},
-    )
-    # Store versions by id and collect product ids
-    version_entities_by_id = {}
-    product_ids = set()
-    for version_entity in version_entities:
-        version_entities_by_id[version_entity["id"]] = version_entity
-        product_ids.add(version_entity["productId"])
-
-    # Query last versions based on product ids
-    last_versions_by_product_id = ayon_api.get_last_versions(
-        project_name, product_ids=product_ids, fields={"id", "productId"}
-    )
-
-    # Loop through collected container nodes and their representation ids
-    for item in node_with_repre_id:
-        # Some python versions of nuke can't unfold tuple in for loop
-        node, repre_id = item
-        repre_entity = repre_entities_by_id.get(repre_id)
-        # Failsafe for not finding the representation.
-        if not repre_entity:
-            log.warning((
-                "Could not find the representation on node \"{}\""
-            ).format(node.name()))
-            continue
-
-        version_id = repre_entity["versionId"]
-        version_entity = version_entities_by_id.get(version_id)
-        if not version_entity:
-            log.warning((
-                "Could not find the version on node \"{}\""
-            ).format(node.name()))
-            continue
-
-        # Get last version based on product id
-        product_id = version_entity["productId"]
-        last_version = last_versions_by_product_id[product_id]
-        # Check if last version is same as current version
-        if last_version["id"] == version_entity["id"]:
-            color_value = "0x4ecd25ff"
-        else:
-            color_value = "0xd84f20ff"
-        node["tile_color"].setValue(int(color_value, 16))
-
-
-def writes_version_sync():
-    """Callback synchronizing version of publishable write nodes"""
-    try:
-        rootVersion = get_version_from_path(nuke.root().name())
-        padding = len(rootVersion)
-        new_version = "v" + str("{" + ":0>{}".format(padding) + "}").format(
-            int(rootVersion)
-        )
-    except Exception:
-        return
-
-    for each in nuke.allNodes(filter="Write"):
-        # check if the node is avalon tracked
-        if NODE_TAB_NAME not in each.knobs():
-            continue
-
-        avalon_knob_data = read_avalon_data(each)
-
-        try:
-            if avalon_knob_data["families"] not in ["render"]:
-                continue
-
-            node_file = each["file"].value()
-
-            node_version = "v" + get_version_from_path(node_file)
-
-            node_new_file = node_file.replace(node_version, new_version)
-            each["file"].setValue(node_new_file)
-            if not os.path.isdir(os.path.dirname(node_new_file)):
-                log.warning("Path does not exist! I am creating it.")
-                os.makedirs(os.path.dirname(node_new_file))
-        except Exception as e:
-            log.warning(
-                "Write node: `{}` has no version in path: {}".format(
-                    each.name(), e))
-
-
-def version_up_script():
-    """Raise the working script's version."""
-    import nukescripts
-    nukescripts.script_and_write_nodes_version_up()
-
-
-def check_product_name_exists(nodes, product_name):
-    """
-    Check that the product name is not already used by an existing node.
-
-    Arguments:
-        nodes (list): list of nuke.Node objects
-        product_name (str): name we try to find
-
-    Returns:
-        bool: True or False
-    """
-    return next((True for n in nodes
-                 if product_name in read_avalon_data(n).get("productName", "")),
-                False)
-
-
-def format_anatomy(data):
-    """Helper function for formatting anatomy paths.
-
-    Arguments:
-        data (dict): dictionary with attributes used for formatting
-
-    Return:
-        str: Formatted path.
-    
- """ - - project_name = get_current_project_name() - anatomy = Anatomy(project_name) - - frame_padding = anatomy.templates_obj.frame_padding - - version = data.get("version") - if version is None: - file = script_name() - data["version"] = get_version_from_path(file) - - folder_path = data["folderPath"] - task_name = data["task"] - host_name = get_current_host_name() - - context_data = get_template_data_with_names( - project_name, folder_path, task_name, host_name - ) - data.update(context_data) - data.update({ - "subset": data["productName"], - "family": data["productType"], - "product": { - "name": data["productName"], - "type": data["productType"], - }, - "frame": "#" * frame_padding, - }) - return anatomy.format(data) - - -def script_name() -> str: - """Returns nuke script path""" - return nuke.root().knob("name").value() - - -def add_button_render_on_farm(node): - name = "renderOnFarm" - label = "Render On Farm" - value = ( - "from ayon_nuke.api.utils import submit_render_on_farm;" - "submit_render_on_farm(nuke.thisNode())" - ) - knob = nuke.PyScript_Knob(name, label, value) - knob.clearFlag(nuke.STARTLINE) - node.addKnob(knob) - - -def add_button_write_to_read(node): - name = "createReadNode" - label = "Read From Rendered" - value = "import write_to_read;\ - write_to_read.write_to_read(nuke.thisNode(), allow_relative=False)" - knob = nuke.PyScript_Knob(name, label, value) - knob.clearFlag(nuke.STARTLINE) - node.addKnob(knob) - - -def add_button_clear_rendered(node, path): - name = "clearRendered" - label = "Clear Rendered" - value = "import clear_rendered;\ - clear_rendered.clear_rendered(\"{}\")".format(path) - knob = nuke.PyScript_Knob(name, label, value) - node.addKnob(knob) - - -def create_prenodes( - prev_node, - nodes_setting, - plugin_name=None, - product_name=None, - **kwargs -): - last_node = None - for_dependency = {} - for node in nodes_setting: - # get attributes - name = node["name"] - nodeclass = node["nodeclass"] - knobs = node["knobs"] - - # create node - now_node = nuke.createNode( - nodeclass, - "name {}".format(name), - inpanel=False - ) - - # add for dependency linking - for_dependency[name] = { - "node": now_node, - "dependent": node["dependent"] - } - - if all([plugin_name, product_name]): - # find imageio overrides - get_imageio_node_override_setting( - now_node.Class(), - plugin_name, - product_name, - knobs - ) - - # add data to knob - set_node_knobs_from_settings(now_node, knobs, **kwargs) - - # switch actual node to previous - last_node = now_node - - for _node_name, node_prop in for_dependency.items(): - if not node_prop["dependent"]: - node_prop["node"].setInput( - 0, prev_node) - elif node_prop["dependent"] in for_dependency: - _prev_node = for_dependency[node_prop["dependent"]]["node"] - node_prop["node"].setInput( - 0, _prev_node) - else: - log.warning("Dependency has wrong name of node: {}".format( - node_prop - )) - - return last_node - - -def create_write_node( - name, - data, - input=None, - prenodes=None, - linked_knobs=None, - **kwargs -): - """Creating write node which is group node - - Arguments: - name (str): name of node - data (dict): creator write instance data - input (node)[optional]: selected node to connect to - prenodes (Optional[list[dict]]): nodes to be created before write - with dependency - review (bool)[optional]: adding review knob - farm (bool)[optional]: rendering workflow target - kwargs (dict)[optional]: additional key arguments for formatting - - Example: - prenodes = { - "nodeName": { - "nodeclass": "Reformat", - 
"dependent": [ - following_node_01, - ... - ], - "knobs": [ - { - "type": "text", - "name": "knobname", - "value": "knob value" - }, - ... - ] - }, - ... - } - - - Return: - node (nuke.Node): group node with avalon data as Knobs - """ - # Ensure name does not contain any invalid characters. - special_chars = re.escape("!@#$%^&*()=[]{}|\\;',.<>/?~+-") - special_chars_regex = re.compile(f"[{special_chars}]") - found_special_characters = list(special_chars_regex.findall(name)) - - msg = ( - f"Special characters found in name \"{name}\": " - f"{' '.join(found_special_characters)}" - ) - assert not found_special_characters, msg - - prenodes = prenodes or [] - - # filtering variables - plugin_name = data["creator"] - product_name = data["productName"] - - # get knob settings for write node - imageio_writes = get_imageio_node_setting( - node_class="Write", - plugin_name=plugin_name, - product_name=product_name - ) - - for knob in imageio_writes["knobs"]: - if knob["name"] == "file_type": - knot_type = knob["type"] - ext = knob[knot_type] - - data.update({ - "imageio_writes": imageio_writes, - "ext": ext - }) - anatomy_filled = format_anatomy(data) - - # build file path to workfiles - fdir = str( - anatomy_filled["work"]["default"]["directory"] - ).replace("\\", "/") - data["work"] = fdir - fpath = StringTemplate(data["fpath_template"]).format_strict(data) - - # create directory - if not os.path.isdir(os.path.dirname(fpath)): - log.warning("Path does not exist! I am creating it.") - os.makedirs(os.path.dirname(fpath)) - - GN = nuke.createNode("Group", "name {}".format(name)) - - prev_node = None - with GN: - if input: - input_name = str(input.name()).replace(" ", "") - # if connected input node was defined - prev_node = nuke.createNode( - "Input", - "name {}".format(input_name), - inpanel=False - ) - else: - # generic input node connected to nothing - prev_node = nuke.createNode( - "Input", - "name {}".format("rgba"), - inpanel=False - ) - - # creating pre-write nodes `prenodes` - last_prenode = create_prenodes( - prev_node, - prenodes, - plugin_name, - product_name, - **kwargs - ) - if last_prenode: - prev_node = last_prenode - - # creating write node - write_node = now_node = add_write_node( - "inside_{}".format(name), - fpath, - imageio_writes["knobs"], - **data - ) - # connect to previous node - now_node.setInput(0, prev_node) - - # switch actual node to previous - prev_node = now_node - - now_node = nuke.createNode("Output", "name Output1", inpanel=False) - - # connect to previous node - now_node.setInput(0, prev_node) - - # add divider - GN.addKnob(nuke.Text_Knob('', 'Rendering')) - - # Add linked knobs. 
- linked_knob_names = [] - - # add input linked knobs and create group only if any input - if linked_knobs: - linked_knob_names.append("_grp-start_") - linked_knob_names.extend(linked_knobs) - linked_knob_names.append("_grp-end_") - - linked_knob_names.append("Render") - - for _k_name in linked_knob_names: - if "_grp-start_" in _k_name: - knob = nuke.Tab_Knob( - "rnd_attr", "Rendering attributes", nuke.TABBEGINCLOSEDGROUP) - GN.addKnob(knob) - elif "_grp-end_" in _k_name: - knob = nuke.Tab_Knob( - "rnd_attr_end", "Rendering attributes", nuke.TABENDGROUP) - GN.addKnob(knob) - else: - if "___" in _k_name: - # add divider - GN.addKnob(nuke.Text_Knob("")) - else: - # add linked knob by _k_name - link = nuke.Link_Knob("") - link.makeLink(write_node.name(), _k_name) - link.setName(_k_name) - - # make render - if "Render" in _k_name: - link.setLabel("Render Local") - link.setFlag(0x1000) - GN.addKnob(link) - - # Adding render farm submission button. - if data.get("render_on_farm", False): - add_button_render_on_farm(GN) - - # adding write to read button - add_button_write_to_read(GN) - - # adding write to read button - add_button_clear_rendered(GN, os.path.dirname(fpath)) - - # set tile color - tile_color = next( - iter( - k[k["type"]] for k in imageio_writes["knobs"] - if "tile_color" in k["name"] - ), [255, 0, 0, 255] - ) - new_tile_color = [] - for c in tile_color: - if isinstance(c, float): - c = int(c * 255) - new_tile_color.append(c) - GN["tile_color"].setValue( - color_gui_to_int(new_tile_color)) - - return GN - - -def set_node_knobs_from_settings(node, knob_settings, **kwargs): - """Overriding knob values from settings - - Using `schema_nuke_knob_inputs` for knob type definitions. - - Args: - node (nuke.Node): nuke node - knob_settings (list): list of dict. 
Keys are `type`, `name`, `value` - kwargs (dict)[optional]: keys for formattable knob settings - """ - for knob in knob_settings: - knob_name = knob["name"] - if knob_name not in node.knobs(): - continue - - knob_type = knob["type"] - knob_value = knob[knob_type] - if knob_type == "expression": - node[knob_name].setExpression(knob_value) - continue - - # first deal with formattable knob settings - if knob_type == "formatable": - template = knob_value["template"] - to_type = knob_value["to_type"] - try: - knob_value = template.format(**kwargs) - except KeyError as msg: - raise KeyError( - "Not able to format expression: {}".format(msg)) - - # convert value to correct type - if to_type == "2d_vector": - knob_value = knob_value.split(";").split(",") - - knob_type = to_type - - if not knob_value: - continue - - knob_value = convert_knob_value_to_correct_type( - knob_type, knob_value) - - node[knob_name].setValue(knob_value) - - -def convert_knob_value_to_correct_type(knob_type, knob_value): - # Convert 'text' to string to avoid unicode - if knob_type == "text": - return str(knob_value) - - if knob_type == "boolean": - return bool(knob_value) - - if knob_type == "decimal_number": - return float(knob_value) - - if knob_type == "number": - return int(knob_value) - - if knob_type == "color_gui": - new_color = [] - for value in knob_value: - if isinstance(value, float): - value = int(value * 255) - new_color.append(value) - return color_gui_to_int(new_color) - - if knob_type == "box": - return [ - knob_value["x"], knob_value["y"], - knob_value["r"], knob_value["t"] - ] - - if knob_type == "vector_2d": - return [knob_value["x"], knob_value["y"]] - - if knob_type == "vector_3d": - return [knob_value["x"], knob_value["y"], knob_value["z"]] - - return knob_value - - -def color_gui_to_int(color_gui): - # Append alpha channel if not present - if len(color_gui) == 3: - color_gui = list(color_gui) + [255] - hex_value = ( - "0x{0:0>2x}{1:0>2x}{2:0>2x}{3:0>2x}").format(*color_gui) - return int(hex_value, 16) - - -def create_backdrop(label="", color=None, layer=0, - nodes=None): - """Create Backdrop node - - Arguments: - color (str): nuke compatible string with color code - layer (int): layer of node usually used (self.pos_layer - 1) - label (str): the message - nodes (list): list of nodes to be wrapped into backdrop - - Returns: - nuke.Node: The created backdrop node. - - """ - assert isinstance(nodes, list), "`nodes` should be a list of nodes" - - # Calculate bounds for the backdrop node. - bdX = min([node.xpos() for node in nodes]) - bdY = min([node.ypos() for node in nodes]) - bdW = max([node.xpos() + node.screenWidth() for node in nodes]) - bdX - bdH = max([node.ypos() + node.screenHeight() for node in nodes]) - bdY - - # Expand the bounds to leave a little border. Elements are offsets - # for left, top, right and bottom edges respectively - left, top, right, bottom = (-20, -65, 20, 60) - bdX += left - bdY += top - bdW += (right - left) - bdH += (bottom - top) - - bdn = nuke.createNode("BackdropNode") - bdn["z_order"].setValue(layer) - - if color: - bdn["tile_color"].setValue(int(color, 16)) - - bdn["xpos"].setValue(bdX) - bdn["ypos"].setValue(bdY) - bdn["bdwidth"].setValue(bdW) - bdn["bdheight"].setValue(bdH) - - if label: - bdn["label"].setValue(label) - - bdn["note_font_size"].setValue(20) - return bdn - - -class WorkfileSettings(object): - """ - All settings for workfile will be set - - This object is setting all possible root settings to the workfile. 
- Including Colorspace, Frame ranges, Resolution format. It can set it - to Root node or to any given node. - - Arguments: - root (node): nuke's root node - nodes (list): list of nuke's nodes - nodes_filter (list): filtering classes for nodes - - """ - - def __init__(self, root_node=None, nodes=None, **kwargs): - project_entity = kwargs.get("project") - if project_entity is None: - project_name = get_current_project_name() - project_entity = ayon_api.get_project(project_name) - else: - project_name = project_entity["name"] - - Context._project_entity = project_entity - self._project_name = project_name - self._folder_path = get_current_folder_path() - self._task_name = get_current_task_name() - self._folder_entity = ayon_api.get_folder_by_path( - project_name, self._folder_path - ) - self._root_node = root_node or nuke.root() - self._nodes = self.get_nodes(nodes=nodes) - - context_data = get_template_data_with_names( - project_name, self._folder_path, self._task_name, "nuke" - ) - self.formatting_data = context_data - - def get_nodes(self, nodes=None, nodes_filter=None): - - if not isinstance(nodes, list) and not isinstance(nodes_filter, list): - return [n for n in nuke.allNodes()] - elif not isinstance(nodes, list) and isinstance(nodes_filter, list): - nodes = list() - for filter in nodes_filter: - [nodes.append(n) for n in nuke.allNodes(filter=filter)] - return nodes - elif isinstance(nodes, list) and not isinstance(nodes_filter, list): - return [n for n in self._nodes] - elif isinstance(nodes, list) and isinstance(nodes_filter, list): - for filter in nodes_filter: - return [n for n in self._nodes if filter in n.Class()] - - def set_viewers_colorspace(self, imageio_nuke): - """Adds correct colorspace to viewer - - Arguments: - imageio_nuke (dict): nuke colorspace configurations - - """ - filter_knobs = [ - "viewerProcess", - "wipe_position", - "monitorOutOutputTransform" - ] - viewer_process = self._display_and_view_formatted( - imageio_nuke["viewer"] - ) - output_transform = self._display_and_view_formatted( - imageio_nuke["monitor"] - ) - erased_viewers = [] - for v in nuke.allNodes(filter="Viewer"): - # set viewProcess to preset from settings - v["viewerProcess"].setValue(viewer_process) - - if viewer_process not in v["viewerProcess"].value(): - copy_inputs = v.dependencies() - copy_knobs = { - k: v[k].value() for k in v.knobs() - if k not in filter_knobs - } - - # delete viewer with wrong settings - erased_viewers.append(v["name"].value()) - nuke.delete(v) - - # create new viewer - nv = nuke.createNode("Viewer") - - # connect to original inputs - for i, n in enumerate(copy_inputs): - nv.setInput(i, n) - - # set copied knobs - for k, v in copy_knobs.items(): - nv[k].setValue(v) - - # set viewerProcess - nv["viewerProcess"].setValue(viewer_process) - nv["monitorOutOutputTransform"].setValue(output_transform) - - if erased_viewers: - log.warning( - "Attention! Viewer nodes {} were erased." 
- "It had wrong color profile".format(erased_viewers)) - - def _display_and_view_formatted(self, view_profile): - """ Format display and view profile string - - Args: - view_profile (dict): view and display profile - - Returns: - str: formatted display and view profile string - """ - display_view = create_viewer_profile_string( - view_profile["view"], view_profile["display"], path_like=False - ) - # format any template tokens used in the string - return StringTemplate(display_view).format_strict(self.formatting_data) - - def set_root_colorspace(self, imageio_host): - """Adds correct colorspace to root - - Arguments: - imageio_host (dict): host colorspace configurations - - """ - config_data = get_current_context_imageio_config_preset() - - workfile_settings = imageio_host["workfile"] - color_management = workfile_settings["color_management"] - native_ocio_config = workfile_settings["native_ocio_config"] - - if not config_data: - # no ocio config found and no custom path used - if self._root_node["colorManagement"].value() \ - not in color_management: - self._root_node["colorManagement"].setValue(color_management) - - # second set ocio version - if self._root_node["OCIO_config"].value() \ - not in native_ocio_config: - self._root_node["OCIO_config"].setValue(native_ocio_config) - - else: - # OCIO config path is defined from prelaunch hook - self._root_node["colorManagement"].setValue("OCIO") - - # print previous settings in case some were found in workfile - residual_path = self._root_node["customOCIOConfigPath"].value() - if residual_path: - log.info("Residual OCIO config path found: `{}`".format( - residual_path - )) - - # set ocio config path - if config_data: - config_path = config_data["path"].replace("\\", "/") - log.info("OCIO config path found: `{}`".format( - config_path)) - - # check if there's a mismatch between environment and settings - correct_settings = self._is_settings_matching_environment( - config_data) - - # if there's no mismatch between environment and settings - if correct_settings: - self._set_ocio_config_path_to_workfile(config_data) - - workfile_settings_output = {} - # get monitor lut from settings respecting Nuke version differences - monitor_lut_data = self._get_monitor_settings( - workfile_settings["monitor_out_lut"], - workfile_settings["monitor_lut"] - ) - workfile_settings_output.update(monitor_lut_data) - workfile_settings_output.update( - { - "workingSpaceLUT": workfile_settings["working_space"], - "int8Lut": workfile_settings["int_8_lut"], - "int16Lut": workfile_settings["int_16_lut"], - "logLut": workfile_settings["log_lut"], - "floatLut": workfile_settings["float_lut"], - } - ) - - # then set the rest - for knob, value_ in workfile_settings_output.items(): - # skip unfilled ocio config path - # it will be dict in value - if isinstance(value_, dict): - continue - # skip empty values - if not value_: - continue - self._root_node[knob].setValue(str(value_)) - - def _get_monitor_settings(self, viewer_lut, monitor_lut): - """ Get monitor settings from viewer and monitor lut - - Args: - viewer_lut (str): viewer lut string - monitor_lut (str): monitor lut string - - Returns: - dict: monitor settings - """ - output_data = {} - m_display, m_viewer = get_viewer_config_from_string(monitor_lut) - v_display, v_viewer = get_viewer_config_from_string(viewer_lut) - - # set monitor lut differently for nuke version 14 - if nuke.NUKE_VERSION_MAJOR >= 14: - output_data["monitorOutLUT"] = create_viewer_profile_string( - m_viewer, m_display, path_like=False) - # 
monitorLut=thumbnails - viewerProcess makes more sense - output_data["monitorLut"] = create_viewer_profile_string( - v_viewer, v_display, path_like=False) - - if nuke.NUKE_VERSION_MAJOR == 13: - output_data["monitorOutLUT"] = create_viewer_profile_string( - m_viewer, m_display, path_like=False) - # monitorLut=thumbnails - viewerProcess makes more sense - output_data["monitorLut"] = create_viewer_profile_string( - v_viewer, v_display, path_like=True) - if nuke.NUKE_VERSION_MAJOR <= 12: - output_data["monitorLut"] = create_viewer_profile_string( - m_viewer, m_display, path_like=True) - - return output_data - - def _is_settings_matching_environment(self, config_data): - """ Check if OCIO config path is different from environment - - Args: - config_data (dict): OCIO config data from settings - - Returns: - bool: True if settings are matching environment, False otherwise - """ - current_ocio_path = os.environ["OCIO"] - settings_ocio_path = config_data["path"] - - # normalize all paths to forward slashes - current_ocio_path = current_ocio_path.replace("\\", "/") - settings_ocio_path = settings_ocio_path.replace("\\", "/") - - if current_ocio_path != settings_ocio_path: - message = """ -It seems like there's a mismatch between the OCIO config path set in your Nuke -settings and the actual path set in your OCIO environment. - -To resolve this, please follow these steps: -1. Close Nuke if it's currently open. -2. Reopen Nuke. - -Please note the paths for your reference: - -- The OCIO environment path currently set: - `{env_path}` - -- The path in your current Nuke settings: - `{settings_path}` - -Reopening Nuke should synchronize these paths and resolve any discrepancies. -""" - nuke.message( - message.format( - env_path=current_ocio_path, - settings_path=settings_ocio_path - ) - ) - return False - - return True - - def _set_ocio_config_path_to_workfile(self, config_data): - """ Set OCIO config path to workfile - - Path set into nuke workfile. It is trying to replace path with - environment variable if possible. If not, it will set it as it is. - It also saves the script to apply the change, but only if it's not - empty Untitled script. 
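
# --- Editor's example (not part of the original patch) ---------------------
# A worked sketch of the env-var substitution this method relies on (see
# `_replace_ocio_path_with_env_var` further below). All values here are
# hypothetical. When the configured OCIO path sits under a known root
# variable, the root prefix is swapped for a TCL expression that Nuke
# re-resolves on load and that normalizes backslashes in Windows-style
# values.
import os

os.environ["AYON_PROJECT_ROOT_WORK"] = "P:/projects"  # hypothetical root
config_path = "P:/projects/foo/ocio/config.ocio"
env_path = os.environ["AYON_PROJECT_ROOT_WORK"]

# Same expression shape the method below builds with `regsub`.
resub_expr = '[regsub -all {\\\\} [getenv AYON_PROJECT_ROOT_WORK] "/"]'
new_path = config_path.replace(env_path, resub_expr)

assert new_path.startswith("[regsub")
assert new_path.endswith("/foo/ocio/config.ocio")
# ---------------------------------------------------------------------------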
- - Args: - config_data (dict): OCIO config data from settings - - """ - # replace path with env var if possible - ocio_path = self._replace_ocio_path_with_env_var(config_data) - - log.info("Setting OCIO config path to: `{}`".format( - ocio_path)) - - self._root_node["customOCIOConfigPath"].setValue( - ocio_path - ) - self._root_node["OCIO_config"].setValue("custom") - - # only save script if it's not empty - if self._root_node["name"].value() != "": - log.info("Saving script to apply OCIO config path change.") - nuke.scriptSave() - - def _get_included_vars(self, config_template): - """ Get all environment variables included in template - - Args: - config_template (str): OCIO config template from settings - - Returns: - list: list of environment variables included in template - """ - # resolve all environments for whitelist variables - included_vars = [ - "BUILTIN_OCIO_ROOT", - ] - - # include all project root related env vars - for env_var in os.environ: - if env_var.startswith("AYON_PROJECT_ROOT_"): - included_vars.append(env_var) - - # use regex to find env var in template with format {ENV_VAR} - # this way we make sure only template used env vars are included - env_var_regex = r"\{([A-Z0-9_]+)\}" - env_var = re.findall(env_var_regex, config_template) - if env_var: - included_vars.append(env_var[0]) - - return included_vars - - def _replace_ocio_path_with_env_var(self, config_data): - """ Replace OCIO config path with environment variable - - Environment variable is added as TCL expression to path. TCL expression - is also replacing backward slashes found in path for windows - formatted values. - - Args: - config_data (str): OCIO config dict from settings - - Returns: - str: OCIO config path with environment variable TCL expression - """ - config_path = config_data["path"].replace("\\", "/") - config_template = config_data["template"] - - included_vars = self._get_included_vars(config_template) - - # make sure we return original path if no env var is included - new_path = config_path - - for env_var in included_vars: - env_path = os.getenv(env_var) - if not env_path: - continue - - # it has to be directory current process can see - if not os.path.isdir(env_path): - continue - - # make sure paths are in same format - env_path = env_path.replace("\\", "/") - path = config_path.replace("\\", "/") - - # check if env_path is in path and replace to first found positive - if env_path in path: - # with regsub we make sure path format of slashes is correct - resub_expr = ( - "[regsub -all {{\\\\}} [getenv {}] \"/\"]").format(env_var) - - new_path = path.replace( - env_path, resub_expr - ) - break - - return new_path - - def set_writes_colorspace(self): - """ Adds correct colorspace to write node dict - """ - for node in nuke.allNodes(filter="Group", group=self._root_node): - log.info("Setting colorspace to `{}`".format(node.name())) - - # get data from avalon knob - avalon_knob_data = read_avalon_data(node) - node_data = get_node_data(node, INSTANCE_DATA_KNOB) - - if ( - # backward compatibility - # TODO: remove this once old avalon data api will be removed - avalon_knob_data - and avalon_knob_data.get("id") not in { - AYON_INSTANCE_ID, AVALON_INSTANCE_ID - } - ): - continue - elif ( - node_data - and node_data.get("id") not in { - AYON_INSTANCE_ID, AVALON_INSTANCE_ID - } - ): - continue - - if ( - # backward compatibility - # TODO: remove this once old avalon data api will be removed - avalon_knob_data - and "creator" not in avalon_knob_data - ): - continue - elif ( - node_data - and 
"creator_identifier" not in node_data - ): - continue - - nuke_imageio_writes = None - if avalon_knob_data: - # establish families - product_type = avalon_knob_data.get("productType") - if product_type is None: - product_type = avalon_knob_data["family"] - families = [product_type] - if avalon_knob_data.get("families"): - families.append(avalon_knob_data.get("families")) - - nuke_imageio_writes = get_imageio_node_setting( - node_class=avalon_knob_data["families"], - plugin_name=avalon_knob_data["creator"], - product_name=avalon_knob_data["productName"] - ) - elif node_data: - nuke_imageio_writes = get_write_node_template_attr(node) - - if not nuke_imageio_writes: - return - - write_node = None - - # get into the group node - node.begin() - for x in nuke.allNodes(): - if x.Class() == "Write": - write_node = x - node.end() - - if not write_node: - return - - set_node_knobs_from_settings( - write_node, nuke_imageio_writes["knobs"]) - - def set_reads_colorspace(self, read_clrs_inputs): - """ Setting colorspace to Read nodes - - Looping through all read nodes and tries to set colorspace based - on regex rules in presets - """ - changes = {} - for n in nuke.allNodes(): - file = nuke.filename(n) - if n.Class() != "Read": - continue - - # check if any colorspace presets for read is matching - preset_clrsp = None - - for input in read_clrs_inputs: - if not bool(re.search(input["regex"], file)): - continue - preset_clrsp = input["colorspace"] - - if preset_clrsp is not None: - current = n["colorspace"].value() - future = str(preset_clrsp) - if current != future: - changes[n.name()] = { - "from": current, - "to": future - } - - if changes: - msg = "Read nodes are not set to correct colorspace:\n\n" - for nname, knobs in changes.items(): - msg += ( - " - node: '{0}' is now '{1}' but should be '{2}'\n" - ).format(nname, knobs["from"], knobs["to"]) - - msg += "\nWould you like to change it?" 
- - if nuke.ask(msg): - for nname, knobs in changes.items(): - n = nuke.toNode(nname) - n["colorspace"].setValue(knobs["to"]) - log.info( - "Setting `{0}` to `{1}`".format( - nname, - knobs["to"])) - - def set_colorspace(self): - """ Setting colorspace following presets - """ - # get imageio - nuke_colorspace = get_nuke_imageio_settings() - - log.info("Setting colorspace to workfile...") - try: - self.set_root_colorspace(nuke_colorspace) - except AttributeError as _error: - msg = "Set Colorspace to workfile error: {}".format(_error) - nuke.message(msg) - - log.info("Setting colorspace to viewers...") - try: - self.set_viewers_colorspace(nuke_colorspace) - except AttributeError as _error: - msg = "Set Colorspace to viewer error: {}".format(_error) - nuke.message(msg) - - log.info("Setting colorspace to write nodes...") - try: - self.set_writes_colorspace() - except AttributeError as _error: - nuke.message(_error) - log.error(_error) - - log.info("Setting colorspace to read nodes...") - read_clrs_inputs = nuke_colorspace["regex_inputs"].get("inputs", []) - if read_clrs_inputs: - self.set_reads_colorspace(read_clrs_inputs) - - def reset_frame_range_handles(self): - """Set frame range to current folder.""" - - if "attrib" not in self._folder_entity: - msg = "Folder {} don't have set any 'attrib'".format( - self._folder_path - ) - log.warning(msg) - nuke.message(msg) - return - - folder_attributes = self._folder_entity["attrib"] - - missing_cols = [] - check_cols = ["fps", "frameStart", "frameEnd", - "handleStart", "handleEnd"] - - for col in check_cols: - if col not in folder_attributes: - missing_cols.append(col) - - if len(missing_cols) > 0: - missing = ", ".join(missing_cols) - msg = "'{}' are not set for folder '{}'!".format( - missing, self._folder_path) - log.warning(msg) - nuke.message(msg) - return - - # get handles values - handle_start = folder_attributes["handleStart"] - handle_end = folder_attributes["handleEnd"] - frame_start = folder_attributes["frameStart"] - frame_end = folder_attributes["frameEnd"] - - fps = float(folder_attributes["fps"]) - frame_start_handle = frame_start - handle_start - frame_end_handle = frame_end + handle_end - - self._root_node["lock_range"].setValue(False) - self._root_node["fps"].setValue(fps) - self._root_node["first_frame"].setValue(frame_start_handle) - self._root_node["last_frame"].setValue(frame_end_handle) - self._root_node["lock_range"].setValue(True) - - # update node graph so knobs are updated - update_node_graph() - - frame_range = '{0}-{1}'.format(frame_start, frame_end) - - for node in nuke.allNodes(filter="Viewer"): - node['frame_range'].setValue(frame_range) - node['frame_range_lock'].setValue(True) - node['frame_range'].setValue(frame_range) - node['frame_range_lock'].setValue(True) - - if not ASSIST: - set_node_data( - self._root_node, - INSTANCE_DATA_KNOB, - { - "handleStart": int(handle_start), - "handleEnd": int(handle_end) - } - ) - else: - log.warning( - "NukeAssist mode is not allowing " - "updating custom knobs..." - ) - - def reset_resolution(self): - """Set resolution to project resolution.""" - log.info("Resetting resolution") - project_name = get_current_project_name() - folder_attributes = self._folder_entity["attrib"] - - format_data = { - "width": folder_attributes["resolutionWidth"], - "height": folder_attributes["resolutionHeight"], - "pixel_aspect": folder_attributes["pixelAspect"], - "name": project_name - } - - if any(x_ for x_ in format_data.values() if x_ is None): - msg = ("Missing set shot attributes in DB." 
- "\nContact your supervisor!." - "\n\nWidth: `{width}`" - "\nHeight: `{height}`" - "\nPixel Aspect: `{pixel_aspect}`").format(**format_data) - log.error(msg) - nuke.message(msg) - - existing_format = None - for format in nuke.formats(): - if format_data["name"] == format.name(): - existing_format = format - break - - if existing_format: - # Enforce existing format to be correct. - existing_format.setWidth(format_data["width"]) - existing_format.setHeight(format_data["height"]) - existing_format.setPixelAspect(format_data["pixel_aspect"]) - else: - format_string = self.make_format_string(**format_data) - log.info("Creating new format: {}".format(format_string)) - nuke.addFormat(format_string) - - nuke.root()["format"].setValue(format_data["name"]) - log.info("Format is set.") - - # update node graph so knobs are updated - update_node_graph() - - def make_format_string(self, **kwargs): - if kwargs.get("r"): - return ( - "{width} " - "{height} " - "{x} " - "{y} " - "{r} " - "{t} " - "{pixel_aspect:.2f} " - "{name}".format(**kwargs) - ) - else: - return ( - "{width} " - "{height} " - "{pixel_aspect:.2f} " - "{name}".format(**kwargs) - ) - - def set_context_settings(self): - # replace reset resolution from avalon core to pype's - self.reset_resolution() - # replace reset resolution from avalon core to pype's - self.reset_frame_range_handles() - # add colorspace menu item - self.set_colorspace() - - def set_favorites(self): - from .utils import set_context_favorites - - work_dir = os.getenv("AYON_WORKDIR") - # TODO validate functionality - # - does expect the structure is '{root}/{project}/{folder}' - # - this used asset name expecting it is unique in project - folder_path = get_current_folder_path() - folder_name = folder_path.split("/")[-1] - favorite_items = OrderedDict() - - # project - # get project's root and split to parts - projects_root = os.path.normpath(work_dir.split( - Context.project_name)[0]) - # add project name - project_dir = os.path.join(projects_root, Context.project_name) + "/" - # add to favorites - favorite_items.update({"Project dir": project_dir.replace("\\", "/")}) - - # folder - folder_root = os.path.normpath(work_dir.split( - folder_name)[0]) - # add folder name - folder_dir = os.path.join(folder_root, folder_name) + "/" - # add to favorites - favorite_items.update({"Shot dir": folder_dir.replace("\\", "/")}) - - # workdir - favorite_items.update({"Work dir": work_dir.replace("\\", "/")}) - - set_context_favorites(favorite_items) - - -def get_write_node_template_attr(node): - """ Gets all defined data from presets - """ - - # TODO: add identifiers to settings and rename settings key - plugin_names_mapping = { - "create_write_image": "CreateWriteImage", - "create_write_prerender": "CreateWritePrerender", - "create_write_render": "CreateWriteRender" - } - # get avalon data from node - node_data = get_node_data(node, INSTANCE_DATA_KNOB) - identifier = node_data["creator_identifier"] - - # return template data - product_name = node_data.get("productName") - if product_name is None: - product_name = node_data["subset"] - return get_imageio_node_setting( - node_class="Write", - plugin_name=plugin_names_mapping[identifier], - product_name=product_name - ) - - -def get_dependent_nodes(nodes): - """Get all dependent nodes connected to the list of nodes. - - Looking for connections outside of the nodes in incoming argument. 
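-
-    Example (node names are illustrative):
-        >>> nodes = [nuke.toNode("Blur1"), nuke.toNode("Grade1")]
-        >>> connections_in, connections_out = get_dependent_nodes(nodes)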
-
-    Arguments:
-        nodes (list): list of nuke.Node objects
-
-    Returns:
-        connections_in: dictionary of nodes and their external inputs
-        connections_out: dictionary of nodes and their external output
-    """
-
-    connections_in = dict()
-    connections_out = dict()
-    node_names = [n.name() for n in nodes]
-    for node in nodes:
-        inputs = node.dependencies()
-        outputs = node.dependent()
-        # collect all inputs outside
-        test_in = [(i, n) for i, n in enumerate(inputs)
-                   if n.name() not in node_names]
-        if test_in:
-            connections_in.update({
-                node: test_in
-            })
-        # collect all outputs outside
-        test_out = [i for i in outputs if i.name() not in node_names]
-        if test_out:
-            # only one dependent node is allowed
-            connections_out.update({
-                node: test_out[-1]
-            })
-
-    return connections_in, connections_out
-
-
-def update_node_graph():
-    # Resetting the frame (and toggling the range lock) forces knob
-    # values in the node graph to update.
-    try:
-        root_node_lock = nuke.root()["lock_range"].value()
-        nuke.root()["lock_range"].setValue(not root_node_lock)
-        nuke.root()["lock_range"].setValue(root_node_lock)
-
-        current_frame = nuke.frame()
-        nuke.frame(1)
-        nuke.frame(int(current_frame))
-    except Exception as error:
-        log.warning(error)
-
-
-def find_free_space_to_paste_nodes(
-        nodes,
-        group=nuke.root(),
-        direction="right",
-        offset=300
-):
-    """
-    Get coordinates in the DAG (node graph) for placing new nodes.
-
-    Arguments:
-        nodes (list): list of nuke.Node objects
-        group (nuke.Node) [optional]: group node in whose context to search
-        direction (str) [optional]: where the nodes should be placed
-                                    [left, right, top, bottom]
-        offset (int) [optional]: offset from the rest of the nodes
-
-    Returns:
-        xpos (int): x coordinate in DAG
-        ypos (int): y coordinate in DAG
-    """
-    if len(nodes) == 0:
-        return 0, 0
-
-    group_xpos = list()
-    group_ypos = list()
-
-    # get local coordinates of all nodes
-    nodes_xpos = [n.xpos() for n in nodes] + \
-        [n.xpos() + n.screenWidth() for n in nodes]
-
-    nodes_ypos = [n.ypos() for n in nodes] + \
-        [n.ypos() + n.screenHeight() for n in nodes]
-
-    # get complete screen size of all nodes to be placed in
-    nodes_screen_width = max(nodes_xpos) - min(nodes_xpos)
-    nodes_screen_height = max(nodes_ypos) - min(nodes_ypos)
-
-    # get screen size (r,l,t,b) of all nodes in `group`
-    with group:
-        group_xpos = [n.xpos() for n in nuke.allNodes() if n not in nodes] + \
-            [n.xpos() + n.screenWidth() for n in nuke.allNodes()
-             if n not in nodes]
-        group_ypos = [n.ypos() for n in nuke.allNodes() if n not in nodes] + \
-            [n.ypos() + n.screenHeight() for n in nuke.allNodes()
-             if n not in nodes]
-
-    # calc output left
-    if direction == "left":
-        xpos = min(group_xpos) - abs(nodes_screen_width) - abs(offset)
-        ypos = min(group_ypos)
-        return xpos, ypos
-    # calc output right
-    if direction == "right":
-        xpos = max(group_xpos) + abs(offset)
-        ypos = min(group_ypos)
-        return xpos, ypos
-    # calc output top
-    if direction == "top":
-        xpos = min(group_xpos)
-        ypos = min(group_ypos) - abs(nodes_screen_height) - abs(offset)
-        return xpos, ypos
-    # calc output bottom
-    if direction == "bottom":
-        xpos = min(group_xpos)
-        ypos = max(group_ypos) + abs(offset)
-        return xpos, ypos
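-
-
-# Usage sketch for `find_free_space_to_paste_nodes` (the node name is
-# illustrative, not taken from a real script):
-#
-#   node = nuke.toNode("Blur1")
-#   xpos, ypos = find_free_space_to_paste_nodes(
-#       [node], direction="right", offset=300)
-#   node.setXYpos(xpos, ypos)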
node["selected"].setValue(True) - >>> print(node["selected"].value()) - False - """ - if exclude_nodes: - for node in exclude_nodes: - node["selected"].setValue(False) - - previous_selection = nuke.selectedNodes() - - try: - yield - finally: - # unselect all selection in case there is some - reset_selection() - - # and select all previously selected nodes - if previous_selection: - select_nodes(previous_selection) - - -@contextlib.contextmanager -def swap_node_with_dependency(old_node, new_node): - """ Swap node with dependency - - Swap node with dependency and reconnect all inputs and outputs. - It removes old node. - - Arguments: - old_node (nuke.Node): node to be replaced - new_node (nuke.Node): node to replace with - - Example: - >>> old_node_name = old_node["name"].value() - >>> print(old_node_name) - old_node_name_01 - >>> with swap_node_with_dependency(old_node, new_node) as node_name: - ... new_node["name"].setValue(node_name) - >>> print(new_node["name"].value()) - old_node_name_01 - """ - # preserve position - xpos, ypos = old_node.xpos(), old_node.ypos() - # preserve selection after all is done - outputs = get_node_outputs(old_node) - inputs = old_node.dependencies() - node_name = old_node["name"].value() - - try: - nuke.delete(old_node) - - yield node_name - finally: - - # Reconnect inputs - for i, node in enumerate(inputs): - new_node.setInput(i, node) - # Reconnect outputs - if outputs: - for n, pipes in outputs.items(): - for i in pipes: - n.setInput(i, new_node) - # return to original position - new_node.setXYpos(xpos, ypos) - - -def reset_selection(): - """Deselect all selected nodes""" - for node in nuke.selectedNodes(): - node["selected"].setValue(False) - - -def select_nodes(nodes): - """Selects all inputted nodes - - Arguments: - nodes (Union[list, tuple, set]): nuke nodes to be selected - """ - assert isinstance(nodes, (list, tuple, set)), \ - "nodes has to be list, tuple or set" - - for node in nodes: - node["selected"].setValue(True) - - -def launch_workfiles_app(): - """Show workfiles tool on nuke launch. - - Trigger to show workfiles tool on application launch. Can be executed only - once all other calls are ignored. - - Workfiles tool show is deferred after application initialization using - QTimer. - """ - - if Context.workfiles_launched: - return - - Context.workfiles_launched = True - - # get all important settings - open_at_start = env_value_to_bool( - env_key="AYON_WORKFILE_TOOL_ON_START", - default=None) - - # return if none is defined - if not open_at_start: - return - - # Show workfiles tool using timer - # - this will be probably triggered during initialization in that case - # the application is not be able to show uis so it must be - # deferred using timer - # - timer should be processed when initialization ends - # When applications starts to process events. - timer = QtCore.QTimer() - timer.timeout.connect(_launch_workfile_app) - timer.setInterval(100) - Context.workfiles_tool_timer = timer - timer.start() - - -def _launch_workfile_app(): - # Safeguard to not show window when application is still starting up - # or is already closing down. 
-def _launch_workfile_app():
-    # Safeguard to not show window when application is still starting up
-    # or is already closing down.
-    closing_down = QtWidgets.QApplication.closingDown()
-    starting_up = QtWidgets.QApplication.startingUp()
-
-    # Stop the timer if application finished start up or is closing down
-    if closing_down or not starting_up:
-        Context.workfiles_tool_timer.stop()
-        Context.workfiles_tool_timer = None
-
-    # Skip if application is starting up or closing down
-    if starting_up or closing_down:
-        return
-
-    # Make sure on top is enabled on first show so the window is not hidden
-    # under main nuke window
-    # - this happened on CentOS 7 because the focus of Nuke changes to the
-    #   main window after showing, which moves the workfiles tool under it
-    host_tools.show_workfiles(parent=None, on_top=True)
-
-
-@deprecated("ayon_nuke.api.lib.start_workfile_template_builder")
-def process_workfile_builder():
-    """[DEPRECATED] Process workfile builder on nuke start.
-
-    This function is deprecated and will be removed in future versions.
-    Use settings in `project_settings/nuke/templated_workfile_build`, which
-    are supported by the api `start_workfile_template_builder()`.
-    """
-
-    # to avoid looping of the callback, remove it!
-    nuke.removeOnCreate(process_workfile_builder, nodeClass="Root")
-
-    # get state from settings
-    project_settings = get_current_project_settings()
-    workfile_builder = project_settings["nuke"].get(
-        "workfile_builder", {})
-
-    # get settings
-    create_fv_on = workfile_builder.get("create_first_version") or None
-    builder_on = workfile_builder.get("builder_on_start") or None
-
-    last_workfile_path = os.environ.get("AYON_LAST_WORKFILE")
-
-    # generate first version if the file does not exist
-    # and the feature is enabled
-    if create_fv_on and not os.path.exists(last_workfile_path):
-        # get custom template path if any
-        custom_template_path = get_current_context_custom_workfile_template(
-            project_settings=project_settings
-        )
-
-        # if custom template is defined
-        if custom_template_path:
-            log.info("Adding nodes from `{}`...".format(
-                custom_template_path
-            ))
-            try:
-                # import nodes into current script
-                nuke.nodePaste(custom_template_path)
-            except RuntimeError:
-                raise RuntimeError((
-                    "Template defined for project: {} is not working. "
-                    "Talk to your manager for advice").format(
-                        custom_template_path))
-
-        # if builder at start is defined
-        if builder_on:
-            log.info("Building nodes from presets...")
-            # build nodes by defined presets
-            BuildWorkfile().process()
-
-        log.info("Saving script as version `{}`...".format(
-            last_workfile_path
-        ))
-        # save file as version
-        save_file(last_workfile_path)
-        return
-
-
-def start_workfile_template_builder():
-    from .workfile_template_builder import (
-        build_workfile_template
-    )
-
-    # to avoid looping of the callback (it would duplicate the workfile
-    # content), remove it!
-    nuke.removeOnCreate(start_workfile_template_builder, nodeClass="Root")
-
-    log.info("Starting workfile template builder...")
-    try:
-        build_workfile_template(workfile_creation_enabled=True)
-    except TemplateProfileNotFound:
-        log.warning("Template profile not found. Skipping...")
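-
-
-# Registration sketch: the builder is hooked to Root-node creation (this is
-# the call `add_nuke_callbacks` makes) and unregisters itself on first run:
-#
-#   nuke.addOnCreate(start_workfile_template_builder, nodeClass="Root")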
-def add_scripts_menu():
-    try:
-        from scriptsmenu import launchfornuke
-    except ImportError:
-        log.warning(
-            "Skipping studio.menu install, because "
-            "'scriptsmenu' module seems unavailable."
-        )
-        return
-
-    # load configuration of custom menu
-    project_name = get_current_project_name()
-    project_settings = get_project_settings(project_name)
-    config = project_settings["nuke"]["scriptsmenu"]["definition"]
-    _menu = project_settings["nuke"]["scriptsmenu"]["name"]
-
-    if not config:
-        log.warning("Skipping studio menu, no definition found.")
-        return
-
-    # run the launcher for the Nuke menu
-    studio_menu = launchfornuke.main(title=_menu.title())
-
-    # apply configuration
-    studio_menu.build_from_configuration(studio_menu, config)
-
-
-def add_scripts_gizmo():
-
-    # load configuration of custom menu
-    project_name = get_current_project_name()
-    project_settings = get_project_settings(project_name)
-    platform_name = platform.system().lower()
-
-    for gizmo_settings in project_settings["nuke"]["gizmo"]:
-        gizmo_list_definition = gizmo_settings["gizmo_definition"]
-        toolbar_name = gizmo_settings["toolbar_menu_name"]
-        # gizmo_toolbar_path = gizmo_settings["gizmo_toolbar_path"]
-        gizmo_source_dir = gizmo_settings.get(
-            "gizmo_source_dir", {}).get(platform_name)
-        toolbar_icon_path = gizmo_settings.get(
-            "toolbar_icon_path", {}).get(platform_name)
-
-        if not gizmo_source_dir:
-            log.debug("Skipping studio gizmo `{}`, "
-                      "no gizmo path found.".format(toolbar_name)
-                      )
-            return
-
-        if not gizmo_list_definition:
-            log.debug("Skipping studio gizmo `{}`, "
-                      "no definition found.".format(toolbar_name)
-                      )
-            return
-
-        if toolbar_icon_path:
-            try:
-                toolbar_icon_path = toolbar_icon_path.format(**os.environ)
-            except KeyError as e:
-                log.error(
-                    "This environment variable doesn't exist: {}".format(e)
-                )
-
-        existing_gizmo_path = []
-        for source_dir in gizmo_source_dir:
-            try:
-                resolve_source_dir = source_dir.format(**os.environ)
-            except KeyError as e:
-                log.error(
-                    "This environment variable doesn't exist: {}".format(e)
-                )
-                continue
-            if not os.path.exists(resolve_source_dir):
-                log.warning(
-                    "The source of gizmo `{}` does not exist".format(
-                        resolve_source_dir
-                    )
-                )
-                continue
-            existing_gizmo_path.append(resolve_source_dir)
-
-        # run the launcher for the Nuke toolbar
-        toolbar_menu = gizmo_menu.GizmoMenu(
-            title=toolbar_name,
-            icon=toolbar_icon_path
-        )
-
-        # apply configuration
-        toolbar_menu.add_gizmo_path(existing_gizmo_path)
-        toolbar_menu.build_from_configuration(gizmo_list_definition)
-
-
-class NukeDirmap(HostDirmap):
-    def __init__(self, file_name, *args, **kwargs):
-        """
-        Args:
-            file_name (str): full path of referenced file from workfiles
-            *args (tuple): Positional arguments for 'HostDirmap' class
-            **kwargs (dict): Keyword arguments for 'HostDirmap' class
-        """
-
-        self.file_name = file_name
-        super(NukeDirmap, self).__init__(*args, **kwargs)
-
-    def on_enable_dirmap(self):
-        pass
-
-    def dirmap_routine(self, source_path, destination_path):
-        source_path = source_path.lower().replace(os.sep, '/')
-        destination_path = destination_path.lower().replace(os.sep, '/')
-        if platform.system().lower() == "windows":
-            self.file_name = self.file_name.lower().replace(
-                source_path, destination_path)
-        else:
-            self.file_name = self.file_name.replace(
-                source_path, destination_path)
-
-
-class DirmapCache:
-    """Caching class to get settings and sitesync easily and only once."""
-    _project_name = None
-    _project_settings = None
-    _sitesync_addon_discovered = False
-    _sitesync_addon = None
-    _mapping = None
-
-    @classmethod
-    def project_name(cls):
-        if cls._project_name is None:
-            cls._project_name = os.getenv("AYON_PROJECT_NAME")
-        return cls._project_name
-
-    @classmethod
-    def 
project_settings(cls): - if cls._project_settings is None: - cls._project_settings = get_project_settings(cls.project_name()) - return cls._project_settings - - @classmethod - def sitesync_addon(cls): - if not cls._sitesync_addon_discovered: - cls._sitesync_addon_discovered = True - cls._sitesync_addon = AddonsManager().get("sitesync") - return cls._sitesync_addon - - @classmethod - def mapping(cls): - return cls._mapping - - @classmethod - def set_mapping(cls, mapping): - cls._mapping = mapping - - -def dirmap_file_name_filter(file_name): - """Nuke callback function with single full path argument. - - Checks project settings for potential mapping from source to dest. - """ - - dirmap_processor = NukeDirmap( - file_name, - "nuke", - DirmapCache.project_name(), - DirmapCache.project_settings(), - DirmapCache.sitesync_addon(), - ) - if not DirmapCache.mapping(): - DirmapCache.set_mapping(dirmap_processor.get_mappings()) - - dirmap_processor.process_dirmap(DirmapCache.mapping()) - if os.path.exists(dirmap_processor.file_name): - return dirmap_processor.file_name - return file_name - - -@contextlib.contextmanager -def node_tempfile(): - """Create a temp file where node is pasted during duplication. - - This is to avoid using clipboard for node duplication. - """ - - tmp_file = tempfile.NamedTemporaryFile( - mode="w", prefix="openpype_nuke_temp_", suffix=".nk", delete=False - ) - tmp_file.close() - node_tempfile_path = tmp_file.name - - try: - # Yield the path where node can be copied - yield node_tempfile_path - - finally: - # Remove the file at the end - os.remove(node_tempfile_path) - - -def duplicate_node(node): - reset_selection() - - # select required node for duplication - node.setSelected(True) - - with node_tempfile() as filepath: - # copy selected to temp filepath - nuke.nodeCopy(filepath) - - # reset selection - reset_selection() - - # paste node and selection is on it only - dupli_node = nuke.nodePaste(filepath) - - # reset selection - reset_selection() - - return dupli_node - - -def get_group_io_nodes(nodes): - """Get the input and the output of a group of nodes.""" - - if not nodes: - raise ValueError("there is no nodes in the list") - - input_node = None - output_node = None - - if len(nodes) == 1: - input_node = output_node = nodes[0] - - else: - for node in nodes: - if "Input" in node.name(): - input_node = node - - if "Output" in node.name(): - output_node = node - - if input_node is not None and output_node is not None: - break - - if input_node is None: - log.warning("No Input found") - - if output_node is None: - log.warning("No Output found") - - return input_node, output_node - - -def get_extreme_positions(nodes): - """Get the 4 numbers that represent the box of a group of nodes.""" - - if not nodes: - raise ValueError("there is no nodes in the list") - - nodes_xpos = [n.xpos() for n in nodes] + \ - [n.xpos() + n.screenWidth() for n in nodes] - - nodes_ypos = [n.ypos() for n in nodes] + \ - [n.ypos() + n.screenHeight() for n in nodes] - - min_x, min_y = (min(nodes_xpos), min(nodes_ypos)) - max_x, max_y = (max(nodes_xpos), max(nodes_ypos)) - return min_x, min_y, max_x, max_y - - -def refresh_node(node): - """Correct a bug caused by the multi-threading of nuke. - - Refresh the node to make sure that it takes the desired attributes. - """ - - x = node.xpos() - y = node.ypos() - nuke.autoplaceSnap(node) - node.setXYpos(x, y) - - -def refresh_nodes(nodes): - for node in nodes: - refresh_node(node) - - -def get_names_from_nodes(nodes): - """Get list of nodes names. 
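-
-    Example (illustrative result):
-        >>> get_names_from_nodes(nuke.selectedNodes())
-        ['Blur1', 'Grade1']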
-
-    Args:
-        nodes(List[nuke.Node]): List of nodes to convert into names.
-
-    Returns:
-        List[str]: Name of passed nodes.
-    """
-
-    return [
-        node.name()
-        for node in nodes
-    ]
-
-
-def get_nodes_by_names(names):
-    """Get list of nuke nodes based on their names.
-
-    Args:
-        names (List[str]): List of node names to be found.
-
-    Returns:
-        List[nuke.Node]: List of nodes found by name.
-    """
-
-    return [
-        nuke.toNode(name)
-        for name in names
-    ]
-
-
-def get_viewer_config_from_string(input_string):
-    """Convert string to display and viewer string
-
-    Args:
-        input_string (str): string with viewer
-
-    Raises:
-        IndexError: if more than one slash is in the input string
-        IndexError: if a closing bracket is missing
-
-    Returns:
-        tuple[str]: display, viewer
-    """
-    display = None
-    viewer = input_string
-    # check if () or / or \ in name
-    if "/" in viewer:
-        split = viewer.split("/")
-
-        # raise if there is more than one slash
-        if len(split) > 2:
-            raise IndexError((
-                "Viewer Input string is not correct. "
-                "More than one `/` slash! {}"
-            ).format(input_string))
-
-        viewer = split[1]
-        display = split[0]
-    elif "(" in viewer:
-        pattern = r"([\w\d\s\.\-]+).*[(](.*)[)]"
-        result_ = re.findall(pattern, viewer)
-        try:
-            result_ = result_.pop()
-            display = str(result_[1]).rstrip()
-            viewer = str(result_[0]).rstrip()
-        except IndexError:
-            raise IndexError((
-                "Viewer Input string is not correct. "
-                "Missing bracket! {}"
-            ).format(input_string))
-
-    return (display, viewer)
-
-
-def create_viewer_profile_string(viewer, display=None, path_like=False):
-    """Convert viewer and display to string
-
-    Args:
-        viewer (str): viewer name
-        display (Optional[str]): display name
-        path_like (Optional[bool]): if True, return path like string
-
-    Returns:
-        str: viewer config string
-    """
-    if not display:
-        return viewer
-
-    if path_like:
-        return "{}/{}".format(display, viewer)
-    return "{} ({})".format(viewer, display)
-
-
-def get_filenames_without_hash(filename, frame_start, frame_end):
-    """Get filenames without frame hash
-    i.e. 
"renderCompositingMain.baking.0001.exr" - - Args: - filename (str): filename with frame hash - frame_start (str): start of the frame - frame_end (str): end of the frame - - Returns: - list: filename per frame of the sequence - """ - filenames = [] - for frame in range(int(frame_start), (int(frame_end) + 1)): - if "#" in filename: - # use regex to convert #### to {:0>4} - def replace(match): - return "{{:0>{}}}".format(len(match.group())) - filename_without_hashes = re.sub("#+", replace, filename) - new_filename = filename_without_hashes.format(frame) - filenames.append(new_filename) - return filenames - - -def create_camera_node_by_version(): - """Function to create the camera with the latest node class - For Nuke version 14.0 or later, the Camera4 camera node class - would be used - For the version before, the Camera2 camera node class - would be used - Returns: - Node: camera node - """ - nuke_number_version = nuke.NUKE_VERSION_MAJOR - if nuke_number_version >= 14: - return nuke.createNode("Camera4") - else: - return nuke.createNode("Camera2") - - -def link_knobs(knobs, node, group_node): - """Link knobs from inside `group_node`""" - - missing_knobs = [] - for knob in knobs: - if knob in group_node.knobs(): - continue - - if knob not in node.knobs().keys(): - missing_knobs.append(knob) - - link = nuke.Link_Knob("") - link.makeLink(node.name(), knob) - link.setName(knob) - link.setFlag(0x1000) - group_node.addKnob(link) - - if missing_knobs: - raise ValueError( - "Write node exposed knobs missing:\n\n{}\n\nPlease review" - " project settings.".format("\n".join(missing_knobs)) - ) diff --git a/server_addon/nuke/client/ayon_nuke/api/pipeline.py b/server_addon/nuke/client/ayon_nuke/api/pipeline.py deleted file mode 100644 index 2ba430c272..0000000000 --- a/server_addon/nuke/client/ayon_nuke/api/pipeline.py +++ /dev/null @@ -1,641 +0,0 @@ -import nuke - -import os -import importlib -from collections import OrderedDict, defaultdict - -import pyblish.api - -from ayon_core.host import ( - HostBase, - IWorkfileHost, - ILoadHost, - IPublishHost -) -from ayon_core.settings import get_current_project_settings -from ayon_core.lib import register_event_callback, Logger -from ayon_core.pipeline import ( - register_loader_plugin_path, - register_creator_plugin_path, - register_inventory_action_path, - register_workfile_build_plugin_path, - AYON_INSTANCE_ID, - AVALON_INSTANCE_ID, - AVALON_CONTAINER_ID, - get_current_folder_path, - get_current_task_name, - registered_host, -) -from ayon_core.pipeline.workfile import BuildWorkfile -from ayon_core.tools.utils import host_tools -from ayon_nuke import NUKE_ROOT_DIR -from ayon_core.tools.workfile_template_build import open_template_ui - -from .lib import ( - Context, - ROOT_DATA_KNOB, - INSTANCE_DATA_KNOB, - get_main_window, - WorkfileSettings, - start_workfile_template_builder, - launch_workfiles_app, - check_inventory_versions, - set_avalon_knob_data, - read_avalon_data, - on_script_load, - dirmap_file_name_filter, - add_scripts_menu, - add_scripts_gizmo, - get_node_data, - set_node_data, - MENU_LABEL, -) -from .workfile_template_builder import ( - build_workfile_template, - create_placeholder, - update_placeholder, - NukeTemplateBuilder, -) -from .workio import ( - open_file, - save_file, - file_extensions, - has_unsaved_changes, - work_root, - current_file -) -from .constants import ASSIST -from . 
import push_to_project - -log = Logger.get_logger(__name__) - -PLUGINS_DIR = os.path.join(NUKE_ROOT_DIR, "plugins") -PUBLISH_PATH = os.path.join(PLUGINS_DIR, "publish") -LOAD_PATH = os.path.join(PLUGINS_DIR, "load") -CREATE_PATH = os.path.join(PLUGINS_DIR, "create") -INVENTORY_PATH = os.path.join(PLUGINS_DIR, "inventory") -WORKFILE_BUILD_PATH = os.path.join(PLUGINS_DIR, "workfile_build") - -# registering pyblish gui regarding settings in presets -if os.getenv("PYBLISH_GUI", None): - pyblish.api.register_gui(os.getenv("PYBLISH_GUI", None)) - - -class NukeHost( - HostBase, IWorkfileHost, ILoadHost, IPublishHost -): - name = "nuke" - - def open_workfile(self, filepath): - return open_file(filepath) - - def save_workfile(self, filepath=None): - return save_file(filepath) - - def work_root(self, session): - return work_root(session) - - def get_current_workfile(self): - return current_file() - - def workfile_has_unsaved_changes(self): - return has_unsaved_changes() - - def get_workfile_extensions(self): - return file_extensions() - - def get_containers(self): - return ls() - - def install(self): - """Installing all requirements for Nuke host""" - - pyblish.api.register_host("nuke") - - self.log.info("Registering Nuke plug-ins..") - pyblish.api.register_plugin_path(PUBLISH_PATH) - register_loader_plugin_path(LOAD_PATH) - register_creator_plugin_path(CREATE_PATH) - register_inventory_action_path(INVENTORY_PATH) - register_workfile_build_plugin_path(WORKFILE_BUILD_PATH) - - # Register AYON event for workfiles loading. - register_event_callback("workio.open_file", check_inventory_versions) - register_event_callback("taskChanged", change_context_label) - - _install_menu() - - # add script menu - add_scripts_menu() - add_scripts_gizmo() - - add_nuke_callbacks() - - launch_workfiles_app() - - def get_context_data(self): - root_node = nuke.root() - return get_node_data(root_node, ROOT_DATA_KNOB) - - def update_context_data(self, data, changes): - root_node = nuke.root() - set_node_data(root_node, ROOT_DATA_KNOB, data) - - -def add_nuke_callbacks(): - """ Adding all available nuke callbacks - """ - nuke_settings = get_current_project_settings()["nuke"] - workfile_settings = WorkfileSettings() - - # Set context settings. - nuke.addOnCreate( - workfile_settings.set_context_settings, nodeClass="Root") - - # adding favorites to file browser - nuke.addOnCreate(workfile_settings.set_favorites, nodeClass="Root") - - # template builder callbacks - nuke.addOnCreate(start_workfile_template_builder, nodeClass="Root") - - # fix ffmpeg settings on script - nuke.addOnScriptLoad(on_script_load) - - # set checker for last versions on loaded containers - nuke.addOnScriptLoad(check_inventory_versions) - nuke.addOnScriptSave(check_inventory_versions) - - # set apply all workfile settings on script load and save - nuke.addOnScriptLoad(WorkfileSettings().set_context_settings) - - if nuke_settings["dirmap"]["enabled"]: - log.info("Added Nuke's dir-mapping callback ...") - # Add dirmap for file paths. - nuke.addFilenameFilter(dirmap_file_name_filter) - - log.info("Added Nuke callbacks ...") - - -def reload_config(): - """Attempt to reload pipeline at run-time. - - CAUTION: This is primarily for development and debugging purposes. 
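-
-    Example:
-        >>> # typically run from Nuke's Script Editor after editing
-        >>> # the ayon_nuke modules on disk
-        >>> reload_config()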
- - """ - - for module in ( - "ayon_nuke.api.actions", - "ayon_nuke.api.menu", - "ayon_nuke.api.plugin", - "ayon_nuke.api.lib", - ): - log.info("Reloading module: {}...".format(module)) - - module = importlib.import_module(module) - - try: - importlib.reload(module) - except AttributeError as e: - from importlib import reload - log.warning("Cannot reload module: {}".format(e)) - reload(module) - - -def _show_workfiles(): - # Make sure parent is not set - # - this makes Workfiles tool as separated window which - # avoid issues with reopening - # - it is possible to explicitly change on top flag of the tool - host_tools.show_workfiles(parent=None, on_top=False) - - -def get_context_label(): - return "{0}, {1}".format( - get_current_folder_path(), - get_current_task_name() - ) - - -def _install_menu(): - """Install AYON menu into Nuke's main menu bar.""" - - # uninstall original AYON menu - main_window = get_main_window() - menubar = nuke.menu("Nuke") - menu = menubar.addMenu(MENU_LABEL) - - if not ASSIST: - label = get_context_label() - context_action_item = menu.addCommand("Context") - context_action_item.setEnabled(False) - - Context.context_action_item = context_action_item - - context_action = context_action_item.action() - context_action.setText(label) - - # add separator after context label - menu.addSeparator() - - menu.addCommand( - "Work Files...", - _show_workfiles - ) - - menu.addSeparator() - if not ASSIST: - # only add parent if nuke version is 14 or higher - # known issue with no solution yet - menu.addCommand( - "Create...", - lambda: host_tools.show_publisher( - parent=main_window, - tab="create" - ) - ) - # only add parent if nuke version is 14 or higher - # known issue with no solution yet - menu.addCommand( - "Publish...", - lambda: host_tools.show_publisher( - parent=main_window, - tab="publish" - ) - ) - - menu.addCommand( - "Load...", - lambda: host_tools.show_loader( - parent=main_window, - use_context=True - ) - ) - menu.addCommand( - "Manage...", - lambda: host_tools.show_scene_inventory(parent=main_window) - ) - menu.addSeparator() - menu.addCommand( - "Library...", - lambda: host_tools.show_library_loader( - parent=main_window - ) - ) - menu.addSeparator() - menu.addCommand( - "Set Resolution", - lambda: WorkfileSettings().reset_resolution() - ) - menu.addCommand( - "Set Frame Range", - lambda: WorkfileSettings().reset_frame_range_handles() - ) - menu.addCommand( - "Set Colorspace", - lambda: WorkfileSettings().set_colorspace() - ) - menu.addCommand( - "Apply All Settings", - lambda: WorkfileSettings().set_context_settings() - ) - - menu.addSeparator() - menu.addCommand( - "Build Workfile", - lambda: BuildWorkfile().process() - ) - - menu_template = menu.addMenu("Template Builder") - menu_template.addCommand( - "Build Workfile from template", - lambda: build_workfile_template() - ) - - if not ASSIST: - menu_template.addSeparator() - menu_template.addCommand( - "Open template", - lambda: open_template_ui( - NukeTemplateBuilder(registered_host()), get_main_window() - ) - ) - menu_template.addCommand( - "Create Place Holder", - lambda: create_placeholder() - ) - menu_template.addCommand( - "Update Place Holder", - lambda: update_placeholder() - ) - - menu.addCommand( - "Push to Project", - lambda: push_to_project.main() - ) - - menu.addSeparator() - menu.addCommand( - "Experimental tools...", - lambda: host_tools.show_experimental_tools_dialog(parent=main_window) - ) - menu.addSeparator() - # add reload pipeline only in debug mode - if bool(os.getenv("NUKE_DEBUG")): - 
menu.addSeparator()
-        menu.addCommand("Reload Pipeline", reload_config)
-
-    # adding shortcuts
-    add_shortcuts_from_presets()
-
-
-def change_context_label():
-    if ASSIST:
-        return
-
-    context_action_item = Context.context_action_item
-    if context_action_item is None:
-        return
-    context_action = context_action_item.action()
-
-    old_label = context_action.text()
-    new_label = get_context_label()
-
-    context_action.setText(new_label)
-
-    log.info("Task label changed from `{}` to `{}`".format(
-        old_label, new_label))
-
-
-def add_shortcuts_from_presets():
-    menubar = nuke.menu("Nuke")
-    nuke_presets = get_current_project_settings()["nuke"]["general"]
-
-    if nuke_presets.get("menu"):
-        menu_label_mapping = {
-            "create": "Create...",
-            "manage": "Manage...",
-            "load": "Load...",
-            "build_workfile": "Build Workfile",
-            "publish": "Publish..."
-        }
-
-        for command_name, shortcut_str in nuke_presets.get("menu").items():
-            log.info("menu_name `{}` | menu_label `{}`".format(
-                command_name, MENU_LABEL
-            ))
-            log.info("Adding Shortcut `{}` to `{}`".format(
-                shortcut_str, command_name
-            ))
-            try:
-                menu = menubar.findItem(MENU_LABEL)
-                item_label = menu_label_mapping[command_name]
-                menuitem = menu.findItem(item_label)
-                menuitem.setShortcut(shortcut_str)
-            except (AttributeError, KeyError) as e:
-                log.error(e)
-
-
-def containerise(node,
-                 name,
-                 namespace,
-                 context,
-                 loader=None,
-                 data=None):
-    """Bundle `node` into an assembly and imprint it with metadata
-
-    Containerisation enables tracking of version, author and origin
-    for loaded assets.
-
-    Arguments:
-        node (nuke.Node): Nuke's node object to imprint as container
-        name (str): Name of resulting assembly
-        namespace (str): Namespace under which to host container
-        context (dict): Asset information
-        loader (str, optional): Name of loader used to produce this container.
-        data (dict, optional): Additional data to imprint.
-
-    Returns:
-        node (nuke.Node): containerised nuke's node object
-
-    """
-    data = OrderedDict(
-        [
-            ("schema", "openpype:container-2.0"),
-            ("id", AVALON_CONTAINER_ID),
-            ("name", name),
-            ("namespace", namespace),
-            ("loader", str(loader)),
-            ("representation", context["representation"]["id"]),
-        ],
-
-        **data or dict()
-    )
-
-    set_avalon_knob_data(node, data)
-
-    # set tab to first native
-    node.setTab(0)
-
-    return node
-
-
-def parse_container(node):
-    """Returns containerised data of a node
-
-    Reads the imprinted data from `containerise`.
-
-    Arguments:
-        node (nuke.Node): Nuke's node object to read imprinted data
-
-    Returns:
-        dict: The container schema data for this container node.
-
-    """
-    data = read_avalon_data(node)
-
-    # if any required key is missing, this is not a container
-    required = ["schema", "id", "name",
-                "namespace", "loader", "representation"]
-    if not all(key in data for key in required):
-        return
-
-    # Store the node's name
-    data.update({
-        "objectName": node.fullName(),
-        "node": node,
-    })
-
-    return data
-
-
-def update_container(node, keys=None):
-    """Returns node with updated container data
-
-    Arguments:
-        node (nuke.Node): The node in Nuke to imprint as container.
-        keys (dict, optional): data which should be updated
-
-    Returns:
-        node (nuke.Node): nuke node with updated container data
-
-    Raises:
-        TypeError: if an invalid container node is given
-
-    """
-    keys = keys or dict()
-
-    container = parse_container(node)
-    if not container:
-        raise TypeError("Not a valid container node.")
-
-    container.update(keys)
-    node = set_avalon_knob_data(node, container)
-
-    return node
-
-
-def ls():
-    """List available containers.
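-
-    Example:
-        >>> for container in ls():
-        ...     print(container["objectName"])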
- - This function is used by the Container Manager in Nuke. You'll - need to implement a for-loop that then *yields* one Container at - a time. - """ - all_nodes = nuke.allNodes(recurseGroups=False) - - nodes = [n for n in all_nodes] - - for n in nodes: - container = parse_container(n) - if container: - yield container - - -def list_instances(creator_id=None): - """List all created instances to publish from current workfile. - - For SubsetManager - - Args: - creator_id (Optional[str]): creator identifier - - Returns: - (list) of dictionaries matching instances format - """ - instances_by_order = defaultdict(list) - product_instances = [] - instance_ids = set() - - for node in nuke.allNodes(recurseGroups=True): - - if node.Class() in ["Viewer", "Dot"]: - continue - - try: - if node["disable"].value(): - continue - except NameError: - # pass if disable knob doesn't exist - pass - - # get data from avalon knob - instance_data = get_node_data( - node, INSTANCE_DATA_KNOB) - - if not instance_data: - continue - - if instance_data["id"] not in { - AYON_INSTANCE_ID, AVALON_INSTANCE_ID - }: - continue - - if creator_id and instance_data["creator_identifier"] != creator_id: - continue - - instance_id = instance_data.get("instance_id") - if not instance_id: - pass - elif instance_id in instance_ids: - instance_data.pop("instance_id") - else: - instance_ids.add(instance_id) - - # node name could change, so update product name data - _update_product_name_data(instance_data, node) - - if "render_order" not in node.knobs(): - product_instances.append((node, instance_data)) - continue - - order = int(node["render_order"].value()) - instances_by_order[order].append((node, instance_data)) - - # Sort instances based on order attribute or product name. - # TODO: remove in future Publisher enhanced with sorting - ordered_instances = [] - for key in sorted(instances_by_order.keys()): - instances_by_product = defaultdict(list) - for node, data_ in instances_by_order[key]: - product_name = data_.get("productName") - if product_name is None: - product_name = data_.get("subset") - instances_by_product[product_name].append((node, data_)) - for subkey in sorted(instances_by_product.keys()): - ordered_instances.extend(instances_by_product[subkey]) - - instances_by_product = defaultdict(list) - for node, data_ in product_instances: - product_name = data_.get("productName") - if product_name is None: - product_name = data_.get("subset") - instances_by_product[product_name].append((node, data_)) - for key in sorted(instances_by_product.keys()): - ordered_instances.extend(instances_by_product[key]) - - return ordered_instances - - -def _update_product_name_data(instance_data, node): - """Update product name data in instance data. - - Args: - instance_data (dict): instance creator data - node (nuke.Node): nuke node - """ - # make sure node name is product name - old_product_name = instance_data.get("productName") - if old_product_name is None: - old_product_name = instance_data.get("subset") - old_variant = instance_data["variant"] - product_name_root = old_product_name.replace(old_variant, "") - - new_product_name = node.name() - new_variant = new_product_name.replace(product_name_root, "") - - instance_data["productName"] = new_product_name - instance_data["variant"] = new_variant - - -def remove_instance(instance): - """Remove instance from current workfile metadata. 
- - For SubsetManager - - Args: - instance (dict): instance representation from subsetmanager model - """ - instance_node = instance.transient_data["node"] - instance_knob = instance_node.knobs()[INSTANCE_DATA_KNOB] - instance_node.removeKnob(instance_knob) - nuke.delete(instance_node) - - -def select_instance(instance): - """ - Select instance in Node View - - Args: - instance (dict): instance representation from subsetmanager model - """ - instance_node = instance.transient_data["node"] - instance_node["selected"].setValue(True) diff --git a/server_addon/nuke/client/ayon_nuke/api/plugin.py b/server_addon/nuke/client/ayon_nuke/api/plugin.py deleted file mode 100644 index fc30f328c7..0000000000 --- a/server_addon/nuke/client/ayon_nuke/api/plugin.py +++ /dev/null @@ -1,1227 +0,0 @@ -import nuke -import re -import os -import sys -import six -import random -import string -from collections import defaultdict - -from ayon_core.settings import get_current_project_settings -from ayon_core.lib import ( - BoolDef, - EnumDef -) -from ayon_core.lib import StringTemplate -from ayon_core.pipeline import ( - LoaderPlugin, - CreatorError, - Creator as NewCreator, - CreatedInstance, - get_current_task_name, - AYON_INSTANCE_ID, - AVALON_INSTANCE_ID, -) -from ayon_core.pipeline.colorspace import ( - get_display_view_colorspace_name, - get_colorspace_settings_from_publish_context, - set_colorspace_data_to_representation -) -from ayon_core.lib.transcoding import ( - VIDEO_EXTENSIONS -) -from .lib import ( - INSTANCE_DATA_KNOB, - Knobby, - maintained_selection, - get_avalon_knob_data, - set_node_knobs_from_settings, - set_node_data, - get_node_data, - get_view_process_node, - get_filenames_without_hash, - link_knobs -) -from .pipeline import ( - list_instances, - remove_instance -) - - -def _collect_and_cache_nodes(creator): - key = "openpype.nuke.nodes" - if key not in creator.collection_shared_data: - instances_by_identifier = defaultdict(list) - for item in list_instances(): - _, instance_data = item - identifier = instance_data["creator_identifier"] - instances_by_identifier[identifier].append(item) - creator.collection_shared_data[key] = instances_by_identifier - return creator.collection_shared_data[key] - - -class NukeCreatorError(CreatorError): - pass - - -class NukeCreator(NewCreator): - selected_nodes = [] - - def pass_pre_attributes_to_instance( - self, - instance_data, - pre_create_data, - keys=None - ): - if not keys: - keys = pre_create_data.keys() - - creator_attrs = instance_data["creator_attributes"] = {} - for pass_key in keys: - creator_attrs[pass_key] = pre_create_data[pass_key] - - def check_existing_product(self, product_name): - """Make sure product name is unique. - - It search within all nodes recursively - and checks if product name is found in - any node having instance data knob. - - Arguments: - product_name (str): Product name - """ - - for node in nuke.allNodes(recurseGroups=True): - # make sure testing node is having instance knob - if INSTANCE_DATA_KNOB not in node.knobs().keys(): - continue - node_data = get_node_data(node, INSTANCE_DATA_KNOB) - - if not node_data: - # a node has no instance data - continue - - # test if product name is matching - if node_data.get("productType") == product_name: - raise NukeCreatorError( - ( - "A publish instance for '{}' already exists " - "in nodes! Please change the variant " - "name to ensure unique output." 
-                ).format(product_name)
-            )
-
-    def create_instance_node(
-        self,
-        node_name,
-        knobs=None,
-        parent=None,
-        node_type=None
-    ):
-        """Create node representing instance.
-
-        Arguments:
-            node_name (str): Name of the new node.
-            knobs (OrderedDict): node knob names and values
-            parent (str): Name of the parent node.
-            node_type (str, optional): Nuke node Class.
-
-        Returns:
-            nuke.Node: Newly created instance node.
-
-        """
-        node_type = node_type or "NoOp"
-
-        node_knobs = knobs or {}
-
-        # set parent node
-        parent_node = nuke.root()
-        if parent:
-            parent_node = nuke.toNode(parent)
-
-        try:
-            with parent_node:
-                created_node = nuke.createNode(node_type)
-                created_node["name"].setValue(node_name)
-
-                for key, values in node_knobs.items():
-                    if key in created_node.knobs():
-                        created_node[key].setValue(values)
-        except Exception as _err:
-            raise NukeCreatorError("Creation failed: {}".format(_err))
-
-        return created_node
-
-    def set_selected_nodes(self, pre_create_data):
-        if pre_create_data.get("use_selection"):
-            self.selected_nodes = nuke.selectedNodes()
-            if self.selected_nodes == []:
-                raise NukeCreatorError("Creator error: No active selection")
-        else:
-            self.selected_nodes = []
-
-    def create(self, product_name, instance_data, pre_create_data):
-
-        # make sure selected nodes are added
-        self.set_selected_nodes(pre_create_data)
-
-        # make sure product name is unique
-        self.check_existing_product(product_name)
-
-        try:
-            instance_node = self.create_instance_node(
-                product_name,
-                node_type=instance_data.pop("node_type", None)
-            )
-            instance = CreatedInstance(
-                self.product_type,
-                product_name,
-                instance_data,
-                self
-            )
-
-            instance.transient_data["node"] = instance_node
-
-            self._add_instance_to_context(instance)
-
-            set_node_data(
-                instance_node, INSTANCE_DATA_KNOB, instance.data_to_store())
-
-            return instance
-
-        except Exception as er:
-            six.reraise(
-                NukeCreatorError,
-                NukeCreatorError("Creator error: {}".format(er)),
-                sys.exc_info()[2])
-
-    def collect_instances(self):
-        cached_instances = _collect_and_cache_nodes(self)
-        attr_def_keys = {
-            attr_def.key
-            for attr_def in self.get_instance_attr_defs()
-        }
-        attr_def_keys.discard(None)
-
-        for (node, data) in cached_instances[self.identifier]:
-            created_instance = CreatedInstance.from_existing(
-                data, self
-            )
-            created_instance.transient_data["node"] = node
-            self._add_instance_to_context(created_instance)
-
-            # drop creator attributes that no longer have a definition
-            for key in (
-                set(created_instance["creator_attributes"].keys())
-                - attr_def_keys
-            ):
-                created_instance["creator_attributes"].pop(key)
-
-    def update_instances(self, update_list):
-        for created_inst, changes in update_list:
-            instance_node = created_inst.transient_data["node"]
-
-            # update instance node name if product name changed
-            if "productName" in changes.changed_keys:
-                instance_node["name"].setValue(
-                    changes["productName"].new_value
-                )
-
-            # in case node is not existing anymore (user erased it manually)
-            try:
-                instance_node.fullName()
-            except ValueError:
-                self.remove_instances([created_inst])
-                continue
-
-            set_node_data(
-                instance_node,
-                INSTANCE_DATA_KNOB,
-                created_inst.data_to_store()
-            )
-
-    def remove_instances(self, instances):
-        for instance in instances:
-            remove_instance(instance)
-            self._remove_instance_from_context(instance)
-
-    def get_pre_create_attr_defs(self):
-        return [
-            BoolDef(
-                "use_selection",
-                default=not self.create_context.headless,
-                label="Use selection"
-            )
-        ]
-
-    def get_creator_settings(self, project_settings, settings_key=None):
-        if not settings_key:
-            settings_key = self.__class__.__name__
-        return project_settings["nuke"]["create"][settings_key]
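-
-
-# A minimal subclass sketch (identifiers are illustrative, not a shipped
-# plugin); `create` above resolves the node class from instance data:
-#
-#   class BackdropCreator(NukeCreator):
-#       identifier = "create_backdrop"
-#       label = "Create Backdrop"
-#       product_type = "backdrop"
-#       icon = "cube"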
-
-
-class NukeWriteCreator(NukeCreator):
    """Add Publishable Write node"""
-
-    identifier = "create_write"
-    label = "Create Write"
-    product_type = "write"
-    icon = "sign-out"
-
-    def get_linked_knobs(self):
-        linked_knobs = []
-        if "channels" in self.instance_attributes:
-            linked_knobs.append("channels")
-        if "ordered" in self.instance_attributes:
-            linked_knobs.append("render_order")
-        if "use_range_limit" in self.instance_attributes:
-            linked_knobs.extend(["___", "first", "last", "use_limit"])
-
-        return linked_knobs
-
-    def integrate_links(self, node, outputs=True):
-        # skip if no selection
-        if not self.selected_node:
-            return
-
-        # collect dependencies
-        input_nodes = [self.selected_node]
-        dependent_nodes = self.selected_node.dependent() if outputs else []
-
-        # relinking to collected connections
-        for i, input in enumerate(input_nodes):
-            node.setInput(i, input)
-
-        # make it nicer in graph
-        node.autoplace()
-
-        # relink also dependent nodes
-        for dep_nodes in dependent_nodes:
-            dep_nodes.setInput(0, node)
-
-    def set_selected_nodes(self, pre_create_data):
-        if pre_create_data.get("use_selection"):
-            selected_nodes = nuke.selectedNodes()
-            if selected_nodes == []:
-                raise NukeCreatorError("Creator error: No active selection")
-            elif len(selected_nodes) > 1:
-                raise NukeCreatorError(
-                    "Creator error: Select only one node")
-            self.selected_node = selected_nodes[0]
-        else:
-            self.selected_node = None
-
-    def get_pre_create_attr_defs(self):
-        attr_defs = [
-            BoolDef("use_selection", label="Use selection"),
-            self._get_render_target_enum()
-        ]
-        return attr_defs
-
-    def get_instance_attr_defs(self):
-        attr_defs = [
-            self._get_render_target_enum(),
-        ]
-        # add reviewable attribute
-        if "reviewable" in self.instance_attributes:
-            attr_defs.append(self._get_reviewable_bool())
-
-        return attr_defs
-
-    def _get_render_target_enum(self):
-        rendering_targets = {
-            "local": "Local machine rendering",
-            "frames": "Use existing frames"
-        }
-        if ("farm_rendering" in self.instance_attributes):
-            rendering_targets["frames_farm"] = "Use existing frames - farm"
-            rendering_targets["farm"] = "Farm rendering"
-
-        return EnumDef(
-            "render_target",
-            items=rendering_targets,
-            label="Render target"
-        )
-
-    def _get_reviewable_bool(self):
-        return BoolDef(
-            "review",
-            default=True,
-            label="Review"
-        )
-
-    def create(self, product_name, instance_data, pre_create_data):
-        # make sure selected nodes are added
-        self.set_selected_nodes(pre_create_data)
-
-        # make sure product name is unique
-        self.check_existing_product(product_name)
-
-        instance_node = self.create_instance_node(
-            product_name,
-            instance_data
-        )
-
-        try:
-            instance = CreatedInstance(
-                self.product_type,
-                product_name,
-                instance_data,
-                self
-            )
-
-            instance.transient_data["node"] = instance_node
-
-            self._add_instance_to_context(instance)
-
-            set_node_data(
-                instance_node, INSTANCE_DATA_KNOB, instance.data_to_store())
-
-            return instance
-
-        except Exception as er:
-            six.reraise(
-                NukeCreatorError,
-                NukeCreatorError("Creator error: {}".format(er)),
-                sys.exc_info()[2]
-            )
-
-    def apply_settings(self, project_settings):
-        """Method called on initialization of plugin to apply settings."""
-
-        # plugin settings
-        plugin_settings = self.get_creator_settings(project_settings)
-        temp_rendering_path_template = (
-            plugin_settings.get("temp_rendering_path_template")
-            or self.temp_rendering_path_template
-        )
-        # TODO remove template key 
replacements - temp_rendering_path_template = ( - temp_rendering_path_template - .replace("{product[name]}", "{subset}") - .replace("{product[type]}", "{family}") - .replace("{task[name]}", "{task}") - .replace("{folder[name]}", "{asset}") - ) - # individual attributes - self.instance_attributes = plugin_settings.get( - "instance_attributes") or self.instance_attributes - self.prenodes = plugin_settings["prenodes"] - self.default_variants = plugin_settings.get( - "default_variants") or self.default_variants - self.temp_rendering_path_template = temp_rendering_path_template - - -def get_instance_group_node_childs(instance): - """Return list of instance group node children - - Args: - instance (pyblish.Instance): pyblish instance - - Returns: - list: [nuke.Node] - """ - node = instance.data["transientData"]["node"] - - if node.Class() != "Group": - return - - # collect child nodes - child_nodes = [] - # iterate all nodes - for node in nuke.allNodes(group=node): - # add contained nodes to instance's node list - child_nodes.append(node) - - return child_nodes - - -def get_colorspace_from_node(node): - # Add version data to instance - colorspace = node["colorspace"].value() - - # remove default part of the string - if "default (" in colorspace: - colorspace = re.sub(r"default.\(|\)", "", colorspace) - - return colorspace - - -def get_review_presets_config(): - settings = get_current_project_settings() - review_profiles = ( - settings["core"] - ["publish"] - ["ExtractReview"] - ["profiles"] - ) - - outputs = {} - for profile in review_profiles: - outputs.update(profile.get("outputs", {})) - - return [str(name) for name, _prop in outputs.items()] - - -class NukeLoader(LoaderPlugin): - container_id_knob = "containerId" - container_id = None - - def reset_container_id(self): - self.container_id = "".join(random.choice( - string.ascii_uppercase + string.digits) for _ in range(10)) - - def get_container_id(self, node): - id_knob = node.knobs().get(self.container_id_knob) - return id_knob.value() if id_knob else None - - def get_members(self, source): - """Return nodes that has same "containerId" as `source`""" - source_id = self.get_container_id(source) - return [node for node in nuke.allNodes(recurseGroups=True) - if self.get_container_id(node) == source_id - and node is not source] if source_id else [] - - def set_as_member(self, node): - source_id = self.get_container_id(node) - - if source_id: - node[self.container_id_knob].setValue(source_id) - else: - HIDEN_FLAG = 0x00040000 - _knob = Knobby( - "String_Knob", - self.container_id, - flags=[ - nuke.READ_ONLY, - HIDEN_FLAG - ]) - knob = _knob.create(self.container_id_knob) - node.addKnob(knob) - - def clear_members(self, parent_node): - parent_class = parent_node.Class() - members = self.get_members(parent_node) - - dependent_nodes = None - for node in members: - _depndc = [n for n in node.dependent() if n not in members] - if not _depndc: - continue - - dependent_nodes = _depndc - break - - for member in members: - if member.Class() == parent_class: - continue - self.log.info("removing node: `{}".format(member.name())) - nuke.delete(member) - - return dependent_nodes - - -class ExporterReview(object): - """ - Base class object for generating review data from Nuke - - Args: - klass (pyblish.plugin): pyblish plugin parent - instance (pyblish.instance): instance of pyblish context - - """ - data = None - publish_on_farm = False - - def __init__(self, - klass, - instance, - multiple_presets=True - ): - - self.log = klass.log - self.instance = 
-class ExporterReview(object):
-    """
-    Base class object for generating review data from Nuke
-
-    Args:
-        klass (pyblish.plugin): pyblish plugin parent
-        instance (pyblish.instance): instance of pyblish context
-
-    """
-    data = None
-    publish_on_farm = False
-
-    def __init__(self,
-                 klass,
-                 instance,
-                 multiple_presets=True
-                 ):
-
-        self.log = klass.log
-        self.instance = instance
-        self.multiple_presets = multiple_presets
-        self.path_in = self.instance.data.get("path", None)
-        self.staging_dir = self.instance.data["stagingDir"]
-        self.collection = self.instance.data.get("collection", None)
-        self.data = {"representations": []}
-
-    def get_file_info(self):
-        if self.collection:
-            # get path
-            self.fname = os.path.basename(
-                self.collection.format("{head}{padding}{tail}")
-            )
-            self.fhead = self.collection.format("{head}")
-
-            # get first and last frame
-            self.first_frame = min(self.collection.indexes)
-            self.last_frame = max(self.collection.indexes)
-
-            # make sure slate frame is not included
-            frame_start_handle = self.instance.data["frameStartHandle"]
-            if frame_start_handle > self.first_frame:
-                self.first_frame = frame_start_handle
-
-        else:
-            self.fname = os.path.basename(self.path_in)
-            self.fhead = os.path.splitext(self.fname)[0] + "."
-            self.first_frame = self.instance.data["frameStartHandle"]
-            self.last_frame = self.instance.data["frameEndHandle"]
-
-        if "#" in self.fhead:
-            self.fhead = self.fhead.replace("#", "")[:-1]
-
-    def get_representation_data(
-        self,
-        tags=None,
-        range=False,
-        custom_tags=None,
-        colorspace=None,
-    ):
-        """ Add representation data to self.data
-
-        Args:
-            tags (list[str], optional): list of defined tags.
-                Defaults to None.
-            range (bool, optional): flag for adding ranges.
-                Defaults to False.
-            custom_tags (list[str], optional): user inputted custom tags.
-                Defaults to None.
-            colorspace (str, optional): colorspace name.
-                Defaults to None.
-        """
-        add_tags = tags or []
-        repre = {
-            "name": self.name,
-            "ext": self.ext,
-            "files": self.file,
-            "stagingDir": self.staging_dir,
-            "tags": [self.name.replace("_", "-")] + add_tags,
-            "data": {
-                # making sure that once intermediate file is published
-                # as representation, we will be able to then identify it
-                # from representation.data.isIntermediate
-                "isIntermediate": True
-            },
-        }
-
-        if custom_tags:
-            repre["custom_tags"] = custom_tags
-
-        if range:
-            repre.update({
-                "frameStart": self.first_frame,
-                "frameEnd": self.last_frame,
-            })
-            if ".{}".format(self.ext) not in VIDEO_EXTENSIONS:
-                filenames = get_filenames_without_hash(
-                    self.file, self.first_frame, self.last_frame)
-                repre["files"] = filenames
-
-        if self.multiple_presets:
-            repre["outputName"] = self.name
-
-        if self.publish_on_farm:
-            repre["tags"].append("publish_on_farm")
-
-        # add colorspace data to representation
-        if colorspace:
-            set_colorspace_data_to_representation(
-                repre,
-                self.instance.context.data,
-                colorspace=colorspace,
-                log=self.log
-            )
-        self.data["representations"].append(repre)
-
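Note on `get_file_info()` above: the `collection` instance data is a `clique` collection, which is where `format("{head}{padding}{tail}")` and `indexes` come from. A small standalone example of that API (the file names are made-up examples):

    import clique

    files = [
        "shot010_beauty.1001.exr",
        "shot010_beauty.1002.exr",
        "shot010_beauty.1003.exr",
    ]
    collections, remainder = clique.assemble(files)
    collection = collections[0]

    # the same calls get_file_info() makes on instance.data["collection"]
    print(collection.format("{head}{padding}{tail}"))  # shot010_beauty.%04d.exr
    print(collection.format("{head}"))                 # shot010_beauty.
    print(min(collection.indexes), max(collection.indexes))  # 1001 1003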
-    def get_imageio_baking_profile(self):
-        from . import lib as opnlib
-        nuke_imageio = opnlib.get_nuke_imageio_settings()
-
-        if nuke_imageio["baking_target"]["enabled"]:
-            return nuke_imageio["baking_target"]
-        else:
-            # viewer has display and view keys only and is
-            # of display_view type
-            return {
-                "type": "display_view",
-                "display_view": nuke_imageio["viewer"],
-            }
-
-
-class ExporterReviewLut(ExporterReview):
-    """
-    Generator object for review lut from Nuke
-
-    Args:
-        klass (pyblish.plugin): pyblish plugin parent
-        instance (pyblish.instance): instance of pyblish context
-
-    """
-    _temp_nodes = []
-
-    def __init__(self,
-                 klass,
-                 instance,
-                 name=None,
-                 ext=None,
-                 cube_size=None,
-                 lut_size=None,
-                 lut_style=None,
-                 multiple_presets=True):
-        # initialize parent class
-        super(ExporterReviewLut, self).__init__(
-            klass, instance, multiple_presets)
-
-        # respect 'viewer_lut_raw' if defined on the plugin
-        if hasattr(klass, "viewer_lut_raw"):
-            self.viewer_lut_raw = klass.viewer_lut_raw
-        else:
-            self.viewer_lut_raw = False
-
-        self.name = name or "baked_lut"
-        self.ext = ext or "cube"
-        self.cube_size = cube_size or 32
-        self.lut_size = lut_size or 1024
-        self.lut_style = lut_style or "linear"
-
-        # set frame start / end and file name to self
-        self.get_file_info()
-
-        self.log.info("File info was set...")
-
-        self.file = self.fhead + self.name + ".{}".format(self.ext)
-        self.path = os.path.join(
-            self.staging_dir, self.file).replace("\\", "/")
-
-    def clean_nodes(self):
-        for node in self._temp_nodes:
-            nuke.delete(node)
-        self._temp_nodes = []
-        self.log.info("Deleted nodes...")
-
-    def generate_lut(self, **kwargs):
-        bake_viewer_process = kwargs["bake_viewer_process"]
-        bake_viewer_input_process_node = kwargs[
-            "bake_viewer_input_process"]
-
-        # ---------- start nodes creation
-
-        # CMSTestPattern
-        cms_node = nuke.createNode("CMSTestPattern")
-        cms_node["cube_size"].setValue(self.cube_size)
-        # connect
-        self._temp_nodes.append(cms_node)
-        self.previous_node = cms_node
-
-        if bake_viewer_process:
-            # Node View Process
-            if bake_viewer_input_process_node:
-                ipn = get_view_process_node()
-                if ipn is not None:
-                    # connect
-                    ipn.setInput(0, self.previous_node)
-                    self._temp_nodes.append(ipn)
-                    self.previous_node = ipn
-                    self.log.debug(
-                        "ViewProcess... `{}`".format(self._temp_nodes))
-
-            if not self.viewer_lut_raw:
-                # OCIODisplay
-                dag_node = nuke.createNode("OCIODisplay")
-                # connect
-                dag_node.setInput(0, self.previous_node)
-                self._temp_nodes.append(dag_node)
-                self.previous_node = dag_node
-                self.log.debug(
-                    "OCIODisplay... `{}`".format(self._temp_nodes))
-
-        # GenerateLUT
-        gen_lut_node = nuke.createNode("GenerateLUT")
-        gen_lut_node["file"].setValue(self.path)
-        gen_lut_node["file_type"].setValue(".{}".format(self.ext))
-        gen_lut_node["lut1d"].setValue(self.lut_size)
-        gen_lut_node["style1d"].setValue(self.lut_style)
-        # connect
-        gen_lut_node.setInput(0, self.previous_node)
-        self._temp_nodes.append(gen_lut_node)
-        # ---------- end nodes creation
-
-        # Export lut file
-        nuke.execute(
-            gen_lut_node.name(),
-            int(self.first_frame),
-            int(self.first_frame))
-
-        self.log.info("Exported...")
-
-        # ---------- generate representation data
-        self.get_representation_data()
-
-        # ---------- Clean up
-        self.clean_nodes()
-
-        return self.data
-
-
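For orientation, the whole of `generate_lut()` above boils down to a short temporary node chain. A condensed sketch of the same bake, runnable only inside Nuke's script editor, with knob names taken from the deleted implementation:

    import nuke

    def bake_cube_lut(path, cube_size=32, lut_size=1024, style="linear"):
        # CMSTestPattern -> OCIODisplay -> GenerateLUT, then render one frame
        cms = nuke.createNode("CMSTestPattern")
        cms["cube_size"].setValue(cube_size)

        display = nuke.createNode("OCIODisplay")
        display.setInput(0, cms)

        gen = nuke.createNode("GenerateLUT")
        gen["file"].setValue(path)
        gen["file_type"].setValue(".cube")
        gen["lut1d"].setValue(lut_size)
        gen["style1d"].setValue(style)
        gen.setInput(0, display)

        frame = nuke.frame()
        nuke.execute(gen.name(), frame, frame)  # a single frame is enough

        # clean up the temporary chain
        for node in (gen, display, cms):
            nuke.delete(node)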
-class ExporterReviewMov(ExporterReview):
-    """
-    Class for generating review mov files
-
-    Args:
-        klass (pyblish.plugin): pyblish plugin parent
-        instance (pyblish.instance): instance of pyblish context
-
-    """
-    _temp_nodes = {}
-
-    def __init__(self,
-                 klass,
-                 instance,
-                 name=None,
-                 ext=None,
-                 multiple_presets=True
-                 ):
-        # initialize parent class
-        super(ExporterReviewMov, self).__init__(
-            klass, instance, multiple_presets)
-        # passing presets for nodes to self
-        self.nodes = klass.nodes if hasattr(klass, "nodes") else {}
-
-        # respect 'viewer_lut_raw' defined on the plugin
-        self.viewer_lut_raw = klass.viewer_lut_raw
-        self.write_colorspace = instance.data["colorspace"]
-        self.color_channels = instance.data["color_channels"]
-        self.formatting_data = instance.data["anatomyData"]
-
-        self.name = name or "baked"
-        self.ext = ext or "mov"
-
-        # set frame start / end and file name to self
-        self.get_file_info()
-
-        self.log.info("File info was set...")
-
-        if ".{}".format(self.ext) in VIDEO_EXTENSIONS:
-            self.file = "{}{}.{}".format(
-                self.fhead, self.name, self.ext)
-        else:
-            # Output is image (or image sequence)
-            # When the file is an image it's possible it
-            # has extra information after the `fhead` that
-            # we want to preserve, e.g. like frame numbers
-            # or frames hashes like `####`
-            filename_no_ext = os.path.splitext(
-                os.path.basename(self.path_in))[0]
-            after_head = filename_no_ext[len(self.fhead):]
-            self.file = "{}{}.{}.{}".format(
-                self.fhead, self.name, after_head, self.ext)
-        self.path = os.path.join(
-            self.staging_dir, self.file).replace("\\", "/")
-
-    def clean_nodes(self, node_name):
-        for node in self._temp_nodes[node_name]:
-            nuke.delete(node)
-        self._temp_nodes[node_name] = []
-        self.log.info("Deleted nodes...")
-
-    def render(self, render_node_name):
-        self.log.info("Rendering... ")
-        # Render Write node
-        nuke.execute(
-            render_node_name,
-            int(self.first_frame),
-            int(self.last_frame))
-
-        self.log.info("Rendered...")
-
-    def save_file(self):
-        import shutil
-        with maintained_selection():
-            self.log.info("Saving nodes as file... ")
-            # create nk path
-            path = f"{os.path.splitext(self.path)[0]}.nk"
-            # save file to the path
-            if not os.path.exists(os.path.dirname(path)):
-                os.makedirs(os.path.dirname(path))
-            shutil.copyfile(self.instance.context.data["currentFile"], path)
-
-        self.log.info("Nodes exported...")
-        return path
-
-    def generate_mov(self, farm=False, delete=True, **kwargs):
-        # colorspace data
-        colorspace = self.write_colorspace
-
-        # get colorspace settings
-        # get colorspace data from context
-        config_data, _ = get_colorspace_settings_from_publish_context(
-            self.instance.context.data)
-
-        add_tags = []
-        self.publish_on_farm = farm
-        read_raw = kwargs["read_raw"]
-        bake_viewer_process = kwargs["bake_viewer_process"]
-        bake_viewer_input_process_node = kwargs[
-            "bake_viewer_input_process"]
-
-        baking_colorspace = self.get_imageio_baking_profile()
-
-        colorspace_override = kwargs["colorspace_override"]
-        if colorspace_override["enabled"]:
-            baking_colorspace = colorspace_override
-
-        fps = self.instance.context.data["fps"]
-
-        self.log.debug(f">> baking_view_profile `{baking_colorspace}`")
-
-        add_custom_tags = kwargs.get("add_custom_tags", [])
-
-        self.log.info(f"__ add_custom_tags: `{add_custom_tags}`")
-
-        product_name = self.instance.data["productName"]
-        self._temp_nodes[product_name] = []
-
-        # Read node
-        r_node = nuke.createNode("Read")
-        r_node["file"].setValue(self.path_in)
-        r_node["first"].setValue(self.first_frame)
-        r_node["origfirst"].setValue(self.first_frame)
-        r_node["last"].setValue(self.last_frame)
-        r_node["origlast"].setValue(self.last_frame)
-        r_node["colorspace"].setValue(self.write_colorspace)
-
-        # do not rely on defaults, set explicitly
-        # to be sure it is set correctly
-        r_node["frame_mode"].setValue("expression")
-        r_node["frame"].setValue("")
-
-        if read_raw:
-            r_node["raw"].setValue(1)
-
-        # connect to Read node
-        self._shift_to_previous_node_and_temp(
-            product_name, r_node, "Read... `{}`"
-        )
-
-        # add reformat node
-        reformat_nodes_config = kwargs["reformat_nodes_config"]
-        if reformat_nodes_config["enabled"]:
-            reposition_nodes = reformat_nodes_config["reposition_nodes"]
-            for reposition_node in reposition_nodes:
-                node_class = reposition_node["node_class"]
-                knobs = reposition_node["knobs"]
-                node = nuke.createNode(node_class)
-                set_node_knobs_from_settings(node, knobs)
-
-                # connect in order
-                self._connect_to_above_nodes(
-                    node, product_name, "Reposition node... `{}`"
-                )
-            # append reformatted tag
-            add_tags.append("reformatted")
-
-        # only create colorspace baking if toggled on
-        if bake_viewer_process:
-            if bake_viewer_input_process_node:
-                # View Process node
-                ipn = get_view_process_node()
-                if ipn is not None:
-                    # connect to ViewProcess node
-                    self._connect_to_above_nodes(
-                        ipn, product_name, "ViewProcess... `{}`"
-                    )
-
-            if not self.viewer_lut_raw:
-                # OCIODisplay
-                if baking_colorspace["type"] == "display_view":
-                    display_view = baking_colorspace["display_view"]
-
-                    message = "OCIODisplay... '{}'"
-                    node = nuke.createNode("OCIODisplay")
-
-                    # assign display and view
-                    display = display_view["display"]
-                    view = display_view["view"]
-
-                    # display could not be set in nuke_default config
-                    if display:
-                        # format display string with anatomy data
-                        display = StringTemplate(display).format_strict(
-                            self.formatting_data
-                        )
-                        node["display"].setValue(display)
-
-                    # format view string with anatomy data
-                    view = StringTemplate(view).format_strict(
-                        self.formatting_data)
-                    # assign viewer
-                    node["view"].setValue(view)
-
-                    if config_data:
-                        # convert display and view to colorspace
-                        colorspace = get_display_view_colorspace_name(
-                            config_path=config_data["path"],
-                            display=display, view=view
-                        )
-
-                # OCIOColorSpace
-                elif baking_colorspace["type"] == "colorspace":
-                    baking_colorspace = baking_colorspace["colorspace"]
-                    # format colorspace string with anatomy data
-                    baking_colorspace = StringTemplate(
-                        baking_colorspace).format_strict(self.formatting_data)
-                    node = nuke.createNode("OCIOColorSpace")
-                    message = "OCIOColorSpace... '{}'"
-                    # no need to set input colorspace since it is driven by
-                    # working colorspace
-                    node["out_colorspace"].setValue(baking_colorspace)
-                    colorspace = baking_colorspace
-
-                else:
-                    raise ValueError(
-                        "Invalid baking color space type: "
-                        f"{baking_colorspace['type']}"
-                    )
-
-                self._connect_to_above_nodes(
-                    node, product_name, message
-                )
-
-        # Write node
-        write_node = nuke.createNode("Write")
-        self.log.debug(f"Path: {self.path}")
-
-        write_node["file"].setValue(str(self.path))
-        write_node["file_type"].setValue(str(self.ext))
-        write_node["channels"].setValue(str(self.color_channels))
-
-        # Knobs `meta_codec` and `mov64_codec` are not available on centos.
-        # TODO shouldn't this come from settings on outputs?
-        try:
-            write_node["meta_codec"].setValue("ap4h")
-        except Exception:
-            self.log.info("`meta_codec` knob was not found")
-
-        try:
-            write_node["mov64_codec"].setValue("ap4h")
-            write_node["mov64_fps"].setValue(float(fps))
-        except Exception:
-            self.log.info("`mov64_codec` knob was not found")
-
-        try:
-            write_node["mov64_write_timecode"].setValue(1)
-        except Exception:
-            self.log.info("`mov64_write_timecode` knob was not found")
-
-        write_node["raw"].setValue(1)
-
-        # connect
-        write_node.setInput(0, self.previous_node)
-        self._temp_nodes[product_name].append(write_node)
-        self.log.debug(f"Write... `{self._temp_nodes[product_name]}`")
-        # ---------- end nodes creation
-
-        # ---------- render or save to nk
-        if self.publish_on_farm:
-            nuke.scriptSave()
-            path_nk = self.save_file()
-            self.data.update({
-                "bakeScriptPath": path_nk,
-                "bakeWriteNodeName": write_node.name(),
-                "bakeRenderPath": self.path
-            })
-        else:
-            self.render(write_node.name())
-
-        # ---------- generate representation data
-        tags = ["review", "need_thumbnail"]
-
-        if delete:
-            tags.append("delete")
-
-        self.get_representation_data(
-            tags=tags + add_tags,
-            custom_tags=add_custom_tags,
-            range=True,
-            colorspace=colorspace,
-        )
-
-        self.log.debug(f"Representation... `{self.data}`")
-
-        self.clean_nodes(product_name)
-        nuke.scriptSave()
-
-        return self.data
-
-    def _shift_to_previous_node_and_temp(self, product_name, node, message):
-        self._temp_nodes[product_name].append(node)
-        self.previous_node = node
-        self.log.debug(message.format(self._temp_nodes[product_name]))
-
-    def _connect_to_above_nodes(self, node, product_name, message):
-        node.setInput(0, self.previous_node)
-        self._shift_to_previous_node_and_temp(product_name, node, message)
-
-
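Note on the codec knob `try/except` blocks in `generate_mov()` above: the same guard can be written without catching broad exceptions by checking `node.knobs()` first, since it returns a name-to-knob mapping. A hedged alternative sketch (runs inside Nuke only):

    import nuke

    def set_optional_knobs(node, values):
        # knobs() returns a name -> knob mapping, so membership is cheap
        for name, value in values.items():
            knob = node.knobs().get(name)
            if knob is None:
                print("`{}` knob was not found".format(name))
                continue
            knob.setValue(value)

    write_node = nuke.createNode("Write")
    set_optional_knobs(write_node, {"mov64_codec": "ap4h", "mov64_fps": 25.0})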
-def convert_to_valid_instaces():
-    """ Check and convert to latest publisher instances
-
-    Also save as new minor version of workfile.
-    """
-    def product_type_to_identifier(product_type):
-        mapping = {
-            "render": "create_write_render",
-            "prerender": "create_write_prerender",
-            "still": "create_write_image",
-            "model": "create_model",
-            "camera": "create_camera",
-            "nukenodes": "create_backdrop",
-            "gizmo": "create_gizmo",
-            "source": "create_source"
-        }
-        return mapping[product_type]
-
-    from ayon_nuke.api import workio
-
-    task_name = get_current_task_name()
-
-    # save into new workfile
-    current_file = workio.current_file()
-
-    # add file suffix if missing
-    if "_publisherConvert" not in current_file:
-        new_workfile = (
-            current_file[:-3]
-            + "_publisherConvert"
-            + current_file[-3:]
-        )
-    else:
-        new_workfile = current_file
-
-    path = new_workfile.replace("\\", "/")
-    nuke.scriptSaveAs(new_workfile, overwrite=1)
-    nuke.Root()["name"].setValue(path)
-    nuke.Root()["project_directory"].setValue(os.path.dirname(path))
-    nuke.Root().setModified(False)
-
-    _remove_old_knobs(nuke.Root())
-
-    # loop all nodes and convert
-    for node in nuke.allNodes(recurseGroups=True):
-        transfer_data = {
-            "creator_attributes": {}
-        }
-        creator_attr = transfer_data["creator_attributes"]
-
-        if node.Class() in ["Viewer", "Dot"]:
-            continue
-
-        if get_node_data(node, INSTANCE_DATA_KNOB):
-            continue
-
-        # get data from avalon knob
-        avalon_knob_data = get_avalon_knob_data(
-            node, ["avalon:", "ak:"])
-
-        if not avalon_knob_data:
-            continue
-
-        if avalon_knob_data["id"] not in {
-            AYON_INSTANCE_ID, AVALON_INSTANCE_ID
-        }:
-            continue
-
-        transfer_data.update({
-            k: v for k, v in avalon_knob_data.items()
-            if k not in ["families", "creator"]
-        })
-
-        transfer_data["task"] = task_name
-
-        product_type = avalon_knob_data.get("productType")
-        if product_type is None:
-            product_type = avalon_knob_data["family"]
-
-        # establish families
-        families_ak = avalon_knob_data.get("families", [])
-
-        if "suspend_publish" in node.knobs():
-            creator_attr["suspended_publish"] = (
-                node["suspend_publish"].value())
-
-        # get review knob value
-        if "review" in node.knobs():
-            creator_attr["review"] = (
-                node["review"].value())
-
-        if "publish" in node.knobs():
-            transfer_data["active"] = (
-                node["publish"].value())
-
-        # add identifier
-        transfer_data["creator_identifier"] = product_type_to_identifier(
-            product_type
-        )
-
-        # Add all nodes in group instances.
-        if node.Class() == "Group":
-            # only alter families for render product type
-            if families_ak and "write" in families_ak.lower():
-                target = node["render"].value()
-                if target == "Use existing frames":
-                    creator_attr["render_target"] = "frames"
-                elif target == "Local":
-                    # Local rendering
-                    creator_attr["render_target"] = "local"
-                elif target == "On farm":
-                    # Farm rendering
-                    creator_attr["render_target"] = "farm"
-
-            if "deadlinePriority" in node.knobs():
-                transfer_data["farm_priority"] = (
-                    node["deadlinePriority"].value())
-            if "deadlineChunkSize" in node.knobs():
-                creator_attr["farm_chunk"] = (
-                    node["deadlineChunkSize"].value())
-            if "deadlineConcurrentTasks" in node.knobs():
-                creator_attr["farm_concurrency"] = (
-                    node["deadlineConcurrentTasks"].value())
-
-        _remove_old_knobs(node)
-
-        # add new instance knob with transfer data
-        set_node_data(
-            node, INSTANCE_DATA_KNOB, transfer_data)
-
-    nuke.scriptSave()
-
-
-def _remove_old_knobs(node):
-    remove_knobs = [
-        "review", "publish", "render", "suspend_publish", "warn", "divd",
-        "OpenpypeDataGroup", "OpenpypeDataGroup_End", "deadlinePriority",
-        "deadlineChunkSize", "deadlineConcurrentTasks", "Deadline"
-    ]
-
-    # remove all old knobs
-    for knob in node.allKnobs():
-        try:
-            if knob.name() in remove_knobs:
-                node.removeKnob(knob)
-            elif "avalon" in knob.name():
-                node.removeKnob(knob)
-        except ValueError:
-            pass
-
-
-def exposed_write_knobs(settings, plugin_name, instance_node):
-    exposed_knobs = settings["nuke"]["create"][plugin_name].get(
-        "exposed_knobs", []
-    )
-    if exposed_knobs:
-        instance_node.addKnob(nuke.Text_Knob('', 'Write Knobs'))
-    write_node = nuke.allNodes(group=instance_node, filter="Write")[0]
-    link_knobs(exposed_knobs, write_node, instance_node)
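Note on `exposed_write_knobs()`/`link_knobs` above: in plain Nuke, exposing an inner node's knob on its parent group is done with `nuke.Link_Knob`. A minimal sketch of that mechanism, assuming `makeLink` behaves as commonly used (runs inside Nuke only; `expose_knob` is a hypothetical helper, not pipeline API):

    import nuke

    def expose_knob(group_node, inner_node, knob_name):
        # Link_Knob forwards a knob of a node inside the group onto the
        # group's own control panel
        link = nuke.Link_Knob(knob_name, knob_name)
        link.makeLink(inner_node.fullName(), knob_name)
        group_node.addKnob(link)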
diff --git a/server_addon/nuke/client/ayon_nuke/api/push_to_project.py b/server_addon/nuke/client/ayon_nuke/api/push_to_project.py
deleted file mode 100644
index 852e5d0e31..0000000000
--- a/server_addon/nuke/client/ayon_nuke/api/push_to_project.py
+++ /dev/null
@@ -1,118 +0,0 @@
-from collections import defaultdict
-import shutil
-import os
-
-from ayon_api import get_project, get_folder_by_id, get_task_by_id
-from ayon_core.settings import get_project_settings
-from ayon_core.pipeline import Anatomy, registered_host
-from ayon_core.pipeline.template_data import get_template_data
-from ayon_core.pipeline.workfile import get_workdir_with_workdir_data
-from ayon_core.tools import context_dialog
-
-from .utils import bake_gizmos_recursively
-from .lib import MENU_LABEL
-
-import nuke
-
-
-def bake_container(container):
-    """Bake containers to read nodes."""
-
-    node = container["node"]
-
-    # Fetch knobs to remove in order.
-    knobs_to_remove = []
-    remove = False
-    for count in range(0, node.numKnobs()):
-        knob = node.knob(count)
-
-        # All knobs from "AYON" tab knob onwards.
-        if knob.name() == MENU_LABEL:
-            remove = True
-
-        if remove:
-            knobs_to_remove.append(knob)
-
-        # Don't remove knobs from "containerId" onwards.
-        if knob.name() == "containerId":
-            remove = False
-
-    # Knobs need to be removed in reverse order, because child knobs need
-    # to be removed first.
-    for knob in reversed(knobs_to_remove):
-        node.removeKnob(knob)
-
-    node["tile_color"].setValue(0)
-
-
-def main():
-    context = context_dialog.ask_for_context()
-
-    if context is None:
-        return
-
-    # Get workfile path to save to.
-    project_name = context["project_name"]
-    project = get_project(project_name)
-    folder = get_folder_by_id(project_name, context["folder_id"])
-    task = get_task_by_id(project_name, context["task_id"])
-    host = registered_host()
-    project_settings = get_project_settings(project_name)
-    anatomy = Anatomy(project_name)
-
-    workdir_data = get_template_data(
-        project, folder, task, host.name, project_settings
-    )
-
-    workdir = get_workdir_with_workdir_data(
-        workdir_data,
-        project_name,
-        anatomy,
-        project_settings=project_settings
-    )
-    # Save current workfile.
-    current_file = host.current_file()
-    host.save_file(current_file)
-
-    for container in host.ls():
-        bake_container(container)
-
-    # Bake gizmos.
-    bake_gizmos_recursively()
-
-    # Copy all read node files to "resources" folder next to workfile and
-    # change file path.
-    first_frame = int(nuke.root()["first_frame"].value())
-    last_frame = int(nuke.root()["last_frame"].value())
-    files_by_node_name = defaultdict(set)
-    nodes_by_name = {}
-    for count in range(first_frame, last_frame + 1):
-        nuke.frame(count)
-        for node in nuke.allNodes(filter="Read"):
-            files_by_node_name[node.name()].add(
-                nuke.filename(node, nuke.REPLACE)
-            )
-            nodes_by_name[node.name()] = node
-
-    resources_dir = os.path.join(workdir, "resources")
-    for name, files in files_by_node_name.items():
-        dir = os.path.join(resources_dir, name)
-        if not os.path.exists(dir):
-            os.makedirs(dir)
-
-        for f in files:
-            shutil.copy(f, os.path.join(dir, os.path.basename(f)))
-
-        node = nodes_by_name[name]
-        path = node["file"].value().replace(os.path.dirname(f), dir)
-        node["file"].setValue(path.replace("\\", "/"))
-
-    # Save current workfile to new context.
-    pushed_workfile = os.path.join(
-        workdir, os.path.basename(current_file))
-    host.save_file(pushed_workfile)
-    # Open current context workfile.
-    host.open_file(current_file)
-
-    nuke.message(f"Pushed to project: \n{pushed_workfile}")
diff --git a/server_addon/nuke/client/ayon_nuke/api/utils.py b/server_addon/nuke/client/ayon_nuke/api/utils.py
deleted file mode 100644
index 646bb0ece1..0000000000
--- a/server_addon/nuke/client/ayon_nuke/api/utils.py
+++ /dev/null
@@ -1,224 +0,0 @@
-import os
-import re
-
-import nuke
-
-import pyblish.util
-import pyblish.api
-from qtpy import QtWidgets
-
-from ayon_core import resources
-from ayon_core.pipeline import registered_host
-from ayon_core.tools.utils import show_message_dialog
-from ayon_core.pipeline.create import CreateContext
-
-
-def set_context_favorites(favorites=None):
-    """ Adding favorite folders to nuke's browser
-
-    Arguments:
-        favorites (dict): couples of {name:path}
-    """
-    favorites = favorites or {}
-    icon_path = resources.get_resource("icons", "folder-favorite.png")
-    for name, path in favorites.items():
-        nuke.addFavoriteDir(
-            name,
-            path,
-            nuke.IMAGE | nuke.SCRIPT | nuke.GEO,
-            icon=icon_path)
-
-
-def get_node_outputs(node):
-    '''
-    Return a dictionary of the nodes and pipes that are connected to node
-    '''
-    dep_dict = {}
-    dependencies = node.dependent(nuke.INPUTS | nuke.HIDDEN_INPUTS)
-    for d in dependencies:
-        dep_dict[d] = []
-        for i in range(d.inputs()):
-            if d.input(i) == node:
-                dep_dict[d].append(i)
-    return dep_dict
-
-
-def is_node_gizmo(node):
-    '''
-    return True if node is gizmo
-    '''
-    return 'gizmo_file' in node.knobs()
-
-
-def gizmo_is_nuke_default(gizmo):
-    '''Check if gizmo is in default install path'''
-    plug_dir = os.path.join(os.path.dirname(
-        nuke.env['ExecutablePath']), 'plugins')
-    return gizmo.filename().startswith(plug_dir)
-
-
-def bake_gizmos_recursively(in_group=None):
-    """Converting a gizmo to group
-
-    Arguments:
-        in_group (nuke.Node)[optional]: group node or all nodes
-    """
-    from .lib import maintained_selection
-    if in_group is None:
-        in_group = nuke.Root()
-    # preserve selection after all is done
-    with maintained_selection():
-        # jump to the group
-        with in_group:
-            for node in nuke.allNodes():
-                if is_node_gizmo(node) and not gizmo_is_nuke_default(node):
-                    with node:
-                        outputs = get_node_outputs(node)
-                        group = node.makeGroup()
-                        # Reconnect inputs and outputs if any
-                        if outputs:
-                            for n, pipes in outputs.items():
-                                for i in pipes:
-                                    n.setInput(i, group)
-                        for i in range(node.inputs()):
-                            group.setInput(i, node.input(i))
-                        # set node position and name
-                        group.setXYpos(node.xpos(), node.ypos())
-                        name = node.name()
-                        nuke.delete(node)
-                        group.setName(name)
-                        node = group
-
-                if node.Class() == "Group":
-                    bake_gizmos_recursively(node)
-
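Note on `get_node_outputs()` above: its input-index map is what makes clean node replacement possible in `bake_gizmos_recursively()`. A small sketch of the same rewiring when swapping one node for another (runs inside Nuke only; `swap_node` is a hypothetical helper):

    import nuke

    def swap_node(old_node, new_node):
        # rewire every downstream input that pointed at old_node
        for dep in old_node.dependent(nuke.INPUTS | nuke.HIDDEN_INPUTS):
            for i in range(dep.inputs()):
                if dep.input(i) == old_node:
                    dep.setInput(i, new_node)
        # carry the upstream inputs over as well
        for i in range(old_node.inputs()):
            new_node.setInput(i, old_node.input(i))
        nuke.delete(old_node)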
-def colorspace_exists_on_node(node, colorspace_name):
-    """ Check if colorspace exists on node
-
-    Look through all options in the colorspace knob, and see if we have an
-    exact match to one of the items.
-
-    Args:
-        node (nuke.Node): nuke node object
-        colorspace_name (str): color profile name
-
-    Returns:
-        bool: True if exists
-    """
-    try:
-        colorspace_knob = node['colorspace']
-    except ValueError:
-        # knob is not available on input node
-        return False
-
-    return colorspace_name in get_colorspace_list(colorspace_knob)
-
-
-def get_colorspace_list(colorspace_knob):
-    """Get available colorspace profile names
-
-    Args:
-        colorspace_knob (nuke.Knob): nuke knob object
-
-    Returns:
-        list: list of strings names of profiles
-    """
-    results = []
-
-    # This pattern matches roles, which use indentation and parentheses
-    # around the original colorspace. The value returned from the
-    # colorspace knob is the string before the indentation, so we need to
-    # convert the values to match the value returned from the knob,
-    # i.e. knob.value().
-    pattern = r".*\t.* \(.*\)"
-    for colorspace in nuke.getColorspaceList(colorspace_knob):
-        match = re.search(pattern, colorspace)
-        if match:
-            results.append(colorspace.split("\t", 1)[0])
-        else:
-            results.append(colorspace)
-
-    return results
-
-
-def is_headless():
-    """
-    Returns:
-        bool: headless
-    """
-    return QtWidgets.QApplication.instance() is None
-
-
-def submit_render_on_farm(node):
-    # Ensure code is executed in root context.
-    if nuke.root() == nuke.thisNode():
-        _submit_render_on_farm(node)
-    else:
-        # If not in root context, move to the root context and then
-        # execute the code.
-        with nuke.root():
-            _submit_render_on_farm(node)
-
-
-def _submit_render_on_farm(node):
-    """Render on farm submission
-
-    This function prepares the context for farm submission, validates it,
-    extracts relevant data, copies the current workfile to a timestamped
-    copy, and submits the job to the farm.
-
-    Args:
-        node (Node): The node for which the farm submission is being made.
-    """
-
-    host = registered_host()
-    create_context = CreateContext(host)
-
-    # Ensure CreateInstance is enabled.
-    for instance in create_context.instances:
-        if node.name() != instance.transient_data["node"].name():
-            continue
-
-        instance.data["active"] = True
-
-    context = pyblish.api.Context()
-    context.data["create_context"] = create_context
-    # Used in pyblish plugin to determine which instance to publish.
-    context.data["node_name"] = node.name()
-    # Used in pyblish plugins to determine whether to run or not.
-    context.data["render_on_farm"] = True
-
-    # Since we need to bypass version validation and incrementing, we need to
-    # remove the plugins from the list that are responsible for these tasks.
-    plugins = pyblish.api.discover()
-    blacklist = ["IncrementScriptVersion", "ValidateVersion"]
-    plugins = [
-        plugin
-        for plugin in plugins
-        if plugin.__name__ not in blacklist
-    ]
-
-    context = pyblish.util.publish(context, plugins=plugins)
-
-    error_message = ""
-    success = True
-    for result in context.data["results"]:
-        if result["success"]:
-            continue
-
-        success = False
-
-        err = result["error"]
-        error_message += "\n"
-        error_message += err.formatted_traceback
-
-    if not success:
-        show_message_dialog(
-            "Publish Errors", error_message, level="critical"
-        )
-        return
-
-    show_message_dialog(
-        "Submission Successful", "Submission to the farm was successful."
-    )
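Note on the plugin blacklist in `_submit_render_on_farm()` above: pyblish accepts an explicit plugin list in `publish()`, which is how version validation and incrementing get skipped. A minimal standalone sketch of that filtering (the plugin class names are the ones referenced above):

    import pyblish.api
    import pyblish.util

    context = pyblish.api.Context()
    context.data["render_on_farm"] = True

    # drop the plugins that would validate or bump the workfile version
    skip = {"IncrementScriptVersion", "ValidateVersion"}
    plugins = [
        plugin for plugin in pyblish.api.discover()
        if plugin.__name__ not in skip
    ]

    context = pyblish.util.publish(context, plugins=plugins)
    for result in context.data["results"]:
        if not result["success"]:
            print(result["error"])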
diff --git a/server_addon/nuke/client/ayon_nuke/api/workfile_template_builder.py b/server_addon/nuke/client/ayon_nuke/api/workfile_template_builder.py
deleted file mode 100644
index aebf91c4a4..0000000000
--- a/server_addon/nuke/client/ayon_nuke/api/workfile_template_builder.py
+++ /dev/null
@@ -1,156 +0,0 @@
-import collections
-import nuke
-
-from ayon_core.pipeline import registered_host
-from ayon_core.pipeline.workfile.workfile_template_builder import (
-    AbstractTemplateBuilder,
-    PlaceholderPlugin,
-)
-from ayon_core.tools.workfile_template_build import (
-    WorkfileBuildPlaceholderDialog,
-)
-from .lib import (
-    imprint,
-    reset_selection,
-    get_main_window,
-    WorkfileSettings,
-)
-
-PLACEHOLDER_SET = "PLACEHOLDERS_SET"
-
-
-class NukeTemplateBuilder(AbstractTemplateBuilder):
-    """Concrete implementation of AbstractTemplateBuilder for nuke"""
-
-    def import_template(self, path):
-        """Import template into current scene.
-        Block if a template is already loaded.
-
-        Args:
-            path (str): A path to current template (usually given by
-                get_template_preset implementation)
-
-        Returns:
-            bool: Whether the template was successfully imported or not
-        """
-
-        # TODO check if the template is already imported
-
-        nuke.nodePaste(path)
-        reset_selection()
-
-        return True
-
-
-class NukePlaceholderPlugin(PlaceholderPlugin):
-    node_color = 4278190335
-
-    def _collect_scene_placeholders(self):
-        # Cache placeholder data to shared data
-        placeholder_nodes = self.builder.get_shared_populate_data(
-            "placeholder_nodes"
-        )
-        if placeholder_nodes is None:
-            placeholder_nodes = {}
-            all_groups = collections.deque()
-            all_groups.append(nuke.thisGroup())
-            while all_groups:
-                group = all_groups.popleft()
-                for node in group.nodes():
-                    if isinstance(node, nuke.Group):
-                        all_groups.append(node)
-
-                    node_knobs = node.knobs()
-                    if (
-                        "is_placeholder" not in node_knobs
-                        or not node.knob("is_placeholder").value()
-                    ):
-                        continue
-
-                    if "empty" in node_knobs and node.knob("empty").value():
-                        continue
-
-                    placeholder_nodes[node.fullName()] = node
-
-            self.builder.set_shared_populate_data(
-                "placeholder_nodes", placeholder_nodes
-            )
-        return placeholder_nodes
-
-    def create_placeholder(self, placeholder_data):
-        placeholder_data["plugin_identifier"] = self.identifier
-
-        placeholder = nuke.nodes.NoOp()
-        placeholder.setName("PLACEHOLDER")
-        placeholder.knob("tile_color").setValue(self.node_color)
-
-        imprint(placeholder, placeholder_data)
-        imprint(placeholder, {"is_placeholder": True})
-        placeholder.knob("is_placeholder").setVisible(False)
-
-    def update_placeholder(self, placeholder_item, placeholder_data):
-        node = nuke.toNode(placeholder_item.scene_identifier)
-        imprint(node, placeholder_data)
-
-    def _parse_placeholder_node_data(self, node):
-        placeholder_data = {}
-        for key in self.get_placeholder_keys():
-            knob = node.knob(key)
-            value = None
-            if knob is not None:
-                value = knob.getValue()
-            placeholder_data[key] = value
-        return placeholder_data
-
-    def delete_placeholder(self, placeholder):
-        """Remove placeholder if building was successful"""
-        placeholder_node = nuke.toNode(placeholder.scene_identifier)
-        nuke.delete(placeholder_node)
-
-
-def build_workfile_template(*args, **kwargs):
-    builder = NukeTemplateBuilder(registered_host())
-    builder.build_template(*args, **kwargs)
-
-    # set all settings to shot context default
-    WorkfileSettings().set_context_settings()
-
-
-def update_workfile_template(*args):
-    builder = NukeTemplateBuilder(registered_host())
-    builder.rebuild_template()
-
-
-def create_placeholder(*args):
-    host = registered_host()
-    builder = NukeTemplateBuilder(host)
-    window = WorkfileBuildPlaceholderDialog(host, builder,
-                                            parent=get_main_window())
-    window.show()
-
-
-def update_placeholder(*args):
-    host = registered_host()
-    builder = NukeTemplateBuilder(host)
-    placeholder_items_by_id = {
-        placeholder_item.scene_identifier: placeholder_item
-        for placeholder_item in builder.get_placeholders()
-    }
-    placeholder_items = []
-    for node in nuke.selectedNodes():
-        node_name = node.fullName()
-        if node_name in placeholder_items_by_id:
-            placeholder_items.append(placeholder_items_by_id[node_name])
-
-    # TODO show UI at least
-    if len(placeholder_items) == 0:
-        raise ValueError("No node selected")
-
-    if len(placeholder_items) > 1:
-        raise ValueError("Too many selected nodes")
-
-    placeholder_item = placeholder_items[0]
-    window = WorkfileBuildPlaceholderDialog(host, builder,
-                                            parent=get_main_window())
-    window.set_update_mode(placeholder_item)
-    window.exec_()
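Note on `create_placeholder()` above: the pipeline `imprint` helper writes arbitrary key/value pairs onto knobs. A plain-nuke stand-in showing roughly what the placeholder node ends up carrying (runs inside Nuke only; `make_placeholder` is a hypothetical helper, not pipeline API):

    import nuke

    def make_placeholder(data, color=4278190335):
        node = nuke.nodes.NoOp()
        node.setName("PLACEHOLDER")
        node.knob("tile_color").setValue(color)

        # hidden boolean marker, same role as the `is_placeholder` knob above
        flag = nuke.Boolean_Knob("is_placeholder", "is_placeholder")
        flag.setValue(True)
        node.addKnob(flag)
        flag.setVisible(False)

        # one string knob per imprinted key
        for key, value in data.items():
            knob = nuke.String_Knob(key, key)
            knob.setValue(str(value))
            node.addKnob(knob)
        return node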
diff --git a/server_addon/nuke/client/ayon_nuke/api/workio.py b/server_addon/nuke/client/ayon_nuke/api/workio.py
deleted file mode 100644
index b2445fd3d2..0000000000
--- a/server_addon/nuke/client/ayon_nuke/api/workio.py
+++ /dev/null
@@ -1,78 +0,0 @@
-"""Host API required Work Files tool"""
-import os
-import nuke
-import shutil
-from .utils import is_headless
-
-
-def file_extensions():
-    return [".nk"]
-
-
-def has_unsaved_changes():
-    return nuke.root().modified()
-
-
-def save_file(filepath):
-    path = filepath.replace("\\", "/")
-    nuke.scriptSaveAs(path, overwrite=1)
-    nuke.Root()["name"].setValue(path)
-    nuke.Root()["project_directory"].setValue(os.path.dirname(path))
-    nuke.Root().setModified(False)
-
-
-def open_file(filepath):
-
-    def read_script(nuke_script):
-        nuke.scriptClear()
-        nuke.scriptReadFile(nuke_script)
-        nuke.Root()["name"].setValue(nuke_script)
-        nuke.Root()["project_directory"].setValue(
-            os.path.dirname(nuke_script))
-        nuke.Root().setModified(False)
-
-    filepath = filepath.replace("\\", "/")
-
-    # To remain in the same window, we have to clear the script and read
-    # in the contents of the workfile.
-    # Nuke Preferences can be read after the script is read.
-    read_script(filepath)
-
-    if not is_headless():
-        autosave = nuke.toNode("preferences")["AutoSaveName"].evaluate()
-        autosave_prmpt = "Autosave detected.\n" \
-                         "Would you like to load the autosave file?"  # noqa
-        if os.path.isfile(autosave) and nuke.ask(autosave_prmpt):
-            try:
-                # Overwrite the filepath with autosave
-                shutil.copy(autosave, filepath)
-                # Now read the (auto-saved) script again
-                read_script(filepath)
-            except shutil.Error as err:
-                nuke.message(
-                    "Detected autosave file could not be used.\n{}"
-                    .format(err))
-
-    return True
-
-
-def current_file():
-    current_file = nuke.root().name()
-
-    # Unsaved current file
-    if current_file == 'Root':
-        return None
-
-    return os.path.normpath(current_file).replace("\\", "/")
-
-
-def work_root(session):
-
-    work_dir = session["AYON_WORKDIR"]
-    scene_dir = session.get("AVALON_SCENEDIR")
-    if scene_dir:
-        path = os.path.join(work_dir, scene_dir)
-    else:
-        path = work_dir
-
-    return os.path.normpath(path).replace("\\", "/")
diff --git a/server_addon/nuke/client/ayon_nuke/hooks/pre_nukeassist_setup.py b/server_addon/nuke/client/ayon_nuke/hooks/pre_nukeassist_setup.py
deleted file mode 100644
index afef3ba843..0000000000
--- a/server_addon/nuke/client/ayon_nuke/hooks/pre_nukeassist_setup.py
+++ /dev/null
@@ -1,12 +0,0 @@
-from ayon_applications import PreLaunchHook
-
-
-class PrelaunchNukeAssistHook(PreLaunchHook):
-    """
-    Add the NUKEASSIST flag when launching NukeAssist.
-    """
-    app_groups = {"nukeassist"}
-    launch_types = set()
-
-    def execute(self):
-        self.launch_context.env["NUKEASSIST"] = "1"
diff --git a/server_addon/nuke/client/ayon_nuke/plugins/__init__.py b/server_addon/nuke/client/ayon_nuke/plugins/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/server_addon/nuke/client/ayon_nuke/plugins/create/__init__.py b/server_addon/nuke/client/ayon_nuke/plugins/create/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/server_addon/nuke/client/ayon_nuke/plugins/create/convert_legacy.py b/server_addon/nuke/client/ayon_nuke/plugins/create/convert_legacy.py
deleted file mode 100644
index 65e719d15b..0000000000
--- a/server_addon/nuke/client/ayon_nuke/plugins/create/convert_legacy.py
+++ /dev/null
@@ -1,55 +0,0 @@
-from ayon_core.pipeline import AYON_INSTANCE_ID, AVALON_INSTANCE_ID
-from ayon_core.pipeline.create.creator_plugins import ProductConvertorPlugin
-from ayon_nuke.api.lib import (
-    INSTANCE_DATA_KNOB,
-    get_node_data,
-    get_avalon_knob_data,
-    NODE_TAB_NAME,
-)
-from ayon_nuke.api.plugin import convert_to_valid_instaces
-
-import nuke
-
-
-class LegacyConverted(ProductConvertorPlugin):
-    identifier = "legacy.converter"
-
-    def find_instances(self):
-
-        legacy_found = False
-        # search for first available legacy item
-        for node in nuke.allNodes(recurseGroups=True):
-            if node.Class() in ["Viewer", "Dot"]:
-                continue
-
-            if get_node_data(node, INSTANCE_DATA_KNOB):
-                continue
-
-            if NODE_TAB_NAME not in node.knobs():
-                continue
-
-            # get data from avalon knob
-            avalon_knob_data = get_avalon_knob_data(
-                node, ["avalon:", "ak:"], create=False)
-
-            if not avalon_knob_data:
-                continue
-
-            if avalon_knob_data["id"] not in {
-                AYON_INSTANCE_ID, AVALON_INSTANCE_ID
-            }:
-                continue
-
-            # catch and break
-            legacy_found = True
-            break
-
-        if legacy_found:
-            # add the convertor item only when a legacy instance was found
-            self.add_convertor_item("Convert legacy instances")
-
-    def convert(self):
-        # loop all instances and convert them
-        convert_to_valid_instaces()
-        # remove legacy item if all is fine
-        self.remove_convertor_item()
diff --git a/server_addon/nuke/client/ayon_nuke/plugins/create/create_backdrop.py b/server_addon/nuke/client/ayon_nuke/plugins/create/create_backdrop.py
deleted file mode 100644
index f97b9efeb6..0000000000
--- a/server_addon/nuke/client/ayon_nuke/plugins/create/create_backdrop.py
+++ /dev/null
@@ -1,53 +0,0 @@
-from nukescripts import autoBackdrop
-
-from ayon_nuke.api import (
-    NukeCreator,
-    maintained_selection,
-    select_nodes
-)
-
-
-class CreateBackdrop(NukeCreator):
-    """Add Publishable Backdrop"""
-
-    settings_category = "nuke"
-
-    identifier = "create_backdrop"
-    label = "Nukenodes (backdrop)"
-    product_type = "nukenodes"
-    icon = "file-archive-o"
-    maintain_selection = True
-
-    # plugin attributes
-    node_color = "0xdfea5dff"
-
-    def create_instance_node(
-        self,
-        node_name,
-        knobs=None,
-        parent=None,
-        node_type=None
-    ):
-        with maintained_selection():
-            if len(self.selected_nodes) >= 1:
-                select_nodes(self.selected_nodes)
-
-            created_node = autoBackdrop()
-            created_node["name"].setValue(node_name)
-            created_node["tile_color"].setValue(int(self.node_color, 16))
-            created_node["note_font_size"].setValue(24)
-            created_node["label"].setValue("[{}]".format(node_name))
-
-        return created_node
-
-    def create(self, product_name, instance_data, pre_create_data):
-        # make sure product name is unique
-        self.check_existing_product(product_name)
-
-        instance = super(CreateBackdrop, self).create(
-            product_name,
-            instance_data,
-            pre_create_data
-        )
-
-        return instance
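Note on the `node_color` attributes used by these creators: the settings store colors as "0xRRGGBBAA" strings while Nuke's `tile_color` knob expects a plain integer, hence the `int(value, 16)` conversions above. A tiny self-contained check:

    def hex_to_tile_color(value):
        """Convert a '0xRRGGBBAA' settings string to Nuke's integer color."""
        return int(value, 16)

    assert hex_to_tile_color("0xdfea5dff") == 3756678655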
diff --git a/server_addon/nuke/client/ayon_nuke/plugins/create/create_camera.py b/server_addon/nuke/client/ayon_nuke/plugins/create/create_camera.py
deleted file mode 100644
index 69e5b9c676..0000000000
--- a/server_addon/nuke/client/ayon_nuke/plugins/create/create_camera.py
+++ /dev/null
@@ -1,71 +0,0 @@
-import nuke
-from ayon_nuke.api import (
-    NukeCreator,
-    NukeCreatorError,
-    maintained_selection
-)
-from ayon_nuke.api.lib import (
-    create_camera_node_by_version
-)
-
-
-class CreateCamera(NukeCreator):
-    """Add Publishable Camera"""
-
-    settings_category = "nuke"
-
-    identifier = "create_camera"
-    label = "Camera (3d)"
-    product_type = "camera"
-    icon = "camera"
-
-    # plugin attributes
-    node_color = "0xff9100ff"
-
-    def create_instance_node(
-        self,
-        node_name,
-        knobs=None,
-        parent=None,
-        node_type=None
-    ):
-        with maintained_selection():
-            if self.selected_nodes:
-                node = self.selected_nodes[0]
-                if node.Class() != "Camera3":
-                    raise NukeCreatorError(
-                        "Creator error: Select only camera node type")
-                created_node = self.selected_nodes[0]
-            else:
-                created_node = create_camera_node_by_version()
-
-            created_node["tile_color"].setValue(
-                int(self.node_color, 16))
-
-            created_node["name"].setValue(node_name)
-
-        return created_node
-
-    def create(self, product_name, instance_data, pre_create_data):
-        # make sure product name is unique
-        self.check_existing_product(product_name)
-
-        instance = super(CreateCamera, self).create(
-            product_name,
-            instance_data,
-            pre_create_data
-        )
-
-        return instance
-
-    def set_selected_nodes(self, pre_create_data):
-        if pre_create_data.get("use_selection"):
-            self.selected_nodes = nuke.selectedNodes()
-            if not self.selected_nodes:
-                raise NukeCreatorError(
-                    "Creator error: No active selection")
-            elif len(self.selected_nodes) > 1:
-                raise NukeCreatorError(
-                    "Creator error: Select only one camera node")
-        else:
-            self.selected_nodes = []
diff --git a/server_addon/nuke/client/ayon_nuke/plugins/create/create_gizmo.py b/server_addon/nuke/client/ayon_nuke/plugins/create/create_gizmo.py
deleted file mode 100644
index 6be7cd58db..0000000000
--- a/server_addon/nuke/client/ayon_nuke/plugins/create/create_gizmo.py
+++ /dev/null
@@ -1,67 +0,0 @@
-import nuke
-from ayon_nuke.api import (
-    NukeCreator,
-    NukeCreatorError,
-    maintained_selection
-)
-
-
-class CreateGizmo(NukeCreator):
-    """Add Publishable Group as gizmo"""
-
-    settings_category = "nuke"
-
-    identifier = "create_gizmo"
-    label = "Gizmo (group)"
-    product_type = "gizmo"
-    icon = "file-archive-o"
-    default_variants = ["ViewerInput", "Lut", "Effect"]
-
-    # plugin attributes
-    node_color = "0x7533c1ff"
-
-    def create_instance_node(
-        self,
-        node_name,
-        knobs=None,
-        parent=None,
-        node_type=None
-    ):
-        with maintained_selection():
-            if self.selected_nodes:
-                node = self.selected_nodes[0]
-                if node.Class() != "Group":
-                    raise NukeCreatorError(
-                        "Creator error: Select only 'Group' node type")
-                created_node = node
-            else:
-                created_node = nuke.collapseToGroup()
-
-            created_node["tile_color"].setValue(
-                int(self.node_color, 16))
-
-            created_node["name"].setValue(node_name)
-
-        return created_node
-
-    def create(self, product_name, instance_data, pre_create_data):
-        # make sure product name is unique
-        self.check_existing_product(product_name)
-
-        instance = super(CreateGizmo, self).create(
-            product_name,
-            instance_data,
-            pre_create_data
-        )
-
-        return instance
-
-    def set_selected_nodes(self, pre_create_data):
-        if pre_create_data.get("use_selection"):
-            self.selected_nodes = nuke.selectedNodes()
-            if not self.selected_nodes:
-                raise NukeCreatorError("Creator error: No active selection")
-            elif len(self.selected_nodes) > 1:
-                raise NukeCreatorError(
-                    "Creator error: Select only one 'Group' node")
-        else:
-            self.selected_nodes = []
diff --git a/server_addon/nuke/client/ayon_nuke/plugins/create/create_model.py b/server_addon/nuke/client/ayon_nuke/plugins/create/create_model.py
deleted file mode 100644
index b7d7b740c2..0000000000
--- a/server_addon/nuke/client/ayon_nuke/plugins/create/create_model.py
+++ /dev/null
@@ -1,67 +0,0 @@
-import nuke
-from ayon_nuke.api import (
-    NukeCreator,
-    NukeCreatorError,
-    maintained_selection
-)
-
-
-class CreateModel(NukeCreator):
-    """Add Publishable Model"""
-
-    settings_category = "nuke"
-
-    identifier = "create_model"
-    label = "Model (3d)"
-    product_type = "model"
-    icon = "cube"
-    default_variants = ["Main"]
-
-    # plugin attributes
-    node_color = "0xff3200ff"
-
-    def create_instance_node(
-        self,
-        node_name,
-        knobs=None,
-        parent=None,
-        node_type=None
-    ):
-        with maintained_selection():
-            if self.selected_nodes:
-                node = self.selected_nodes[0]
-                if node.Class() != "Scene":
-                    raise NukeCreatorError(
-                        "Creator error: Select only 'Scene' node type")
-                created_node = node
-            else:
-                created_node = nuke.createNode("Scene")
-
-            created_node["tile_color"].setValue(
-                int(self.node_color, 16))
-
-            created_node["name"].setValue(node_name)
-
-        return created_node
-
-    def create(self, product_name, instance_data, pre_create_data):
-        # make sure product name is unique
-        self.check_existing_product(product_name)
-
-        instance = super(CreateModel, self).create(
-            product_name,
-            instance_data,
-            pre_create_data
-        )
-
-        return instance
-
-    def set_selected_nodes(self, pre_create_data):
-        if pre_create_data.get("use_selection"):
-            self.selected_nodes = nuke.selectedNodes()
-            if not self.selected_nodes:
-                raise NukeCreatorError("Creator error: No active selection")
-            elif len(self.selected_nodes) > 1:
-                raise NukeCreatorError(
-                    "Creator error: Select only one 'Scene' node")
-        else:
-            self.selected_nodes = []
diff --git a/server_addon/nuke/client/ayon_nuke/plugins/create/create_source.py b/server_addon/nuke/client/ayon_nuke/plugins/create/create_source.py
deleted file mode 100644
index 1579cebb1d..0000000000
--- a/server_addon/nuke/client/ayon_nuke/plugins/create/create_source.py
+++ /dev/null
@@ -1,90 +0,0 @@
-import nuke
-import six
-import sys
-from ayon_nuke.api import (
-    INSTANCE_DATA_KNOB,
-    NukeCreator,
-    NukeCreatorError,
-    set_node_data
-)
-from ayon_core.pipeline import (
-    CreatedInstance
-)
-
-
-class CreateSource(NukeCreator):
-    """Add Publishable Read with source"""
-
-    settings_category = "nuke"
-
-    identifier = "create_source"
-    label = "Source (read)"
-    product_type = "source"
-    icon = "film"
-    default_variants = ["Effect", "Backplate", "Fire", "Smoke"]
-
-    # plugin attributes
-    node_color = "0xff9100ff"
-
-    def create_instance_node(
-        self,
-        node_name,
-        read_node
-    ):
-        read_node["tile_color"].setValue(
-            int(self.node_color, 16))
-        read_node["name"].setValue(node_name)
-
-        return read_node
-
-    def create(self, product_name, instance_data, pre_create_data):
-
-        # make sure selected nodes are added
-        self.set_selected_nodes(pre_create_data)
-
-        try:
-            for read_node in self.selected_nodes:
-                if read_node.Class() != 'Read':
-                    continue
-
-                node_name = read_node.name()
-                _product_name = product_name + node_name
-
-                # make sure product name is unique
-                self.check_existing_product(_product_name)
-
-                instance_node = self.create_instance_node(
-                    _product_name,
-                    read_node
-                )
-                instance = CreatedInstance(
-                    self.product_type,
-                    _product_name,
-                    instance_data,
-                    self
-                )
-
-                instance.transient_data["node"] = instance_node
-
-                self._add_instance_to_context(instance)
-
-                set_node_data(
-                    instance_node,
-                    INSTANCE_DATA_KNOB,
-                    instance.data_to_store()
-                )
-
-        except Exception as er:
-            six.reraise(
-                NukeCreatorError,
-                NukeCreatorError("Creator error: {}".format(er)),
-                sys.exc_info()[2])
-
-    def set_selected_nodes(self, pre_create_data):
-        if pre_create_data.get("use_selection"):
-            self.selected_nodes = nuke.selectedNodes()
-            if not self.selected_nodes:
-                raise NukeCreatorError("Creator error: No active selection")
-        else:
-            raise NukeCreatorError(
-                "Creator error: only supported with active selection")
diff --git a/server_addon/nuke/client/ayon_nuke/plugins/create/create_write_image.py b/server_addon/nuke/client/ayon_nuke/plugins/create/create_write_image.py
deleted file mode 100644
index 2268817e76..0000000000
--- a/server_addon/nuke/client/ayon_nuke/plugins/create/create_write_image.py
+++ /dev/null
@@ -1,174 +0,0 @@
-import nuke
-import sys
-import six
-
-from ayon_core.pipeline import (
-    CreatedInstance
-)
-from ayon_core.lib import (
-    BoolDef,
-    NumberDef,
-    UISeparatorDef,
-    EnumDef
-)
-from ayon_nuke import api as napi
-from ayon_nuke.api.plugin import exposed_write_knobs
-
-
-class CreateWriteImage(napi.NukeWriteCreator):
-
-    settings_category = "nuke"
-
-    identifier = "create_write_image"
-    label = "Image (write)"
-    product_type = "image"
-    icon = "sign-out"
-
-    instance_attributes = [
-        "use_range_limit"
-    ]
-    default_variants = [
-        "StillFrame",
-        "MPFrame",
-        "LayoutFrame"
-    ]
-    temp_rendering_path_template = (
-        "{work}/renders/nuke/{subset}/{subset}.{frame}.{ext}")
-
-    def get_pre_create_attr_defs(self):
-        attr_defs = [
-            BoolDef(
-                "use_selection",
-                default=not self.create_context.headless,
-                label="Use selection"
-            ),
-            self._get_render_target_enum(),
-            UISeparatorDef(),
-            self._get_frame_source_number()
-        ]
-        return attr_defs
-
-    def _get_render_target_enum(self):
-        rendering_targets = {
-            "local": "Local machine rendering",
-            "frames": "Use existing frames"
-        }
-
-        return EnumDef(
-            "render_target",
-            items=rendering_targets,
-            label="Render target"
-        )
-
-    def _get_frame_source_number(self):
-        return NumberDef(
-            "active_frame",
-            label="Active frame",
-            default=nuke.frame()
-        )
-
-    def create_instance_node(self, product_name, instance_data):
-        settings = self.project_settings["nuke"]["create"]["CreateWriteImage"]
-
-        # add fpath_template
-        write_data = {
-            "creator": self.__class__.__name__,
-            "productName": product_name,
-            "fpath_template": self.temp_rendering_path_template,
-            "render_on_farm": (
-                "render_on_farm" in settings["instance_attributes"]
-            )
-        }
-        write_data.update(instance_data)
-
-        created_node = napi.create_write_node(
-            product_name,
-            write_data,
-            input=self.selected_node,
-            prenodes=self.prenodes,
-            linked_knobs=self.get_linked_knobs(),
-            **{
-                "frame": nuke.frame()
-            }
-        )
-
-        self._add_frame_range_limit(created_node, instance_data)
-
-        self.integrate_links(created_node, outputs=True)
-
-        return created_node
-
-    def create(self, product_name, instance_data, pre_create_data):
-        product_name = product_name.format(**pre_create_data)
-
-        # pass values from precreate to instance
-        self.pass_pre_attributes_to_instance(
-            instance_data,
-            pre_create_data,
-            [
-                "active_frame",
-                "render_target"
-            ]
-        )
-
-        # make sure selected nodes are added
-        self.set_selected_nodes(pre_create_data)
-
-        # make sure product name is unique
-        self.check_existing_product(product_name)
-
-        instance_node = self.create_instance_node(
-            product_name,
-            instance_data,
-        )
-
-        try:
-            instance = CreatedInstance(
-                self.product_type,
-                product_name,
-                instance_data,
-                self
-            )
-
-            instance.transient_data["node"] = instance_node
-
-            self._add_instance_to_context(instance)
-
-            napi.set_node_data(
-                instance_node,
-                napi.INSTANCE_DATA_KNOB,
-                instance.data_to_store()
-            )
-
-            exposed_write_knobs(
-                self.project_settings, self.__class__.__name__, instance_node
-            )
-
-            return instance
-
-        except Exception as er:
-            six.reraise(
-                napi.NukeCreatorError,
-                napi.NukeCreatorError("Creator error: {}".format(er)),
-                sys.exc_info()[2]
-            )
-
-    def _add_frame_range_limit(self, write_node, instance_data):
-        if "use_range_limit" not in self.instance_attributes:
-            return
-
-        active_frame = (
-            instance_data["creator_attributes"].get("active_frame"))
-
-        write_node.begin()
-        for n in nuke.allNodes():
-            # get write node
-            if n.Class() in "Write":
-                w_node = n
-        write_node.end()
-
-        w_node["use_limit"].setValue(True)
-        w_node["first"].setValue(active_frame or nuke.frame())
-        w_node["last"].setExpression("first")
-
-        return write_node
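Note on `_add_frame_range_limit()` above: it enters the group-based instance node with `begin()`/`end()` to find the inner Write node. The same lookup can be written with the group's context manager. A short sketch (runs inside Nuke only; `get_group_write_node` is a hypothetical helper):

    import nuke

    def get_group_write_node(group_node):
        # equivalent to the begin()/end() pairing used above
        with group_node:
            writes = nuke.allNodes("Write")
        return writes[0] if writes else None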
diff --git a/server_addon/nuke/client/ayon_nuke/plugins/create/create_write_prerender.py b/server_addon/nuke/client/ayon_nuke/plugins/create/create_write_prerender.py
deleted file mode 100644
index 014e91e81c..0000000000
--- a/server_addon/nuke/client/ayon_nuke/plugins/create/create_write_prerender.py
+++ /dev/null
@@ -1,160 +0,0 @@
-import nuke
-import sys
-import six
-
-from ayon_core.pipeline import (
-    CreatedInstance
-)
-from ayon_core.lib import (
-    BoolDef
-)
-from ayon_nuke import api as napi
-from ayon_nuke.api.plugin import exposed_write_knobs
-
-
-class CreateWritePrerender(napi.NukeWriteCreator):
-
-    settings_category = "nuke"
-
-    identifier = "create_write_prerender"
-    label = "Prerender (write)"
-    product_type = "prerender"
-    icon = "sign-out"
-
-    instance_attributes = [
-        "use_range_limit"
-    ]
-    default_variants = [
-        "Key01",
-        "Bg01",
-        "Fg01",
-        "Branch01",
-        "Part01"
-    ]
-    temp_rendering_path_template = (
-        "{work}/renders/nuke/{subset}/{subset}.{frame}.{ext}")
-
-    # Before write node render.
-    order = 90
-
-    def get_pre_create_attr_defs(self):
-        attr_defs = [
-            BoolDef(
-                "use_selection",
-                default=not self.create_context.headless,
-                label="Use selection"
-            ),
-            self._get_render_target_enum()
-        ]
-        return attr_defs
-
-    def create_instance_node(self, product_name, instance_data):
-        settings = self.project_settings["nuke"]["create"]
-        settings = settings["CreateWritePrerender"]
-
-        # add fpath_template
-        write_data = {
-            "creator": self.__class__.__name__,
-            "productName": product_name,
-            "fpath_template": self.temp_rendering_path_template,
-            "render_on_farm": (
-                "render_on_farm" in settings["instance_attributes"]
-            )
-        }
-
-        write_data.update(instance_data)
-
-        # get width and height
-        if self.selected_node:
-            width, height = (
-                self.selected_node.width(), self.selected_node.height())
-        else:
-            actual_format = nuke.root().knob('format').value()
-            width, height = (actual_format.width(), actual_format.height())
-
-        created_node = napi.create_write_node(
-            product_name,
-            write_data,
-            input=self.selected_node,
-            prenodes=self.prenodes,
-            linked_knobs=self.get_linked_knobs(),
-            **{
-                "width": width,
-                "height": height
-            }
-        )
-
-        self._add_frame_range_limit(created_node)
-
-        self.integrate_links(created_node, outputs=True)
-
-        return created_node
-
-    def create(self, product_name, instance_data, pre_create_data):
-        # pass values from precreate to instance
-        self.pass_pre_attributes_to_instance(
-            instance_data,
-            pre_create_data,
-            [
-                "render_target"
-            ]
-        )
-
-        # make sure selected nodes are added
-        self.set_selected_nodes(pre_create_data)
-
-        # make sure product name is unique
-        self.check_existing_product(product_name)
-
-        instance_node = self.create_instance_node(
-            product_name,
-            instance_data
-        )
-
-        try:
-            instance = CreatedInstance(
-                self.product_type,
-                product_name,
-                instance_data,
-                self
-            )
-
-            instance.transient_data["node"] = instance_node
-
-            self._add_instance_to_context(instance)
-
-            napi.set_node_data(
-                instance_node,
-                napi.INSTANCE_DATA_KNOB,
-                instance.data_to_store()
-            )
-
-            exposed_write_knobs(
-                self.project_settings, self.__class__.__name__, instance_node
-            )
-
-            return instance
-
-        except Exception as er:
-            six.reraise(
-                napi.NukeCreatorError,
-                napi.NukeCreatorError("Creator error: {}".format(er)),
-                sys.exc_info()[2]
-            )
-
-    def _add_frame_range_limit(self, write_node):
-        if "use_range_limit" not in self.instance_attributes:
-            return
-
-        write_node.begin()
-        for n in nuke.allNodes():
-            # get write node
-            if n.Class() in "Write":
-                w_node = n
-        write_node.end()
-
-        w_node["use_limit"].setValue(True)
-        w_node["first"].setValue(nuke.root()["first_frame"].value())
-        w_node["last"].setValue(nuke.root()["last_frame"].value())
-
-        return write_node
diff --git a/server_addon/nuke/client/ayon_nuke/plugins/create/create_write_render.py b/server_addon/nuke/client/ayon_nuke/plugins/create/create_write_render.py
deleted file mode 100644
index bed081c882..0000000000
--- a/server_addon/nuke/client/ayon_nuke/plugins/create/create_write_render.py
+++ /dev/null
@@ -1,136 +0,0 @@
-import nuke
-import sys
-import six
-
-from ayon_core.pipeline import (
-    CreatedInstance
-)
-from ayon_core.lib import (
-    BoolDef
-)
-from ayon_nuke import api as napi
-from ayon_nuke.api.plugin import exposed_write_knobs
-
-
-class CreateWriteRender(napi.NukeWriteCreator):
-
-    settings_category = "nuke"
-
-    identifier = "create_write_render"
-    label = "Render (write)"
-    product_type = "render"
-    icon = "sign-out"
-
-    instance_attributes = [
-        "reviewable"
-    ]
-    default_variants = [
-        "Main",
-        "Mask"
-    ]
-    temp_rendering_path_template = (
-        "{work}/renders/nuke/{subset}/{subset}.{frame}.{ext}")
-
-    def get_pre_create_attr_defs(self):
-        attr_defs = [
-            BoolDef(
-                "use_selection",
-                default=not self.create_context.headless,
-                label="Use selection"
-            ),
-            self._get_render_target_enum()
-        ]
-        return attr_defs
-
-    def create_instance_node(self, product_name, instance_data):
-        settings = self.project_settings["nuke"]["create"]["CreateWriteRender"]
-
-        # add fpath_template
-        write_data = {
-            "creator": self.__class__.__name__,
-            "productName": product_name,
-            "fpath_template": self.temp_rendering_path_template,
-            "render_on_farm": (
-                "render_on_farm" in settings["instance_attributes"]
-            )
-        }
-
-        write_data.update(instance_data)
-
-        # get width and height
-        if self.selected_node:
-            width, height = (
-                self.selected_node.width(), self.selected_node.height())
-        else:
-            actual_format = nuke.root().knob('format').value()
-            width, height = (actual_format.width(), actual_format.height())
-
-        self.log.debug(">>>>>>> : {}".format(self.instance_attributes))
-        self.log.debug(">>>>>>> : {}".format(self.get_linked_knobs()))
-
-        created_node = napi.create_write_node(
-            product_name,
-            write_data,
-            input=self.selected_node,
-            prenodes=self.prenodes,
-            linked_knobs=self.get_linked_knobs(),
-            **{
-                "width": width,
-                "height": height
-            }
-        )
-
-        self.integrate_links(created_node, outputs=False)
-
-        return created_node
-
-    def create(self, product_name, instance_data, pre_create_data):
-        # pass values from precreate to instance
-        self.pass_pre_attributes_to_instance(
-            instance_data,
-            pre_create_data,
-            [
-                "render_target"
-            ]
-        )
-        # make sure selected nodes are added
-        self.set_selected_nodes(pre_create_data)
-
-        # make sure product name is unique
-        self.check_existing_product(product_name)
-
-        instance_node = self.create_instance_node(
-            product_name,
-            instance_data
-        )
-
-        try:
-            instance = CreatedInstance(
-                self.product_type,
-                product_name,
-                instance_data,
-                self
-            )
-
-            instance.transient_data["node"] = instance_node
-
-            self._add_instance_to_context(instance)
-
-            napi.set_node_data(
-                instance_node,
-                napi.INSTANCE_DATA_KNOB,
-                instance.data_to_store()
-            )
-
-            exposed_write_knobs(
-                self.project_settings, self.__class__.__name__, instance_node
-            )
-
-            return instance
-
-        except Exception as er:
-            six.reraise(
-                napi.NukeCreatorError,
-                napi.NukeCreatorError("Creator error: {}".format(er)),
-                sys.exc_info()[2]
-            )
self._add_instance_to_context(instance) - - napi.set_node_data( - instance_node, - napi.INSTANCE_DATA_KNOB, - instance.data_to_store() - ) - - exposed_write_knobs( - self.project_settings, self.__class__.__name__, instance_node - ) - - return instance - - except Exception as er: - six.reraise( - napi.NukeCreatorError, - napi.NukeCreatorError("Creator error: {}".format(er)), - sys.exc_info()[2] - ) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/create/workfile_creator.py b/server_addon/nuke/client/ayon_nuke/plugins/create/workfile_creator.py deleted file mode 100644 index 463d898224..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/create/workfile_creator.py +++ /dev/null @@ -1,84 +0,0 @@ -import ayon_api - -import ayon_nuke.api as api -from ayon_core.pipeline import ( - AutoCreator, - CreatedInstance, -) -from ayon_nuke.api import ( - INSTANCE_DATA_KNOB, - set_node_data -) -import nuke - - -class WorkfileCreator(AutoCreator): - - settings_category = "nuke" - - identifier = "workfile" - product_type = "workfile" - - default_variant = "Main" - - def get_instance_attr_defs(self): - return [] - - def collect_instances(self): - root_node = nuke.root() - instance_data = api.get_node_data( - root_node, api.INSTANCE_DATA_KNOB - ) - - project_name = self.create_context.get_current_project_name() - folder_path = self.create_context.get_current_folder_path() - task_name = self.create_context.get_current_task_name() - host_name = self.create_context.host_name - - folder_entity = ayon_api.get_folder_by_path( - project_name, folder_path - ) - task_entity = ayon_api.get_task_by_name( - project_name, folder_entity["id"], task_name - ) - product_name = self.get_product_name( - project_name, - folder_entity, - task_entity, - self.default_variant, - host_name, - ) - instance_data.update({ - "folderPath": folder_path, - "task": task_name, - "variant": self.default_variant - }) - instance_data.update(self.get_dynamic_data( - project_name, - folder_entity, - task_entity, - self.default_variant, - host_name, - instance_data - )) - - instance = CreatedInstance( - self.product_type, product_name, instance_data, self - ) - instance.transient_data["node"] = root_node - self._add_instance_to_context(instance) - - def update_instances(self, update_list): - for created_inst, _changes in update_list: - instance_node = created_inst.transient_data["node"] - - set_node_data( - instance_node, - INSTANCE_DATA_KNOB, - created_inst.data_to_store() - ) - - def create(self, options=None): - # no need to create if it is created - # in `collect_instances` - pass diff --git a/server_addon/nuke/client/ayon_nuke/plugins/inventory/repair_old_loaders.py b/server_addon/nuke/client/ayon_nuke/plugins/inventory/repair_old_loaders.py deleted file mode 100644 index 11d65d4b8c..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/inventory/repair_old_loaders.py +++ /dev/null @@ -1,36 +0,0 @@ -from ayon_core.lib import Logger -from ayon_core.pipeline import InventoryAction -from ayon_nuke.api.lib import set_avalon_knob_data - - -class RepairOldLoaders(InventoryAction): - - label = "Repair Old Loaders" - icon = "gears" - color = "#cc0000" - - log = Logger.get_logger(__name__) - - def process(self, containers): - import nuke - new_loader = "LoadClip" - - for cdata in containers: - orig_loader = cdata["loader"] - orig_name = cdata["objectName"] - if orig_loader not in ["LoadSequence", "LoadMov"]: - self.log.warning( - "This repair action is only working on " - "`LoadSequence` and `LoadMov` Loaders") - continue - - 
new_name = orig_name.replace(orig_loader, new_loader) - node = nuke.toNode(cdata["objectName"]) - - cdata.update({ - "loader": new_loader, - "objectName": new_name - }) - node["name"].setValue(new_name) - # get data from avalon knob - set_avalon_knob_data(node, cdata) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/inventory/select_containers.py b/server_addon/nuke/client/ayon_nuke/plugins/inventory/select_containers.py deleted file mode 100644 index f67c8c16e9..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/inventory/select_containers.py +++ /dev/null @@ -1,21 +0,0 @@ -from ayon_core.pipeline import InventoryAction -from ayon_nuke.api.command import viewer_update_and_undo_stop - - -class SelectContainers(InventoryAction): - - label = "Select Containers" - icon = "mouse-pointer" - color = "#d8d8d8" - - def process(self, containers): - import nuke - - nodes = [nuke.toNode(i["objectName"]) for i in containers] - - with viewer_update_and_undo_stop(): - # clear previous_selection - [n['selected'].setValue(False) for n in nodes] - # Select tool - for node in nodes: - node["selected"].setValue(True) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/load/actions.py b/server_addon/nuke/client/ayon_nuke/plugins/load/actions.py deleted file mode 100644 index a4e2b156a3..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/load/actions.py +++ /dev/null @@ -1,77 +0,0 @@ -"""A module containing generic loader actions that will display in the Loader. - -""" - -from ayon_core.lib import Logger -from ayon_core.pipeline import load -from ayon_nuke.api import lib - -log = Logger.get_logger(__name__) - - -class SetFrameRangeLoader(load.LoaderPlugin): - """Set frame range excluding pre- and post-handles""" - - product_types = { - "animation", - "camera", - "write", - "yeticache", - "pointcache", - } - representations = {"*"} - extensions = {"*"} - - label = "Set frame range" - order = 11 - icon = "clock-o" - color = "white" - - def load(self, context, name, namespace, data): - version_entity = context["version"] - version_attributes = version_entity["attrib"] - - start = version_attributes.get("frameStart") - end = version_attributes.get("frameEnd") - - log.info("start: {}, end: {}".format(start, end)) - if start is None or end is None: - log.info("Skipping setting frame range because start or " - "end frame data is missing..") - return - - lib.update_frame_range(start, end) - - -class SetFrameRangeWithHandlesLoader(load.LoaderPlugin): - """Set frame range including pre- and post-handles""" - - product_types = { - "animation", - "camera", - "write", - "yeticache", - "pointcache", - } - representations = {"*"} - - label = "Set frame range (with handles)" - order = 12 - icon = "clock-o" - color = "white" - - def load(self, context, name, namespace, data): - version_attributes = context["version"]["attrib"] - start = version_attributes.get("frameStart") - end = version_attributes.get("frameEnd") - - if start is None or end is None: - print("Skipping setting frame range because start or " - "end frame data is missing..") - return - - # Include handles - start -= version_attributes.get("handleStart", 0) - end += version_attributes.get("handleEnd", 0) - - lib.update_frame_range(start, end) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/load/load_backdrop.py b/server_addon/nuke/client/ayon_nuke/plugins/load/load_backdrop.py deleted file mode 100644 index 054a56d041..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/load/load_backdrop.py +++ /dev/null @@ -1,255 
+0,0 @@ -import nuke -import nukescripts -import ayon_api - -from ayon_core.pipeline import ( - load, - get_representation_path, -) -from ayon_nuke.api.lib import ( - find_free_space_to_paste_nodes, - maintained_selection, - reset_selection, - select_nodes, - get_avalon_knob_data, - set_avalon_knob_data -) -from ayon_nuke.api.command import viewer_update_and_undo_stop -from ayon_nuke.api import containerise, update_container - - -class LoadBackdropNodes(load.LoaderPlugin): - """Loading Published Backdrop nodes (workfile, nukenodes)""" - - product_types = {"workfile", "nukenodes"} - representations = {"*"} - extensions = {"nk"} - - settings_category = "nuke" - - label = "Import Nuke Nodes" - order = 0 - icon = "eye" - color = "white" - node_color = "0x7533c1ff" - - def load(self, context, name, namespace, data): - """ - Loading function to import .nk file into script and wrap - it on backdrop - - Arguments: - context (dict): context of version - name (str): name of the version - namespace (str): namespace name - data (dict): compulsory attribute > not used - - Returns: - nuke node: containerised nuke node object - """ - - # get main variables - namespace = namespace or context["folder"]["name"] - version_entity = context["version"] - - version_attributes = version_entity["attrib"] - colorspace = version_attributes.get("colorSpace") - - object_name = "{}_{}".format(name, namespace) - - # prepare data for imprinting - data_imprint = { - "version": version_entity["version"], - "colorspaceInput": colorspace - } - - # add attributes from the version to imprint to metadata knob - for k in ["source", "fps"]: - data_imprint[k] = version_attributes[k] - - # getting file path - file = self.filepath_from_context(context).replace("\\", "/") - - # adding nodes to node graph - # just in case we are in group lets jump out of it - nuke.endGroup() - - # Get mouse position - n = nuke.createNode("NoOp") - xcursor, ycursor = (n.xpos(), n.ypos()) - reset_selection() - nuke.delete(n) - - bdn_frame = 50 - - with maintained_selection(): - - # add group from nk - nuke.nodePaste(file) - - # get all pasted nodes - new_nodes = list() - nodes = nuke.selectedNodes() - - # get pointer position in DAG - xpointer, ypointer = find_free_space_to_paste_nodes( - nodes, direction="right", offset=200 + bdn_frame - ) - - # reset position to all nodes and replace inputs and output - for n in nodes: - reset_selection() - xpos = (n.xpos() - xcursor) + xpointer - ypos = (n.ypos() - ycursor) + ypointer - n.setXYpos(xpos, ypos) - - # replace Input nodes for dots - if n.Class() in "Input": - dot = nuke.createNode("Dot") - new_name = n.name().replace("INP", "DOT") - dot.setName(new_name) - dot["label"].setValue(new_name) - dot.setXYpos(xpos, ypos) - new_nodes.append(dot) - - # rewire - dep = n.dependent() - for d in dep: - index = next((i for i, dpcy in enumerate( - d.dependencies()) - if n is dpcy), 0) - d.setInput(index, dot) - - # remove Input node - reset_selection() - nuke.delete(n) - continue - - # replace Input nodes for dots - elif n.Class() in "Output": - dot = nuke.createNode("Dot") - new_name = n.name() + "_DOT" - dot.setName(new_name) - dot["label"].setValue(new_name) - dot.setXYpos(xpos, ypos) - new_nodes.append(dot) - - # rewire - dep = next((d for d in n.dependencies()), None) - if dep: - dot.setInput(0, dep) - - # remove Input node - reset_selection() - nuke.delete(n) - continue - else: - new_nodes.append(n) - - # reselect nodes with new Dot instead of Inputs and Output - reset_selection() - select_nodes(new_nodes) - # 
place on backdrop - bdn = nukescripts.autoBackdrop() - - # add frame offset - xpos = bdn.xpos() - bdn_frame - ypos = bdn.ypos() - bdn_frame - bdwidth = bdn["bdwidth"].value() + (bdn_frame*2) - bdheight = bdn["bdheight"].value() + (bdn_frame*2) - - bdn["xpos"].setValue(xpos) - bdn["ypos"].setValue(ypos) - bdn["bdwidth"].setValue(bdwidth) - bdn["bdheight"].setValue(bdheight) - - bdn["name"].setValue(object_name) - bdn["label"].setValue("Version tracked frame: \n`{}`\n\nPLEASE DO NOT REMOVE OR MOVE \nANYTHING FROM THIS FRAME!".format(object_name)) - bdn["note_font_size"].setValue(20) - - return containerise( - node=bdn, - name=name, - namespace=namespace, - context=context, - loader=self.__class__.__name__, - data=data_imprint) - - def update(self, container, context): - """Update the Loader's path - - Nuke automatically tries to reset some variables when changing - the loader's path to a new file. These automatic changes are to its - inputs: - - """ - - # get main variables - # Get version from io - project_name = context["project"]["name"] - version_entity = context["version"] - repre_entity = context["representation"] - - # get corresponding node - GN = container["node"] - - file = get_representation_path(repre_entity).replace("\\", "/") - - name = container["name"] - namespace = container["namespace"] - object_name = "{}_{}".format(name, namespace) - - version_attributes = version_entity["attrib"] - colorspace = version_attributes.get("colorSpace") - - data_imprint = { - "representation": repre_entity["id"], - "version": version_entity["version"], - "colorspaceInput": colorspace, - } - - for k in ["source", "fps"]: - data_imprint[k] = version_attributes[k] - - # adding nodes to node graph - # just in case we are in group lets jump out of it - nuke.endGroup() - - with maintained_selection(): - xpos = GN.xpos() - ypos = GN.ypos() - avalon_data = get_avalon_knob_data(GN) - nuke.delete(GN) - # add group from nk - nuke.nodePaste(file) - - GN = nuke.selectedNode() - set_avalon_knob_data(GN, avalon_data) - GN.setXYpos(xpos, ypos) - GN["name"].setValue(object_name) - - # get all versions in list - last_version_entity = ayon_api.get_last_version_by_product_id( - project_name, version_entity["productId"], fields={"id"} - ) - - # change color of node - if version_entity["id"] == last_version_entity["id"]: - color_value = self.node_color - else: - color_value = "0xd88467ff" - GN["tile_color"].setValue(int(color_value, 16)) - - self.log.info( - "updated to version: {}".format(version_entity["version"]) - ) - - return update_container(GN, data_imprint) - - def switch(self, container, context): - self.update(container, context) - - def remove(self, container): - node = container["node"] - with viewer_update_and_undo_stop(): - nuke.delete(node) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/load/load_camera_abc.py b/server_addon/nuke/client/ayon_nuke/plugins/load/load_camera_abc.py deleted file mode 100644 index 3930cf52fa..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/load/load_camera_abc.py +++ /dev/null @@ -1,198 +0,0 @@ -import nuke -import ayon_api - -from ayon_core.pipeline import ( - load, - get_representation_path, -) -from ayon_nuke.api import ( - containerise, - update_container, - viewer_update_and_undo_stop -) -from ayon_nuke.api.lib import ( - maintained_selection -) - - -class AlembicCameraLoader(load.LoaderPlugin): - """ - This will load alembic camera into script. 
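The backdrop placement above grows `nukescripts.autoBackdrop()` by a fixed margin (`bdn_frame`) on every side so the frame visually encloses the pasted nodes. A condensed sketch of that sizing arithmetic, only meaningful inside a running Nuke session:

import nuke
import nukescripts


def frame_selection_with_backdrop(margin=50, label="tracked nodes"):
    # Wrap the current selection in an auto backdrop, then offset the
    # corner by `margin` and widen both dimensions by twice the margin
    # so every side gains the same padding.
    bdn = nukescripts.autoBackdrop()
    bdn["xpos"].setValue(bdn.xpos() - margin)
    bdn["ypos"].setValue(bdn.ypos() - margin)
    bdn["bdwidth"].setValue(bdn["bdwidth"].value() + margin * 2)
    bdn["bdheight"].setValue(bdn["bdheight"].value() + margin * 2)
    bdn["label"].setValue(label)
    return bdn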
- """ - - product_types = {"camera"} - representations = {"*"} - extensions = {"abc"} - - settings_category = "nuke" - - label = "Load Alembic Camera" - icon = "camera" - color = "orange" - node_color = "0x3469ffff" - - def load(self, context, name, namespace, data): - # get main variables - version_entity = context["version"] - - version_attributes = version_entity["attrib"] - first = version_attributes.get("frameStart") - last = version_attributes.get("frameEnd") - fps = version_attributes.get("fps") or nuke.root()["fps"].getValue() - - namespace = namespace or context["folder"]["name"] - object_name = "{}_{}".format(name, namespace) - - # prepare data for imprinting - # add additional metadata from the version to imprint to metadata knob - data_imprint = { - "frameStart": first, - "frameEnd": last, - "version": version_entity["version"], - } - for k in ["source", "fps"]: - data_imprint[k] = version_attributes[k] - - # getting file path - file = self.filepath_from_context(context).replace("\\", "/") - - with maintained_selection(): - camera_node = nuke.createNode( - "Camera2", - "name {} file {} read_from_file True".format( - object_name, file), - inpanel=False - ) - - camera_node.forceValidate() - camera_node["frame_rate"].setValue(float(fps)) - - # workaround because nuke's bug is not adding - # animation keys properly - xpos = camera_node.xpos() - ypos = camera_node.ypos() - nuke.nodeCopy("%clipboard%") - nuke.delete(camera_node) - nuke.nodePaste("%clipboard%") - camera_node = nuke.toNode(object_name) - camera_node.setXYpos(xpos, ypos) - - # color node by correct color by actual version - self.node_version_color( - context["project"]["name"], version_entity, camera_node - ) - - return containerise( - node=camera_node, - name=name, - namespace=namespace, - context=context, - loader=self.__class__.__name__, - data=data_imprint) - - def update(self, container, context): - """ - Called by Scene Inventory when look should be updated to current - version. - If any reference edits cannot be applied, eg. shader renamed and - material not present, reference is unloaded and cleaned. - All failed edits are highlighted to the user via message box. 
- - Args: - container: object that has look to be updated - representation: (dict): relationship data to get proper - representation from DB and persisted - data in .json - Returns: - None - """ - # Get version from io - version_entity = context["version"] - repre_entity = context["representation"] - - # get main variables - version_attributes = version_entity["attrib"] - first = version_attributes.get("frameStart") - last = version_attributes.get("frameEnd") - fps = version_attributes.get("fps") or nuke.root()["fps"].getValue() - - # prepare data for imprinting - data_imprint = { - "representation": repre_entity["id"], - "frameStart": first, - "frameEnd": last, - "version": version_entity["version"] - } - - # add attributes from the version to imprint to metadata knob - for k in ["source", "fps"]: - data_imprint[k] = version_attributes[k] - - # getting file path - file = get_representation_path(repre_entity).replace("\\", "/") - - with maintained_selection(): - camera_node = container["node"] - camera_node['selected'].setValue(True) - - # collect input output dependencies - dependencies = camera_node.dependencies() - dependent = camera_node.dependent() - - camera_node["frame_rate"].setValue(float(fps)) - camera_node["file"].setValue(file) - - # workaround because nuke's bug is - # not adding animation keys properly - xpos = camera_node.xpos() - ypos = camera_node.ypos() - nuke.nodeCopy("%clipboard%") - camera_name = camera_node.name() - nuke.delete(camera_node) - nuke.nodePaste("%clipboard%") - camera_node = nuke.toNode(camera_name) - camera_node.setXYpos(xpos, ypos) - - # link to original input nodes - for i, input in enumerate(dependencies): - camera_node.setInput(i, input) - # link to original output nodes - for d in dependent: - index = next((i for i, dpcy in enumerate( - d.dependencies()) - if camera_node is dpcy), 0) - d.setInput(index, camera_node) - - # color node by correct color by actual version - self.node_version_color( - context["project"]["name"], version_entity, camera_node - ) - - self.log.info( - "updated to version: {}".format(version_entity["version"]) - ) - - return update_container(camera_node, data_imprint) - - def node_version_color(self, project_name, version_entity, node): - """ Coloring a node by correct color by actual version - """ - # get all versions in list - last_version_entity = ayon_api.get_last_version_by_product_id( - project_name, version_entity["productId"], fields={"id"} - ) - - # change color of node - if version_entity["id"] == last_version_entity["id"]: - color_value = self.node_color - else: - color_value = "0xd88467ff" - node["tile_color"].setValue(int(color_value, 16)) - - def switch(self, container, context): - self.update(container, context) - - def remove(self, container): - node = container["node"] - with viewer_update_and_undo_stop(): - nuke.delete(node) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/load/load_clip.py b/server_addon/nuke/client/ayon_nuke/plugins/load/load_clip.py deleted file mode 100644 index d1e38eea6b..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/load/load_clip.py +++ /dev/null @@ -1,584 +0,0 @@ -from copy import deepcopy - -import nuke -import qargparse -import ayon_api - -from ayon_core.lib import Logger -from ayon_core.pipeline import ( - get_representation_path, -) -from ayon_core.pipeline.colorspace import ( - get_imageio_file_rules_colorspace_from_filepath, - get_current_context_imageio_config_preset, -) -from ayon_nuke.api.lib import ( - get_imageio_input_colorspace, - 
maintained_selection -) -from ayon_nuke.api import ( - containerise, - update_container, - viewer_update_and_undo_stop, - colorspace_exists_on_node -) -from ayon_core.lib.transcoding import ( - VIDEO_EXTENSIONS, - IMAGE_EXTENSIONS -) -from ayon_nuke.api import plugin - - -class LoadClip(plugin.NukeLoader): - """Load clip into Nuke - - Either it is image sequence or video file. - """ - log = Logger.get_logger(__name__) - - product_types = { - "source", - "plate", - "render", - "prerender", - "review", - } - representations = {"*"} - extensions = set( - ext.lstrip(".") for ext in IMAGE_EXTENSIONS.union(VIDEO_EXTENSIONS) - ) - - settings_category = "nuke" - - label = "Load Clip" - order = -20 - icon = "file-video-o" - color = "white" - - # Loaded from settings - representations_include = [] - - script_start = int(nuke.root()["first_frame"].value()) - - # option gui - options_defaults = { - "start_at_workfile": True, - "add_retime": True, - "deep_exr": False - } - - node_name_template = "{class_name}_{ext}" - - @classmethod - def get_options(cls, *args): - return [ - qargparse.Boolean( - "start_at_workfile", - help="Load at workfile start frame", - default=cls.options_defaults["start_at_workfile"] - ), - qargparse.Boolean( - "add_retime", - help="Load with retime", - default=cls.options_defaults["add_retime"] - ), - qargparse.Boolean( - "deep_exr", - help="Read with deep exr", - default=cls.options_defaults["deep_exr"] - ) - ] - - @classmethod - def get_representations(cls): - return cls.representations_include or cls.representations - - def load(self, context, name, namespace, options): - """Load asset via database.""" - project_name = context["project"]["name"] - repre_entity = context["representation"] - version_entity = context["version"] - version_attributes = version_entity["attrib"] - version_data = version_entity["data"] - - # reset container id so it is always unique for each instance - self.reset_container_id() - - is_sequence = len(repre_entity["files"]) > 1 - - if is_sequence: - context["representation"] = ( - self._representation_with_hash_in_frame(repre_entity) - ) - - filepath = self.filepath_from_context(context) - filepath = filepath.replace("\\", "/") - self.log.debug("_ filepath: {}".format(filepath)) - - start_at_workfile = options.get( - "start_at_workfile", self.options_defaults["start_at_workfile"]) - - add_retime = options.get( - "add_retime", self.options_defaults["add_retime"]) - - deep_exr = options.get( - "deep_exr", self.options_defaults["deep_exr"]) - - repre_id = repre_entity["id"] - - self.log.debug( - "Representation id `{}` ".format(repre_id)) - - self.handle_start = version_attributes.get("handleStart", 0) - self.handle_end = version_attributes.get("handleEnd", 0) - - first = version_attributes.get("frameStart") - last = version_attributes.get("frameEnd") - first -= self.handle_start - last += self.handle_end - - if not is_sequence: - duration = last - first - first = 1 - last = first + duration - - # If a slate is present, the frame range is 1 frame longer for movies, - # but file sequences its the first frame that is 1 frame lower. - slate_frames = repre_entity["data"].get("slateFrames", 0) - extension = "." 
+ repre_entity["context"]["ext"] - - if extension in VIDEO_EXTENSIONS: - last += slate_frames - - files_count = len(repre_entity["files"]) - if extension in IMAGE_EXTENSIONS and files_count != 1: - first -= slate_frames - - # Fallback to folder name when namespace is None - if namespace is None: - namespace = context["folder"]["name"] - - if not filepath: - self.log.warning( - "Representation id `{}` is failing to load".format(repre_id)) - return - - read_name = self._get_node_name(context) - read_node = None - if deep_exr: - # Create the Loader with the filename path set - read_node = nuke.createNode( - "DeepRead", - "name {}".format(read_name), - inpanel=False - ) - else: - # Create the Loader with the filename path set - read_node = nuke.createNode( - "Read", - "name {}".format(read_name), - inpanel=False - ) - - # get colorspace - colorspace = ( - repre_entity["data"].get("colorspace") - or version_attributes.get("colorSpace") - ) - - # to avoid multiple undo steps for rest of process - # we will switch off undo-ing - with viewer_update_and_undo_stop(): - read_node["file"].setValue(filepath) - if read_node.Class() == "Read": - self.set_colorspace_to_node( - read_node, - filepath, - project_name, - version_entity, - repre_entity - ) - - self._set_range_to_node( - read_node, first, last, start_at_workfile, slate_frames - ) - - version_name = version_entity["version"] - if version_name < 0: - version_name = "hero" - - data_imprint = { - "version": version_name, - "db_colorspace": colorspace - } - - # add attributes from the version to imprint metadata knob - for key in [ - "frameStart", - "frameEnd", - "source", - "fps", - "handleStart", - "handleEnd", - ]: - value = version_attributes.get(key, str(None)) - if isinstance(value, str): - value = value.replace("\\", "/") - data_imprint[key] = value - - if add_retime and version_data.get("retime"): - data_imprint["addRetime"] = True - - read_node["tile_color"].setValue(int("0x4ecd25ff", 16)) - - container = containerise( - read_node, - name=name, - namespace=namespace, - context=context, - loader=self.__class__.__name__, - data=data_imprint) - - if add_retime and version_data.get("retime"): - self._make_retimes(read_node, version_data) - - self.set_as_member(read_node) - - return container - - def switch(self, container, context): - self.update(container, context) - - def _representation_with_hash_in_frame(self, repre_entity): - """Convert frame key value to padded hash - - Args: - repre_entity (dict): Representation entity. - - Returns: - dict: altered representation data - - """ - new_repre_entity = deepcopy(repre_entity) - context = new_repre_entity["context"] - - # Get the frame from the context and hash it - frame = context["frame"] - hashed_frame = "#" * len(str(frame)) - - # Replace the frame with the hash in the originalBasename - if ( - "{originalBasename}" in new_repre_entity["attrib"]["template"] - ): - origin_basename = context["originalBasename"] - context["originalBasename"] = origin_basename.replace( - frame, hashed_frame - ) - - # Replace the frame with the hash in the frame - new_repre_entity["context"]["frame"] = hashed_frame - return new_repre_entity - - def update(self, container, context): - """Update the Loader's path - - Nuke automatically tries to reset some variables when changing - the loader's path to a new file. 
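`_representation_with_hash_in_frame()` above turns an explicit frame number into a `#` padding token of the same width, so one path stands for the whole sequence. The core step, distilled into a self-contained sketch:

from copy import deepcopy


def frame_to_hash_padding(repre_context):
    # "1001" becomes "####": same character width, so downstream path
    # formatting keeps its padding while matching every frame.
    context = deepcopy(repre_context)
    frame = str(context["frame"])
    context["frame"] = "#" * len(frame)
    return context


# e.g. frame_to_hash_padding({"frame": "1001"}) -> {"frame": "####"}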
These automatic changes are to its - inputs: - - """ - - project_name = context["project"]["name"] - version_entity = context["version"] - repre_entity = context["representation"] - - version_attributes = version_entity["attrib"] - version_data = version_entity["data"] - - is_sequence = len(repre_entity["files"]) > 1 - - read_node = container["node"] - - if is_sequence: - repre_entity = self._representation_with_hash_in_frame( - repre_entity - ) - - filepath = ( - get_representation_path(repre_entity) - ).replace("\\", "/") - self.log.debug("_ filepath: {}".format(filepath)) - - start_at_workfile = "start at" in read_node['frame_mode'].value() - - add_retime = [ - key for key in read_node.knobs().keys() - if "addRetime" in key - ] - - repre_id = repre_entity["id"] - - # colorspace profile - colorspace = ( - repre_entity["data"].get("colorspace") - or version_attributes.get("colorSpace") - ) - - self.handle_start = version_attributes.get("handleStart", 0) - self.handle_end = version_attributes.get("handleEnd", 0) - - first = version_attributes.get("frameStart") - last = version_attributes.get("frameEnd") - first -= self.handle_start - last += self.handle_end - - if not is_sequence: - duration = last - first - first = 1 - last = first + duration - - if not filepath: - self.log.warning( - "Representation id `{}` is failing to load".format(repre_id)) - return - - read_node["file"].setValue(filepath) - - # to avoid multiple undo steps for rest of process - # we will switch off undo-ing - with viewer_update_and_undo_stop(): - if read_node.Class() == "Read": - self.set_colorspace_to_node( - read_node, - filepath, - project_name, - version_entity, - repre_entity - ) - - self._set_range_to_node(read_node, first, last, start_at_workfile) - - updated_dict = { - "representation": repre_entity["id"], - "frameStart": str(first), - "frameEnd": str(last), - "version": str(version_entity["version"]), - "db_colorspace": colorspace, - "source": version_attributes.get("source"), - "handleStart": str(self.handle_start), - "handleEnd": str(self.handle_end), - "fps": str(version_attributes.get("fps")) - } - - last_version_entity = ayon_api.get_last_version_by_product_id( - project_name, version_entity["productId"], fields={"id"} - ) - # change color of read_node - if version_entity["id"] == last_version_entity["id"]: - color_value = "0x4ecd25ff" - else: - color_value = "0xd84f20ff" - read_node["tile_color"].setValue(int(color_value, 16)) - - # Update the imprinted representation - update_container(read_node, updated_dict) - self.log.info( - "updated to version: {}".format(version_entity["version"]) - ) - - if add_retime and version_data.get("retime"): - self._make_retimes(read_node, version_data) - else: - self.clear_members(read_node) - - self.set_as_member(read_node) - - def set_colorspace_to_node( - self, - read_node, - filepath, - project_name, - version_entity, - repre_entity, - ): - """Set colorspace to read node. - - Sets colorspace with available names validation. - - Args: - read_node (nuke.Node): The nuke's read node - filepath (str): File path. - project_name (str): Project name. - version_entity (dict): Version entity. - repre_entity (dict): Representation entity. 
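`_set_range_to_node()` below pins both the original and working range of the Read node, then optionally remaps it to the workfile start via the `start at` frame mode. A trimmed sketch of the same knob sequence:

import nuke


def set_read_range(read_node, first, last, start_frame=None):
    # Pin the original and working ranges, then optionally remap the
    # clip so it begins at an arbitrary script frame.
    read_node["origfirst"].setValue(int(first))
    read_node["first"].setValue(int(first))
    read_node["origlast"].setValue(int(last))
    read_node["last"].setValue(int(last))
    if start_frame is not None:
        read_node["frame_mode"].setValue("start at")
        read_node["frame"].setValue(str(int(start_frame)))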
- - """ - used_colorspace = self._get_colorspace_data( - project_name, version_entity, repre_entity, filepath - ) - if ( - used_colorspace - and colorspace_exists_on_node(read_node, used_colorspace) - ): - self.log.info(f"Used colorspace: {used_colorspace}") - read_node["colorspace"].setValue(used_colorspace) - else: - self.log.info("Colorspace not set...") - - def remove(self, container): - read_node = container["node"] - assert read_node.Class() == "Read", "Must be Read" - - with viewer_update_and_undo_stop(): - members = self.get_members(read_node) - nuke.delete(read_node) - for member in members: - nuke.delete(member) - - def _set_range_to_node( - self, read_node, first, last, start_at_workfile, slate_frames=0 - ): - read_node['origfirst'].setValue(int(first)) - read_node['first'].setValue(int(first)) - read_node['origlast'].setValue(int(last)) - read_node['last'].setValue(int(last)) - - # set start frame depending on workfile or version - if start_at_workfile: - read_node['frame_mode'].setValue("start at") - - start_frame = self.script_start - slate_frames - - read_node['frame'].setValue(str(start_frame)) - - def _make_retimes(self, parent_node, version_data): - ''' Create all retime and timewarping nodes with copied animation ''' - speed = version_data.get('speed', 1) - time_warp_nodes = version_data.get('timewarps', []) - last_node = None - source_id = self.get_container_id(parent_node) - self.log.debug("__ source_id: {}".format(source_id)) - self.log.debug("__ members: {}".format( - self.get_members(parent_node))) - - dependent_nodes = self.clear_members(parent_node) - - with maintained_selection(): - parent_node['selected'].setValue(True) - - if speed != 1: - rtn = nuke.createNode( - "Retime", - "speed {}".format(speed)) - - rtn["before"].setValue("continue") - rtn["after"].setValue("continue") - rtn["input.first_lock"].setValue(True) - rtn["input.first"].setValue( - self.script_start - ) - self.set_as_member(rtn) - last_node = rtn - - if time_warp_nodes != []: - start_anim = self.script_start + (self.handle_start / speed) - for timewarp in time_warp_nodes: - twn = nuke.createNode( - timewarp["Class"], - "name {}".format(timewarp["name"]) - ) - if isinstance(timewarp["lookup"], list): - # if array for animation - twn["lookup"].setAnimated() - for i, value in enumerate(timewarp["lookup"]): - twn["lookup"].setValueAt( - (start_anim + i) + value, - (start_anim + i)) - else: - # if static value `int` - twn["lookup"].setValue(timewarp["lookup"]) - - self.set_as_member(twn) - last_node = twn - - if dependent_nodes: - # connect to original inputs - for i, n in enumerate(dependent_nodes): - last_node.setInput(i, n) - - def _get_node_name(self, context): - folder_entity = context["folder"] - product_name = context["product"]["name"] - repre_entity = context["representation"] - - folder_name = folder_entity["name"] - repre_cont = repre_entity["context"] - name_data = { - "folder": { - "name": folder_name, - }, - "product": { - "name": product_name, - }, - "asset": folder_name, - "subset": product_name, - "representation": repre_entity["name"], - "ext": repre_cont["representation"], - "id": repre_entity["id"], - "class_name": self.__class__.__name__ - } - - return self.node_name_template.format(**name_data) - - def _get_colorspace_data( - self, project_name, version_entity, repre_entity, filepath - ): - """Get colorspace data from version and representation documents - - Args: - project_name (str): Project name. - version_entity (dict): Version entity. 
- repre_entity (dict): Representation entity. - filepath (str): File path. - - Returns: - Any[str,None]: colorspace name or None - """ - # Get backward compatible colorspace key. - colorspace = repre_entity["data"].get("colorspace") - self.log.debug( - f"Colorspace from representation colorspace: {colorspace}" - ) - - # Get backward compatible version data key if colorspace is not found. - if not colorspace: - colorspace = version_entity["attrib"].get("colorSpace") - self.log.debug( - f"Colorspace from version colorspace: {colorspace}" - ) - - # Get colorspace from representation colorspaceData if colorspace is - # not found. - if not colorspace: - colorspace_data = repre_entity["data"].get("colorspaceData", {}) - colorspace = colorspace_data.get("colorspace") - self.log.debug( - f"Colorspace from representation colorspaceData: {colorspace}" - ) - - config_data = get_current_context_imageio_config_preset() - # check if any filerules are not applicable - new_parsed_colorspace = get_imageio_file_rules_colorspace_from_filepath( # noqa - filepath, "nuke", project_name, config_data=config_data - ) - self.log.debug(f"Colorspace new filerules: {new_parsed_colorspace}") - - # colorspace from `project_settings/nuke/imageio/regexInputs` - old_parsed_colorspace = get_imageio_input_colorspace(filepath) - self.log.debug(f"Colorspace old filerules: {old_parsed_colorspace}") - - return ( - new_parsed_colorspace - or old_parsed_colorspace - or colorspace - ) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/load/load_effects.py b/server_addon/nuke/client/ayon_nuke/plugins/load/load_effects.py deleted file mode 100644 index e923a02424..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/load/load_effects.py +++ /dev/null @@ -1,361 +0,0 @@ -import json -from collections import OrderedDict -import nuke -import six -import ayon_api - -from ayon_core.pipeline import ( - load, - get_representation_path, -) -from ayon_nuke.api import ( - containerise, - update_container, - viewer_update_and_undo_stop -) - - -class LoadEffects(load.LoaderPlugin): - """Loading colorspace soft effect exported from nukestudio""" - - product_types = {"effect"} - representations = {"*"} - extensions = {"json"} - - settings_category = "nuke" - - label = "Load Effects - nodes" - order = 0 - icon = "cc" - color = "white" - ignore_attr = ["useLifetime"] - - def load(self, context, name, namespace, data): - """ - Loading function to get the soft effects to particular read node - - Arguments: - context (dict): context of version - name (str): name of the version - namespace (str): namespace name - data (dict): compulsory attribute > not used - - Returns: - nuke node: containerised nuke node object - """ - # get main variables - version_entity = context["version"] - - version_attributes = version_entity["attrib"] - first = version_attributes.get("frameStart") - last = version_attributes.get("frameEnd") - colorspace = version_attributes.get("colorSpace") - - workfile_first_frame = int(nuke.root()["first_frame"].getValue()) - namespace = namespace or context["folder"]["name"] - object_name = "{}_{}".format(name, namespace) - - # prepare data for imprinting - data_imprint = { - "frameStart": first, - "frameEnd": last, - "version": version_entity["version"], - "colorspaceInput": colorspace, - } - - # add additional metadata from the version to imprint to Avalon knob - for k in [ - "frameStart", - "frameEnd", - "handleStart", - "handleEnd", - "source", - "fps" - ]: - data_imprint[k] = version_attributes[k] - - # getting file path - 
file = self.filepath_from_context(context).replace("\\", "/") - - # getting data from json file with unicode conversion - with open(file, "r") as f: - json_f = {self.byteify(key): self.byteify(value) - for key, value in json.load(f).items()} - - # get correct order of nodes by positions on track and subtrack - nodes_order = self.reorder_nodes(json_f) - - # adding nodes to node graph - # just in case we are in group lets jump out of it - nuke.endGroup() - - GN = nuke.createNode( - "Group", - "name {}_1".format(object_name), - inpanel=False - ) - - # adding content to the group node - with GN: - pre_node = nuke.createNode("Input") - pre_node["name"].setValue("rgb") - - for ef_name, ef_val in nodes_order.items(): - node = nuke.createNode(ef_val["class"]) - for k, v in ef_val["node"].items(): - if k in self.ignore_attr: - continue - - try: - node[k].value() - except NameError as e: - self.log.warning(e) - continue - - if isinstance(v, list) and len(v) > 4: - node[k].setAnimated() - for i, value in enumerate(v): - if isinstance(value, list): - for ci, cv in enumerate(value): - node[k].setValueAt( - cv, - (workfile_first_frame + i), - ci) - else: - node[k].setValueAt( - value, - (workfile_first_frame + i)) - else: - node[k].setValue(v) - node.setInput(0, pre_node) - pre_node = node - - output = nuke.createNode("Output") - output.setInput(0, pre_node) - - # try to find parent read node - self.connect_read_node(GN, namespace, json_f["assignTo"]) - - GN["tile_color"].setValue(int("0x3469ffff", 16)) - - self.log.info("Loaded lut setup: `{}`".format(GN["name"].value())) - - return containerise( - node=GN, - name=name, - namespace=namespace, - context=context, - loader=self.__class__.__name__, - data=data_imprint) - - def update(self, container, context): - """Update the Loader's path - - Nuke automatically tries to reset some variables when changing - the loader's path to a new file. 
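The knob loop in `load()` above treats any list longer than four samples as an animation curve: flat lists become one keyframe per frame, nested lists key each channel separately, and everything else is set statically. That heuristic, isolated as a helper (assuming the standard `setAnimated`/`setValueAt` knob calls):

def apply_exported_knob(node, knob, value, first_frame=0):
    # Lists longer than four samples are per-frame animation; nested
    # lists carry one value per channel at each frame.
    if isinstance(value, list) and len(value) > 4:
        node[knob].setAnimated()
        for offset, sample in enumerate(value):
            frame = first_frame + offset
            if isinstance(sample, list):
                for channel, channel_value in enumerate(sample):
                    node[knob].setValueAt(channel_value, frame, channel)
            else:
                node[knob].setValueAt(sample, frame)
    else:
        node[knob].setValue(value)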
These automatic changes are to its - inputs: - - """ - # get main variables - # Get version from io - project_name = context["project"]["name"] - version_entity = context["version"] - repre_entity = context["representation"] - - # get corresponding node - GN = container["node"] - - file = get_representation_path(repre_entity).replace("\\", "/") - - version_attributes = version_entity["attrib"] - first = version_attributes.get("frameStart") - last = version_attributes.get("frameEnd") - colorspace = version_attributes.get("colorSpace") - - workfile_first_frame = int(nuke.root()["first_frame"].getValue()) - namespace = container["namespace"] - - data_imprint = { - "representation": repre_entity["id"], - "frameStart": first, - "frameEnd": last, - "version": version_entity["version"], - "colorspaceInput": colorspace - } - - for k in [ - "frameStart", - "frameEnd", - "handleStart", - "handleEnd", - "source", - "fps", - ]: - data_imprint[k] = version_attributes[k] - - # Update the imprinted representation - update_container( - GN, - data_imprint - ) - - # getting data from json file with unicode conversion - with open(file, "r") as f: - json_f = {self.byteify(key): self.byteify(value) - for key, value in json.load(f).items()} - - # get correct order of nodes by positions on track and subtrack - nodes_order = self.reorder_nodes(json_f) - - # adding nodes to node graph - # just in case we are in group lets jump out of it - nuke.endGroup() - - # adding content to the group node - with GN: - # first remove all nodes - [nuke.delete(n) for n in nuke.allNodes()] - - # create input node - pre_node = nuke.createNode("Input") - pre_node["name"].setValue("rgb") - - for _, ef_val in nodes_order.items(): - node = nuke.createNode(ef_val["class"]) - for k, v in ef_val["node"].items(): - if k in self.ignore_attr: - continue - - try: - node[k].value() - except NameError as e: - self.log.warning(e) - continue - - if isinstance(v, list) and len(v) > 4: - node[k].setAnimated() - for i, value in enumerate(v): - if isinstance(value, list): - for ci, cv in enumerate(value): - node[k].setValueAt( - cv, - (workfile_first_frame + i), - ci) - else: - node[k].setValueAt( - value, - (workfile_first_frame + i)) - else: - node[k].setValue(v) - node.setInput(0, pre_node) - pre_node = node - - # create output node - output = nuke.createNode("Output") - output.setInput(0, pre_node) - - # try to find parent read node - self.connect_read_node(GN, namespace, json_f["assignTo"]) - - last_version_entity = ayon_api.get_last_version_by_product_id( - project_name, version_entity["productId"], fields={"id"} - ) - - # change color of node - if version_entity["id"] == last_version_entity["id"]: - color_value = "0x3469ffff" - else: - color_value = "0xd84f20ff" - - GN["tile_color"].setValue(int(color_value, 16)) - - self.log.info( - "updated to version: {}".format(version_entity["version"]) - ) - - def connect_read_node(self, group_node, namespace, product_name): - """ - Finds read node and selects it - - Arguments: - namespace (str): namespace name - - Returns: - nuke node: node is selected - None: if nothing found - """ - search_name = "{0}_{1}".format(namespace, product_name) - - node = [ - n for n in nuke.allNodes(filter="Read") - if search_name in n["file"].value() - ] - if len(node) > 0: - rn = node[0] - else: - rn = None - - # Parent read node has been found - # solving connections - if rn: - dep_nodes = rn.dependent() - - if len(dep_nodes) > 0: - for dn in dep_nodes: - dn.setInput(0, group_node) - - group_node.setInput(0, rn) - 
group_node.autoplace() - - def reorder_nodes(self, data): - new_order = OrderedDict() - trackNums = [v["trackIndex"] for k, v in data.items() - if isinstance(v, dict)] - subTrackNums = [v["subTrackIndex"] for k, v in data.items() - if isinstance(v, dict)] - - for trackIndex in range( - min(trackNums), max(trackNums) + 1): - for subTrackIndex in range( - min(subTrackNums), max(subTrackNums) + 1): - item = self.get_item(data, trackIndex, subTrackIndex) - if item is not {}: - new_order.update(item) - return new_order - - def get_item(self, data, trackIndex, subTrackIndex): - return {key: val for key, val in data.items() - if isinstance(val, dict) - if subTrackIndex == val["subTrackIndex"] - if trackIndex == val["trackIndex"]} - - def byteify(self, input): - """ - Converts unicode strings to strings - It goes through all dictionary - - Arguments: - input (dict/str): input - - Returns: - dict: with fixed values and keys - - """ - - if isinstance(input, dict): - return {self.byteify(key): self.byteify(value) - for key, value in input.items()} - elif isinstance(input, list): - return [self.byteify(element) for element in input] - elif isinstance(input, six.text_type): - return str(input) - else: - return input - - def switch(self, container, context): - self.update(container, context) - - def remove(self, container): - node = container["node"] - with viewer_update_and_undo_stop(): - nuke.delete(node) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/load/load_effects_ip.py b/server_addon/nuke/client/ayon_nuke/plugins/load/load_effects_ip.py deleted file mode 100644 index ce7e7debeb..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/load/load_effects_ip.py +++ /dev/null @@ -1,372 +0,0 @@ -import json -from collections import OrderedDict -import six -import nuke -import ayon_api - -from ayon_core.pipeline import ( - load, - get_representation_path, -) -from ayon_nuke.api import lib -from ayon_nuke.api import ( - containerise, - update_container, - viewer_update_and_undo_stop -) - - -class LoadEffectsInputProcess(load.LoaderPlugin): - """Loading colorspace soft effect exported from nukestudio""" - - product_types = {"effect"} - representations = {"*"} - extensions = {"json"} - - settings_category = "nuke" - - label = "Load Effects - Input Process" - order = 0 - icon = "eye" - color = "#cc0000" - ignore_attr = ["useLifetime"] - - def load(self, context, name, namespace, data): - """ - Loading function to get the soft effects to particular read node - - Arguments: - context (dict): context of version - name (str): name of the version - namespace (str): namespace name - data (dict): compulsory attribute > not used - - Returns: - nuke node: containerised nuke node object - """ - - # get main variables - version_entity = context["version"] - - version_attributes = version_entity["attrib"] - first = version_attributes.get("frameStart") - last = version_attributes.get("frameEnd") - colorspace = version_attributes.get("colorSpace") - - workfile_first_frame = int(nuke.root()["first_frame"].getValue()) - namespace = namespace or context["folder"]["name"] - object_name = "{}_{}".format(name, namespace) - - # prepare data for imprinting - data_imprint = { - "frameStart": first, - "frameEnd": last, - "version": version_entity["version"], - "colorspaceInput": colorspace, - } - # add additional metadata from the version to imprint to Avalon knob - for k in [ - "frameStart", - "frameEnd", - "handleStart", - "handleEnd", - "source", - "fps" - ]: - data_imprint[k] = version_attributes[k] - - # getting 
file path - file = self.filepath_from_context(context).replace("\\", "/") - - # getting data from json file with unicode conversion - with open(file, "r") as f: - json_f = {self.byteify(key): self.byteify(value) - for key, value in json.load(f).items()} - - # get correct order of nodes by positions on track and subtrack - nodes_order = self.reorder_nodes(json_f) - - # adding nodes to node graph - # just in case we are in group lets jump out of it - nuke.endGroup() - - GN = nuke.createNode( - "Group", - "name {}_1".format(object_name), - inpanel=False - ) - - # adding content to the group node - with GN: - pre_node = nuke.createNode("Input") - pre_node["name"].setValue("rgb") - - for _, ef_val in nodes_order.items(): - node = nuke.createNode(ef_val["class"]) - for k, v in ef_val["node"].items(): - if k in self.ignore_attr: - continue - - try: - node[k].value() - except NameError as e: - self.log.warning(e) - continue - - if isinstance(v, list) and len(v) > 4: - node[k].setAnimated() - for i, value in enumerate(v): - if isinstance(value, list): - for ci, cv in enumerate(value): - node[k].setValueAt( - cv, - (workfile_first_frame + i), - ci) - else: - node[k].setValueAt( - value, - (workfile_first_frame + i)) - else: - node[k].setValue(v) - - node.setInput(0, pre_node) - pre_node = node - - output = nuke.createNode("Output") - output.setInput(0, pre_node) - - # try to place it under Viewer1 - if not self.connect_active_viewer(GN): - nuke.delete(GN) - return - - GN["tile_color"].setValue(int("0x3469ffff", 16)) - - self.log.info("Loaded lut setup: `{}`".format(GN["name"].value())) - - return containerise( - node=GN, - name=name, - namespace=namespace, - context=context, - loader=self.__class__.__name__, - data=data_imprint) - - def update(self, container, context): - """Update the Loader's path - - Nuke automatically tries to reset some variables when changing - the loader's path to a new file. 
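`reorder_nodes()` in these effect loaders rebuilds effects in timeline order by scanning every (track, sub-track) pair; note that its `if item is not {}` guard compares identity, so it is always true, which stays harmless only because updating with an empty dict is a no-op. A single stable sort expresses the same ordering more directly; a sketch:

from collections import OrderedDict


def reorder_nodes(data):
    # Rebuild effects in timeline order: by track, then sub-track.
    # list.sort() is stable, so ties keep their original relative order.
    entries = [
        (key, val) for key, val in data.items() if isinstance(val, dict)
    ]
    entries.sort(
        key=lambda item: (item[1]["trackIndex"], item[1]["subTrackIndex"])
    )
    return OrderedDict(entries)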
These automatic changes are to its - inputs: - - """ - - # get main variables - # Get version from io - project_name = context["project"]["name"] - version_entity = context["version"] - repre_entity = context["representation"] - - # get corresponding node - GN = container["node"] - - file = get_representation_path(repre_entity).replace("\\", "/") - - version_attributes = version_entity["attrib"] - first = version_attributes.get("frameStart") - last = version_attributes.get("frameEnd") - colorspace = version_attributes.get("colorSpace") - - workfile_first_frame = int(nuke.root()["first_frame"].getValue()) - - data_imprint = { - "representation": repre_entity["id"], - "frameStart": first, - "frameEnd": last, - "version": version_entity["version"], - "colorspaceInput": colorspace, - } - - for k in [ - "frameStart", - "frameEnd", - "handleStart", - "handleEnd", - "source", - "fps" - ]: - data_imprint[k] = version_attributes[k] - - # Update the imprinted representation - update_container( - GN, - data_imprint - ) - - # getting data from json file with unicode conversion - with open(file, "r") as f: - json_f = {self.byteify(key): self.byteify(value) - for key, value in json.load(f).items()} - - # get correct order of nodes by positions on track and subtrack - nodes_order = self.reorder_nodes(json_f) - - # adding nodes to node graph - # just in case we are in group lets jump out of it - nuke.endGroup() - - # adding content to the group node - with GN: - # first remove all nodes - [nuke.delete(n) for n in nuke.allNodes()] - - # create input node - pre_node = nuke.createNode("Input") - pre_node["name"].setValue("rgb") - - for _, ef_val in nodes_order.items(): - node = nuke.createNode(ef_val["class"]) - for k, v in ef_val["node"].items(): - if k in self.ignore_attr: - continue - - try: - node[k].value() - except NameError as e: - self.log.warning(e) - continue - - if isinstance(v, list) and len(v) > 4: - node[k].setAnimated() - for i, value in enumerate(v): - if isinstance(value, list): - for ci, cv in enumerate(value): - node[k].setValueAt( - cv, - (workfile_first_frame + i), - ci) - else: - node[k].setValueAt( - value, - (workfile_first_frame + i)) - else: - node[k].setValue(v) - node.setInput(0, pre_node) - pre_node = node - - # create output node - output = nuke.createNode("Output") - output.setInput(0, pre_node) - - # get all versions in list - last_version_entity = ayon_api.get_last_version_by_product_id( - project_name, version_entity["productId"], fields={"id"} - ) - - # change color of node - if version_entity["id"] == last_version_entity["id"]: - color_value = "0x3469ffff" - else: - color_value = "0xd84f20ff" - GN["tile_color"].setValue(int(color_value, 16)) - - self.log.info("updated to version: {}".format(version_entity["name"])) - - def connect_active_viewer(self, group_node): - """ - Finds Active viewer and - place the node under it, also adds - name of group into Input Process of the viewer - - Arguments: - group_node (nuke node): nuke group node object - - """ - group_node_name = group_node["name"].value() - - viewer = [n for n in nuke.allNodes() if "Viewer1" in n["name"].value()] - if len(viewer) > 0: - viewer = viewer[0] - else: - msg = str("Please create Viewer node before you " - "run this action again") - self.log.error(msg) - nuke.message(msg) - return None - - # get coordinates of Viewer1 - xpos = viewer["xpos"].value() - ypos = viewer["ypos"].value() - - ypos += 150 - - viewer["ypos"].setValue(ypos) - - # set coordinates to group node - group_node["xpos"].setValue(xpos) - 
group_node["ypos"].setValue(ypos + 50) - - # add group node name to Viewer Input Process - viewer["input_process_node"].setValue(group_node_name) - - # put backdrop under - lib.create_backdrop( - label="Input Process", - layer=2, - nodes=[viewer, group_node], - color="0x7c7faaff") - - return True - - def reorder_nodes(self, data): - new_order = OrderedDict() - trackNums = [v["trackIndex"] for k, v in data.items() - if isinstance(v, dict)] - subTrackNums = [v["subTrackIndex"] for k, v in data.items() - if isinstance(v, dict)] - - for trackIndex in range( - min(trackNums), max(trackNums) + 1): - for subTrackIndex in range( - min(subTrackNums), max(subTrackNums) + 1): - item = self.get_item(data, trackIndex, subTrackIndex) - if item is not {}: - new_order.update(item) - return new_order - - def get_item(self, data, trackIndex, subTrackIndex): - return {key: val for key, val in data.items() - if isinstance(val, dict) - if subTrackIndex == val["subTrackIndex"] - if trackIndex == val["trackIndex"]} - - def byteify(self, input): - """ - Converts unicode strings to strings - It goes through all dictionary - - Arguments: - input (dict/str): input - - Returns: - dict: with fixed values and keys - - """ - - if isinstance(input, dict): - return {self.byteify(key): self.byteify(value) - for key, value in input.items()} - elif isinstance(input, list): - return [self.byteify(element) for element in input] - elif isinstance(input, six.text_type): - return str(input) - else: - return input - - def switch(self, container, context): - self.update(container, context) - - def remove(self, container): - node = container["node"] - with viewer_update_and_undo_stop(): - nuke.delete(node) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/load/load_gizmo.py b/server_addon/nuke/client/ayon_nuke/plugins/load/load_gizmo.py deleted file mode 100644 index 1c91af0c1c..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/load/load_gizmo.py +++ /dev/null @@ -1,190 +0,0 @@ -import nuke -import ayon_api - -from ayon_core.pipeline import ( - load, - get_representation_path, -) -from ayon_nuke.api.lib import ( - maintained_selection, - get_avalon_knob_data, - set_avalon_knob_data, - swap_node_with_dependency, -) -from ayon_nuke.api import ( - containerise, - update_container, - viewer_update_and_undo_stop -) - - -class LoadGizmo(load.LoaderPlugin): - """Loading nuke Gizmo""" - - product_types = {"gizmo"} - representations = {"*"} - extensions = {"nk"} - - settings_category = "nuke" - - label = "Load Gizmo" - order = 0 - icon = "dropbox" - color = "white" - node_color = "0x75338eff" - - def load(self, context, name, namespace, data): - """ - Loading function to get Gizmo into node graph - - Arguments: - context (dict): context of version - name (str): name of the version - namespace (str): namespace name - data (dict): compulsory attribute > not used - - Returns: - nuke node: containerized nuke node object - """ - - # get main variables - version_entity = context["version"] - version_attributes = version_entity["attrib"] - - first = version_attributes.get("frameStart") - last = version_attributes.get("frameEnd") - colorspace = version_attributes.get("colorSpace") - - namespace = namespace or context["folder"]["name"] - object_name = "{}_{}".format(name, namespace) - - # prepare data for imprinting - data_imprint = { - "frameStart": first, - "frameEnd": last, - "version": version_entity["version"], - "colorspaceInput": colorspace - } - - # add attributes from the version to imprint to metadata knob - for k in [ - 
"frameStart", - "frameEnd", - "handleStart", - "handleEnd", - "source", - "fps" - ]: - data_imprint[k] = version_attributes[k] - - # getting file path - file = self.filepath_from_context(context).replace("\\", "/") - - # adding nodes to node graph - # just in case we are in group lets jump out of it - nuke.endGroup() - - with maintained_selection(): - # add group from nk - nuke.nodePaste(file) - - group_node = nuke.selectedNode() - - group_node["name"].setValue(object_name) - - return containerise( - node=group_node, - name=name, - namespace=namespace, - context=context, - loader=self.__class__.__name__, - data=data_imprint) - - def update(self, container, context): - """Update the Loader's path - - Nuke automatically tries to reset some variables when changing - the loader's path to a new file. These automatic changes are to its - inputs: - - """ - - # get main variables - # Get version from io - project_name = context["project"]["name"] - version_entity = context["version"] - repre_entity = context["representation"] - - version_attributes = version_entity["attrib"] - - # get corresponding node - group_node = container["node"] - - file = get_representation_path(repre_entity).replace("\\", "/") - - first = version_attributes.get("frameStart") - last = version_attributes.get("frameEnd") - colorspace = version_attributes.get("colorSpace") - - data_imprint = { - "representation": repre_entity["id"], - "frameStart": first, - "frameEnd": last, - "version": version_entity["version"], - "colorspaceInput": colorspace - } - - for k in [ - "frameStart", - "frameEnd", - "handleStart", - "handleEnd", - "source", - "fps" - ]: - data_imprint[k] = version_attributes[k] - - # capture pipeline metadata - avalon_data = get_avalon_knob_data(group_node) - - # adding nodes to node graph - # just in case we are in group lets jump out of it - nuke.endGroup() - - with maintained_selection([group_node]): - # insert nuke script to the script - nuke.nodePaste(file) - # convert imported to selected node - new_group_node = nuke.selectedNode() - # swap nodes with maintained connections - with swap_node_with_dependency( - group_node, new_group_node) as node_name: - new_group_node["name"].setValue(node_name) - # set updated pipeline metadata - set_avalon_knob_data(new_group_node, avalon_data) - - last_version_entity = ayon_api.get_last_version_by_product_id( - project_name, version_entity["productId"], fields={"id"} - ) - - # change color of node - if version_entity["id"] == last_version_entity["id"]: - color_value = self.node_color - else: - color_value = "0xd88467ff" - - new_group_node["tile_color"].setValue(int(color_value, 16)) - - self.log.info( - "updated to version: {}".format(version_entity["name"]) - ) - - return update_container(new_group_node, data_imprint) - - def switch(self, container, context): - self.update(container, context) - - def remove(self, container): - node = container["node"] - with viewer_update_and_undo_stop(): - nuke.delete(node) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/load/load_gizmo_ip.py b/server_addon/nuke/client/ayon_nuke/plugins/load/load_gizmo_ip.py deleted file mode 100644 index 36e878fdf1..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/load/load_gizmo_ip.py +++ /dev/null @@ -1,270 +0,0 @@ -import nuke -import six -import ayon_api - -from ayon_core.pipeline import ( - load, - get_representation_path, -) -from ayon_nuke.api.lib import ( - maintained_selection, - create_backdrop, - get_avalon_knob_data, - set_avalon_knob_data, - swap_node_with_dependency, -) 
-from ayon_nuke.api import ( - containerise, - update_container, - viewer_update_and_undo_stop -) - - -class LoadGizmoInputProcess(load.LoaderPlugin): - """Loading colorspace soft effect exported from nukestudio""" - - product_types = {"gizmo"} - representations = {"*"} - extensions = {"nk"} - - settings_category = "nuke" - - label = "Load Gizmo - Input Process" - order = 0 - icon = "eye" - color = "#cc0000" - node_color = "0x7533c1ff" - - def load(self, context, name, namespace, data): - """ - Loading function to get Gizmo as Input Process on viewer - - Arguments: - context (dict): context of version - name (str): name of the version - namespace (str): namespace name - data (dict): compulsory attribute > not used - - Returns: - nuke node: containerized nuke node object - """ - - # get main variables - version_entity = context["version"] - - version_attributes = version_entity["attrib"] - first = version_attributes.get("frameStart") - last = version_attributes.get("frameEnd") - colorspace = version_attributes.get("colorSpace") - - namespace = namespace or context["folder"]["name"] - object_name = "{}_{}".format(name, namespace) - - # prepare data for imprinting - # add additional metadata from the version to imprint to metadata knob - data_imprint = { - "frameStart": first, - "frameEnd": last, - "version": version_entity["version"], - "colorspaceInput": colorspace - } - - for k in [ - "frameStart", - "frameEnd", - "handleStart", - "handleEnd", - "source", - "fps" - ]: - data_imprint[k] = version_attributes[k] - - # getting file path - file = self.filepath_from_context(context).replace("\\", "/") - - # adding nodes to node graph - # just in case we are in group lets jump out of it - nuke.endGroup() - - with maintained_selection(): - # add group from nk - nuke.nodePaste(file) - - group_node = nuke.selectedNode() - - group_node["name"].setValue(object_name) - - # try to place it under Viewer1 - if not self.connect_active_viewer(group_node): - nuke.delete(group_node) - return - - return containerise( - node=group_node, - name=name, - namespace=namespace, - context=context, - loader=self.__class__.__name__, - data=data_imprint) - - def update(self, container, context): - """Update the Loader's path - - Nuke automatically tries to reset some variables when changing - the loader's path to a new file. 
These automatic changes are to its - inputs: - - """ - - # get main variables - # Get version from io - project_name = context["project"]["name"] - version_entity = context["version"] - repre_entity = context["representation"] - - # get corresponding node - group_node = container["node"] - - file = get_representation_path(repre_entity).replace("\\", "/") - - version_attributes = version_entity["attrib"] - first = version_attributes.get("frameStart") - last = version_attributes.get("frameEnd") - colorspace = version_attributes.get("colorSpace") - - data_imprint = { - "representation": repre_entity["id"], - "frameStart": first, - "frameEnd": last, - "version": version_entity["version"], - "colorspaceInput": colorspace - } - - for k in [ - "frameStart", - "frameEnd", - "handleStart", - "handleEnd", - "source", - "fps" - ]: - data_imprint[k] = version_attributes[k] - - # capture pipeline metadata - avalon_data = get_avalon_knob_data(group_node) - - # adding nodes to node graph - # just in case we are in group lets jump out of it - nuke.endGroup() - - with maintained_selection([group_node]): - # insert nuke script to the script - nuke.nodePaste(file) - # convert imported to selected node - new_group_node = nuke.selectedNode() - # swap nodes with maintained connections - with swap_node_with_dependency( - group_node, new_group_node) as node_name: - new_group_node["name"].setValue(node_name) - # set updated pipeline metadata - set_avalon_knob_data(new_group_node, avalon_data) - - last_version_entity = ayon_api.get_last_version_by_product_id( - project_name, version_entity["productId"], fields={"id"} - ) - - # change color of node - if version_entity["id"] == last_version_entity["id"]: - color_value = self.node_color - else: - color_value = "0xd88467ff" - new_group_node["tile_color"].setValue(int(color_value, 16)) - - self.log.info( - "updated to version: {}".format(version_entity["version"]) - ) - - return update_container(new_group_node, data_imprint) - - def connect_active_viewer(self, group_node): - """ - Finds Active viewer and - place the node under it, also adds - name of group into Input Process of the viewer - - Arguments: - group_node (nuke node): nuke group node object - - """ - group_node_name = group_node["name"].value() - - viewer = [n for n in nuke.allNodes() if "Viewer1" in n["name"].value()] - if len(viewer) > 0: - viewer = viewer[0] - else: - msg = str("Please create Viewer node before you " - "run this action again") - self.log.error(msg) - nuke.message(msg) - return None - - # get coordinates of Viewer1 - xpos = viewer["xpos"].value() - ypos = viewer["ypos"].value() - - ypos += 150 - - viewer["ypos"].setValue(ypos) - - # set coordinates to group node - group_node["xpos"].setValue(xpos) - group_node["ypos"].setValue(ypos + 50) - - # add group node name to Viewer Input Process - viewer["input_process_node"].setValue(group_node_name) - - # put backdrop under - create_backdrop( - label="Input Process", - layer=2, - nodes=[viewer, group_node], - color="0x7c7faaff" - ) - - return True - - def get_item(self, data, trackIndex, subTrackIndex): - return {key: val for key, val in data.items() - if subTrackIndex == val["subTrackIndex"] - if trackIndex == val["trackIndex"]} - - def byteify(self, input): - """ - Converts unicode strings to strings - It goes through all dictionary - - Arguments: - input (dict/str): input - - Returns: - dict: with fixed values and keys - - """ - - if isinstance(input, dict): - return {self.byteify(key): self.byteify(value) - for key, value in input.items()} - 
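        # Note (editorial): this conversion is a Python 2 remnant; under
        # Python 3 `six.text_type` is `str`, so the str() calls below are
        # no-ops and the method effectively returns its input unchanged,
        # apart from rebuilding dict and list containers.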
elif isinstance(input, list): - return [self.byteify(element) for element in input] - elif isinstance(input, six.text_type): - return str(input) - else: - return input - - def switch(self, container, context): - self.update(container, context) - - def remove(self, container): - node = container["node"] - with viewer_update_and_undo_stop(): - nuke.delete(node) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/load/load_image.py b/server_addon/nuke/client/ayon_nuke/plugins/load/load_image.py deleted file mode 100644 index 0c43f5a5ca..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/load/load_image.py +++ /dev/null @@ -1,254 +0,0 @@ -import nuke - -import qargparse -import ayon_api - -from ayon_core.pipeline import ( - load, - get_representation_path, -) -from ayon_nuke.api.lib import ( - get_imageio_input_colorspace -) -from ayon_nuke.api import ( - containerise, - update_container, - viewer_update_and_undo_stop -) -from ayon_core.lib.transcoding import ( - IMAGE_EXTENSIONS -) - - -class LoadImage(load.LoaderPlugin): - """Load still image into Nuke""" - - product_types = { - "render2d", - "source", - "plate", - "render", - "prerender", - "review", - "image", - } - representations = {"*"} - extensions = set(ext.lstrip(".") for ext in IMAGE_EXTENSIONS) - - settings_category = "nuke" - - label = "Load Image" - order = -10 - icon = "image" - color = "white" - - # Loaded from settings - representations_include = [] - - node_name_template = "{class_name}_{ext}" - - options = [ - qargparse.Integer( - "frame_number", - label="Frame Number", - default=int(nuke.root()["first_frame"].getValue()), - min=1, - max=999999, - help="What frame is reading from?" - ) - ] - - @classmethod - def get_representations(cls): - return cls.representations_include or cls.representations - - def load(self, context, name, namespace, options): - self.log.info("__ options: `{}`".format(options)) - frame_number = options.get( - "frame_number", int(nuke.root()["first_frame"].getValue()) - ) - - version_entity = context["version"] - version_attributes = version_entity["attrib"] - repre_entity = context["representation"] - repre_id = repre_entity["id"] - - self.log.debug( - "Representation id `{}` ".format(repre_id)) - - last = first = int(frame_number) - - # Fallback to folder name when namespace is None - if namespace is None: - namespace = context["folder"]["name"] - - file = self.filepath_from_context(context) - - if not file: - self.log.warning( - "Representation id `{}` is failing to load".format(repre_id)) - return - - file = file.replace("\\", "/") - - frame = repre_entity["context"].get("frame") - if frame: - padding = len(frame) - file = file.replace( - frame, - format(frame_number, "0{}".format(padding))) - - read_name = self._get_node_name(context) - - # Create the Loader with the filename path set - with viewer_update_and_undo_stop(): - r = nuke.createNode( - "Read", - "name {}".format(read_name), - inpanel=False - ) - - r["file"].setValue(file) - - # Set colorspace defined in version data - colorspace = version_entity["attrib"].get("colorSpace") - if colorspace: - r["colorspace"].setValue(str(colorspace)) - - preset_clrsp = get_imageio_input_colorspace(file) - - if preset_clrsp is not None: - r["colorspace"].setValue(preset_clrsp) - - r["origfirst"].setValue(first) - r["first"].setValue(first) - r["origlast"].setValue(last) - r["last"].setValue(last) - - # add attributes from the version to imprint metadata knob - colorspace = version_attributes["colorSpace"] - data_imprint = { - "frameStart": 
first, - "frameEnd": last, - "version": version_entity["version"], - "colorspace": colorspace, - } - for k in ["source", "fps"]: - data_imprint[k] = version_attributes.get(k, str(None)) - - r["tile_color"].setValue(int("0x4ecd25ff", 16)) - - return containerise(r, - name=name, - namespace=namespace, - context=context, - loader=self.__class__.__name__, - data=data_imprint) - - def switch(self, container, context): - self.update(container, context) - - def update(self, container, context): - """Update the Loader's path - - Nuke automatically tries to reset some variables when changing - the loader's path to a new file. These automatic changes are to its - inputs: - - """ - node = container["node"] - frame_number = node["first"].value() - - assert node.Class() == "Read", "Must be Read" - - project_name = context["project"]["name"] - version_entity = context["version"] - repre_entity = context["representation"] - - repr_cont = repre_entity["context"] - - file = get_representation_path(repre_entity) - - if not file: - repre_id = repre_entity["id"] - self.log.warning( - "Representation id `{}` is failing to load".format(repre_id)) - return - - file = file.replace("\\", "/") - - frame = repr_cont.get("frame") - if frame: - padding = len(frame) - file = file.replace( - frame, - format(frame_number, "0{}".format(padding))) - - # Get start frame from version data - last_version_entity = ayon_api.get_last_version_by_product_id( - project_name, version_entity["productId"], fields={"id"} - ) - - last = first = int(frame_number) - - # Set the global in to the start frame of the sequence - node["file"].setValue(file) - node["origfirst"].setValue(first) - node["first"].setValue(first) - node["origlast"].setValue(last) - node["last"].setValue(last) - - version_attributes = version_entity["attrib"] - updated_dict = { - "representation": repre_entity["id"], - "frameStart": str(first), - "frameEnd": str(last), - "version": str(version_entity["version"]), - "colorspace": version_attributes.get("colorSpace"), - "source": version_attributes.get("source"), - "fps": str(version_attributes.get("fps")), - } - - # change color of node - if version_entity["id"] == last_version_entity["id"]: - color_value = "0x4ecd25ff" - else: - color_value = "0xd84f20ff" - node["tile_color"].setValue(int(color_value, 16)) - - # Update the imprinted representation - update_container(node, updated_dict) - self.log.info("updated to version: {}".format( - version_entity["version"] - )) - - def remove(self, container): - node = container["node"] - assert node.Class() == "Read", "Must be Read" - - with viewer_update_and_undo_stop(): - nuke.delete(node) - - def _get_node_name(self, context): - folder_entity = context["folder"] - product_name = context["product"]["name"] - repre_entity = context["representation"] - - folder_name = folder_entity["name"] - repre_cont = repre_entity["context"] - name_data = { - "folder": { - "name": folder_name, - }, - "product": { - "name": product_name, - }, - "asset": folder_name, - "subset": product_name, - "representation": repre_entity["name"], - "ext": repre_cont["representation"], - "id": repre_entity["id"], - "class_name": self.__class__.__name__ - } - - return self.node_name_template.format(**name_data) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/load/load_matchmove.py b/server_addon/nuke/client/ayon_nuke/plugins/load/load_matchmove.py deleted file mode 100644 index c1b5a24504..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/load/load_matchmove.py +++ /dev/null @@ -1,32 +0,0 @@ 
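# Editorial note: the deleted loader below executes a published matchmove
# .py script directly through exec(). A slightly safer sketch (illustration
# only, not the original code) would run the script in an isolated
# namespace via the stdlib `runpy` module:

import runpy

def _run_matchmove_script(path):
    # execute the published script in a fresh namespace instead of exec()
    return runpy.run_path(path)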
-import nuke -from ayon_core.pipeline import load - - -class MatchmoveLoader(load.LoaderPlugin): - """ - This will run matchmove script to create track in script. - """ - - product_types = {"matchmove"} - representations = {"*"} - extensions = {"py"} - - settings_category = "nuke" - - defaults = ["Camera", "Object"] - - label = "Run matchmove script" - icon = "empire" - color = "orange" - - def load(self, context, name, namespace, data): - path = self.filepath_from_context(context) - if path.lower().endswith(".py"): - exec(open(path).read()) - - else: - msg = "Unsupported script type" - self.log.error(msg) - nuke.message(msg) - - return True diff --git a/server_addon/nuke/client/ayon_nuke/plugins/load/load_model.py b/server_addon/nuke/client/ayon_nuke/plugins/load/load_model.py deleted file mode 100644 index 551147be96..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/load/load_model.py +++ /dev/null @@ -1,207 +0,0 @@ -import nuke -import ayon_api - -from ayon_core.pipeline import ( - load, - get_representation_path, -) -from ayon_nuke.api.lib import maintained_selection -from ayon_nuke.api import ( - containerise, - update_container, - viewer_update_and_undo_stop -) - - -class AlembicModelLoader(load.LoaderPlugin): - """ - This will load alembic model or anim into script. - """ - - product_types = {"model", "pointcache", "animation"} - representations = {"*"} - extensions = {"abc"} - - settings_category = "nuke" - - label = "Load Alembic" - icon = "cube" - color = "orange" - node_color = "0x4ecd91ff" - - def load(self, context, name, namespace, data): - # get main variables - project_name = context["project"]["name"] - version_entity = context["version"] - - version_attributes = version_entity["attrib"] - first = version_attributes.get("frameStart") - last = version_attributes.get("frameEnd") - fps = version_attributes.get("fps") or nuke.root()["fps"].getValue() - - namespace = namespace or context["folder"]["name"] - object_name = "{}_{}".format(name, namespace) - - # prepare data for imprinting - data_imprint = { - "frameStart": first, - "frameEnd": last, - "version": version_entity["version"] - } - # add attributes from the version to imprint to metadata knob - for k in ["source", "fps"]: - data_imprint[k] = version_attributes[k] - - # getting file path - file = self.filepath_from_context(context).replace("\\", "/") - - with maintained_selection(): - model_node = nuke.createNode( - "ReadGeo2", - "name {} file {} ".format( - object_name, file), - inpanel=False - ) - - model_node.forceValidate() - - # Ensure all items are imported and selected. - scene_view = model_node.knob('scene_view') - scene_view.setImportedItems(scene_view.getAllItems()) - scene_view.setSelectedItems(scene_view.getAllItems()) - - model_node["frame_rate"].setValue(float(fps)) - - # workaround because nuke's bug is not adding - # animation keys properly - xpos = model_node.xpos() - ypos = model_node.ypos() - nuke.nodeCopy("%clipboard%") - nuke.delete(model_node) - nuke.nodePaste("%clipboard%") - model_node = nuke.toNode(object_name) - model_node.setXYpos(xpos, ypos) - - # color node by correct color by actual version - self.node_version_color(project_name, version_entity, model_node) - - return containerise( - node=model_node, - name=name, - namespace=namespace, - context=context, - loader=self.__class__.__name__, - data=data_imprint) - - def update(self, container, context): - """ - Called by Scene Inventory when look should be updated to current - version. - If any reference edits cannot be applied, eg. 
shader renamed and - material not present, reference is unloaded and cleaned. - All failed edits are highlighted to the user via message box. - - Args: - container: object that has look to be updated - context: (dict): relationship data to get proper - representation from DB and persisted - data in .json - Returns: - None - """ - # Get version from io - project_name = context["project"]["name"] - version_entity = context["version"] - repre_entity = context["representation"] - - # get corresponding node - model_node = container["node"] - - # get main variables - version_attributes = version_entity["attrib"] - first = version_attributes.get("frameStart") - last = version_attributes.get("frameEnd") - fps = version_attributes.get("fps") or nuke.root()["fps"].getValue() - - # prepare data for imprinting - data_imprint = { - "representation": repre_entity["id"], - "frameStart": first, - "frameEnd": last, - "version": version_entity["version"] - } - - # add additional metadata from the version to imprint to Avalon knob - for k in ["source", "fps"]: - data_imprint[k] = version_attributes[k] - - # getting file path - file = get_representation_path(repre_entity).replace("\\", "/") - - with maintained_selection(): - model_node['selected'].setValue(True) - - # collect input output dependencies - dependencies = model_node.dependencies() - dependent = model_node.dependent() - - model_node["frame_rate"].setValue(float(fps)) - model_node["file"].setValue(file) - - # Ensure all items are imported and selected. - scene_view = model_node.knob('scene_view') - scene_view.setImportedItems(scene_view.getAllItems()) - scene_view.setSelectedItems(scene_view.getAllItems()) - - # workaround because nuke's bug is - # not adding animation keys properly - xpos = model_node.xpos() - ypos = model_node.ypos() - nuke.nodeCopy("%clipboard%") - nuke.delete(model_node) - - # paste the node back and set the position - nuke.nodePaste("%clipboard%") - model_node = nuke.selectedNode() - model_node.setXYpos(xpos, ypos) - - # link to original input nodes - for i, input in enumerate(dependencies): - model_node.setInput(i, input) - # link to original output nodes - for d in dependent: - index = next((i for i, dpcy in enumerate( - d.dependencies()) - if model_node is dpcy), 0) - d.setInput(index, model_node) - - # color node by correct color by actual version - self.node_version_color(project_name, version_entity, model_node) - - self.log.info( - "updated to version: {}".format(version_entity["version"]) - ) - - return update_container(model_node, data_imprint) - - def node_version_color(self, project_name, version_entity, node): - """ Coloring a node by correct color by actual version""" - - last_version_entity = ayon_api.get_last_version_by_product_id( - project_name, version_entity["productId"], fields={"id"} - ) - - # change color of node - if version_entity["id"] == last_version_entity["id"]: - color_value = self.node_color - else: - color_value = "0xd88467ff" - node["tile_color"].setValue(int(color_value, 16)) - - def switch(self, container, context): - self.update(container, context) - - def remove(self, container): - node = nuke.toNode(container['objectName']) - with viewer_update_and_undo_stop(): - nuke.delete(node) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/load/load_ociolook.py b/server_addon/nuke/client/ayon_nuke/plugins/load/load_ociolook.py deleted file mode 100644 index bdff8d7e28..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/load/load_ociolook.py +++ /dev/null @@ -1,349 +0,0 @@ -import os 
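# Editorial note: loaders in this addon flag version state through node
# color. Nuke's `tile_color` knob stores a 32-bit 0xRRGGBBAA integer, which
# is why the hex strings used above ("0x4ecd91ff" for the latest version,
# "0xd88467ff" for an outdated one) are converted before being set:
#
#     node["tile_color"].setValue(int("0x4ecd91ff", 16))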
-import json -import secrets - -import nuke -import six -import ayon_api - -from ayon_core.pipeline import ( - load, - get_representation_path, -) -from ayon_nuke.api import ( - containerise, - viewer_update_and_undo_stop, - update_container, -) - - -class LoadOcioLookNodes(load.LoaderPlugin): - """Loading Ocio look to the nuke.Node graph""" - - product_types = {"ociolook"} - representations = {"*"} - extensions = {"json"} - - settings_category = "nuke" - - label = "Load OcioLook [nodes]" - order = 0 - icon = "cc" - color = "white" - ignore_attr = ["useLifetime"] - - # plugin attributes - current_node_color = "0x4ecd91ff" - old_node_color = "0xd88467ff" - - # json file variables - schema_version = 1 - - def load(self, context, name, namespace, data): - """ - Loading function to get the soft effects to particular read node - - Arguments: - context (dict): context of version - name (str): name of the version - namespace (str): namespace name - data (dict): compulsory attribute > not used - - Returns: - nuke.Node: containerized nuke.Node object - """ - namespace = namespace or context["folder"]["name"] - suffix = secrets.token_hex(nbytes=4) - node_name = "{}_{}_{}".format( - name, namespace, suffix) - - # getting file path - filepath = self.filepath_from_context(context) - - json_f = self._load_json_data(filepath) - - group_node = self._create_group_node( - filepath, json_f["data"]) - # renaming group node - group_node["name"].setValue(node_name) - - self._node_version_color( - context["project"]["name"], - context["version"], - group_node - ) - - self.log.info( - "Loaded lut setup: `{}`".format(group_node["name"].value())) - - return containerise( - node=group_node, - name=name, - namespace=namespace, - context=context, - loader=self.__class__.__name__ - ) - - def _create_group_node( - self, - filepath, - data, - group_node=None - ): - """Creates group node with all the nodes inside. - - Creating mainly `OCIOFileTransform` nodes with `OCIOColorSpace` nodes - in between - in case those are needed. 
- - Arguments: - filepath (str): path to json file - data (dict): data from json file - group_node (Optional[nuke.Node]): group node or None - - Returns: - nuke.Node: group node with all the nodes inside - """ - # get corresponding node - - root_working_colorspace = nuke.root()["workingSpaceLUT"].value() - - dir_path = os.path.dirname(filepath) - all_files = os.listdir(dir_path) - - ocio_working_colorspace = _colorspace_name_by_type( - data["ocioLookWorkingSpace"]) - - # adding nodes to node graph - # just in case we are in group lets jump out of it - nuke.endGroup() - - input_node = None - output_node = None - if group_node: - # remove all nodes between Input and Output nodes - for node in group_node.nodes(): - if node.Class() not in ["Input", "Output"]: - nuke.delete(node) - elif node.Class() == "Input": - input_node = node - elif node.Class() == "Output": - output_node = node - else: - group_node = nuke.createNode( - "Group", - inpanel=False - ) - - # adding content to the group node - with group_node: - pre_colorspace = root_working_colorspace - - # reusing input node if it exists during update - if input_node: - pre_node = input_node - else: - pre_node = nuke.createNode("Input") - pre_node["name"].setValue("rgb") - - # Compare script working colorspace with ocio working colorspace - # found in json file and convert to json's if needed - if pre_colorspace != ocio_working_colorspace: - pre_node = _add_ocio_colorspace_node( - pre_node, - pre_colorspace, - ocio_working_colorspace - ) - pre_colorspace = ocio_working_colorspace - - for ocio_item in data["ocioLookItems"]: - input_space = _colorspace_name_by_type( - ocio_item["input_colorspace"]) - output_space = _colorspace_name_by_type( - ocio_item["output_colorspace"]) - - # making sure we are set to correct colorspace for otio item - if pre_colorspace != input_space: - pre_node = _add_ocio_colorspace_node( - pre_node, - pre_colorspace, - input_space - ) - - node = nuke.createNode("OCIOFileTransform") - - # file path from lut representation - extension = ocio_item["ext"] - item_name = ocio_item["name"] - - item_lut_file = next( - ( - file for file in all_files - if file.endswith(extension) - ), - None - ) - if not item_lut_file: - raise ValueError( - "File with extension '{}' not " - "found in directory".format(extension) - ) - - item_lut_path = os.path.join( - dir_path, item_lut_file).replace("\\", "/") - node["file"].setValue(item_lut_path) - node["name"].setValue(item_name) - node["direction"].setValue(ocio_item["direction"]) - node["interpolation"].setValue(ocio_item["interpolation"]) - node["working_space"].setValue(input_space) - - pre_node.autoplace() - node.setInput(0, pre_node) - node.autoplace() - # pass output space into pre_colorspace for next iteration - # or for output node comparison - pre_colorspace = output_space - pre_node = node - - # making sure we are back in script working colorspace - if pre_colorspace != root_working_colorspace: - pre_node = _add_ocio_colorspace_node( - pre_node, - pre_colorspace, - root_working_colorspace - ) - - # reusing output node if it exists during update - if not output_node: - output = nuke.createNode("Output") - else: - output = output_node - - output.setInput(0, pre_node) - - return group_node - - def update(self, container, context): - repre_entity = context["representation"] - - group_node = container["node"] - - filepath = get_representation_path(repre_entity) - - json_f = self._load_json_data(filepath) - - group_node = self._create_group_node( - filepath, - json_f["data"], - 
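            # passing the existing group here means its Input/Output nodes
            # are kept and only the OCIO nodes in between are rebuilt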
group_node - ) - - self._node_version_color( - context["project"]["name"], context["version"], group_node - ) - - self.log.info("Updated lut setup: `{}`".format( - group_node["name"].value())) - - return update_container( - group_node, {"representation": repre_entity["id"]}) - - def _load_json_data(self, filepath): - # getting data from json file with unicode conversion - with open(filepath, "r") as _file: - json_f = {self._bytify(key): self._bytify(value) - for key, value in json.load(_file).items()} - - # check if the version in json_f is the same as plugin version - if json_f["version"] != self.schema_version: - raise KeyError( - "Version of json file is not the same as plugin version") - - return json_f - - def _bytify(self, input): - """ - Converts unicode strings to strings - It goes through all dictionary - - Arguments: - input (dict/str): input - - Returns: - dict: with fixed values and keys - - """ - - if isinstance(input, dict): - return {self._bytify(key): self._bytify(value) - for key, value in input.items()} - elif isinstance(input, list): - return [self._bytify(element) for element in input] - elif isinstance(input, six.text_type): - return str(input) - else: - return input - - def switch(self, container, context): - self.update(container, context) - - def remove(self, container): - node = nuke.toNode(container['objectName']) - with viewer_update_and_undo_stop(): - nuke.delete(node) - - def _node_version_color(self, project_name, version_entity, node): - """ Coloring a node by correct color by actual version""" - - last_version_entity = ayon_api.get_last_version_by_product_id( - project_name, version_entity["productId"], fields={"id"} - ) - - # change color of node - if version_entity["id"] == last_version_entity["id"]: - color_value = self.current_node_color - else: - color_value = self.old_node_color - node["tile_color"].setValue(int(color_value, 16)) - - -def _colorspace_name_by_type(colorspace_data): - """ - Returns colorspace name by type - - Arguments: - colorspace_data (dict): colorspace data - - Returns: - str: colorspace name - """ - if colorspace_data["type"] == "colorspaces": - return colorspace_data["name"] - elif colorspace_data["type"] == "roles": - return colorspace_data["colorspace"] - else: - raise KeyError("Unknown colorspace type: {}".format( - colorspace_data["type"])) - - -def _add_ocio_colorspace_node(pre_node, input_space, output_space): - """ - Adds OCIOColorSpace node to the node graph - - Arguments: - pre_node (nuke.Node): node to connect to - input_space (str): input colorspace - output_space (str): output colorspace - - Returns: - nuke.Node: node with OCIOColorSpace node - """ - node = nuke.createNode("OCIOColorSpace") - node.setInput(0, pre_node) - node["in_colorspace"].setValue(input_space) - node["out_colorspace"].setValue(output_space) - - pre_node.autoplace() - node.setInput(0, pre_node) - node.autoplace() - - return node diff --git a/server_addon/nuke/client/ayon_nuke/plugins/load/load_script_precomp.py b/server_addon/nuke/client/ayon_nuke/plugins/load/load_script_precomp.py deleted file mode 100644 index cf543dabfd..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/load/load_script_precomp.py +++ /dev/null @@ -1,162 +0,0 @@ -import nuke -import ayon_api - -from ayon_core.pipeline import ( - load, - get_representation_path, -) -from ayon_nuke.api.lib import get_avalon_knob_data -from ayon_nuke.api import ( - containerise, - update_container, - viewer_update_and_undo_stop -) - - -class LinkAsGroup(load.LoaderPlugin): - """Copy the 
published file to be pasted at the desired location""" - - product_types = {"workfile", "nukenodes"} - representations = {"*"} - extensions = {"nk"} - - settings_category = "nuke" - - label = "Load Precomp" - order = 0 - icon = "file" - color = "#cc0000" - - def load(self, context, name, namespace, data): - # for k, v in context.items(): - # log.info("key: `{}`, value: {}\n".format(k, v)) - version_entity = context["version"] - - version_attributes = version_entity["attrib"] - first = version_attributes.get("frameStart") - last = version_attributes.get("frameEnd") - colorspace = version_attributes.get("colorSpace") - - # Fallback to folder name when namespace is None - if namespace is None: - namespace = context["folder"]["name"] - - file = self.filepath_from_context(context).replace("\\", "/") - self.log.info("file: {}\n".format(file)) - - data_imprint = { - "startingFrame": first, - "frameStart": first, - "frameEnd": last, - "version": version_entity["version"] - } - # add additional metadata from the version to imprint to Avalon knob - for k in [ - "frameStart", - "frameEnd", - "handleStart", - "handleEnd", - "source", - "fps" - ]: - data_imprint[k] = version_attributes[k] - - # group context is set to precomp, so back up one level. - nuke.endGroup() - - # P = nuke.nodes.LiveGroup("file {}".format(file)) - P = nuke.createNode( - "Precomp", - "file {}".format(file), - inpanel=False - ) - - # Set colorspace defined in version data - self.log.info("colorspace: {}\n".format(colorspace)) - - P["name"].setValue("{}_{}".format(name, namespace)) - P["useOutput"].setValue(True) - - with P: - # iterate through all nodes in group node and find pype writes - writes = [n.name() for n in nuke.allNodes() - if n.Class() == "Group" - if get_avalon_knob_data(n)] - - if writes: - # create panel for selecting output - panel_choices = " ".join(writes) - panel_label = "Select write node for output" - p = nuke.Panel("Select Write Node") - p.addEnumerationPulldown( - panel_label, panel_choices) - p.show() - P["output"].setValue(p.value(panel_label)) - - P["tile_color"].setValue(0xff0ff0ff) - - return containerise( - node=P, - name=name, - namespace=namespace, - context=context, - loader=self.__class__.__name__, - data=data_imprint) - - def switch(self, container, context): - self.update(container, context) - - def update(self, container, context): - """Update the Loader's path - - Nuke automatically tries to reset some variables when changing - the loader's path to a new file. 
These automatic changes are to its - inputs: - - """ - node = container["node"] - - project_name = context["project"]["name"] - version_entity = context["version"] - repre_entity = context["representation"] - - root = get_representation_path(repre_entity).replace("\\", "/") - - # Get start frame from version data - - version_attributes = version_entity["attrib"] - updated_dict = { - "representation": repre_entity["id"], - "frameEnd": version_attributes.get("frameEnd"), - "version": version_entity["version"], - "colorspace": version_attributes.get("colorSpace"), - "source": version_attributes.get("source"), - "fps": version_attributes.get("fps"), - } - - # Update the imprinted representation - update_container( - node, - updated_dict - ) - - node["file"].setValue(root) - - last_version_entity = ayon_api.get_last_version_by_product_id( - project_name, version_entity["productId"], fields={"id"} - ) - # change color of node - if version_entity["id"] == last_version_entity["id"]: - color_value = "0xff0ff0ff" - else: - color_value = "0xd84f20ff" - node["tile_color"].setValue(int(color_value, 16)) - - self.log.info( - "updated to version: {}".format(version_entity["version"]) - ) - - def remove(self, container): - node = container["node"] - with viewer_update_and_undo_stop(): - nuke.delete(node) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_backdrop.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_backdrop.py deleted file mode 100644 index 1471159380..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_backdrop.py +++ /dev/null @@ -1,62 +0,0 @@ -from pprint import pformat -import pyblish.api -from ayon_nuke.api import lib as pnlib -import nuke - - -class CollectBackdrops(pyblish.api.InstancePlugin): - """Collect Backdrop node instance and its content - """ - - order = pyblish.api.CollectorOrder + 0.22 - label = "Collect Backdrop" - hosts = ["nuke"] - families = ["nukenodes"] - - settings_category = "nuke" - - def process(self, instance): - self.log.debug(pformat(instance.data)) - - bckn = instance.data["transientData"]["node"] - - # define size of the backdrop - left = bckn.xpos() - top = bckn.ypos() - right = left + bckn['bdwidth'].value() - bottom = top + bckn['bdheight'].value() - - instance.data["transientData"]["childNodes"] = [] - # iterate all nodes - for node in nuke.allNodes(): - - # exclude viewer - if node.Class() == "Viewer": - continue - - # find all related nodes - if (node.xpos() > left) \ - and (node.xpos() + node.screenWidth() < right) \ - and (node.ypos() > top) \ - and (node.ypos() + node.screenHeight() < bottom): - - # add contained nodes to instance's node list - instance.data["transientData"]["childNodes"].append(node) - - # get all connections from outside of backdrop - nodes = instance.data["transientData"]["childNodes"] - connections_in, connections_out = pnlib.get_dependent_nodes(nodes) - instance.data["transientData"]["nodeConnectionsIn"] = connections_in - instance.data["transientData"]["nodeConnectionsOut"] = connections_out - - # make label nicer - instance.data["label"] = "{0} ({1} nodes)".format( - bckn.name(), len(instance.data["transientData"]["childNodes"])) - - # get version - version = instance.context.data.get('version') - - if version: - instance.data['version'] = version - - self.log.debug("Backdrop instance collected: `{}`".format(instance)) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_context_data.py 
b/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_context_data.py
deleted file mode 100644
index 33c8e63e82..0000000000
--- a/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_context_data.py
+++ /dev/null
@@ -1,69 +0,0 @@
-import os
-import nuke
-import pyblish.api
-from ayon_core.lib import get_version_from_path
-import ayon_nuke.api as napi
-from ayon_core.pipeline import KnownPublishError
-
-
-class CollectContextData(pyblish.api.ContextPlugin):
-    """Collect current context data for publishing."""
-
-    order = pyblish.api.CollectorOrder - 0.499
-    label = "Collect context data"
-    hosts = ['nuke']
-
-    settings_category = "nuke"
-
-    def process(self, context):  # sourcery skip: avoid-builtin-shadow
-        root_node = nuke.root()
-
-        current_file = os.path.normpath(root_node.name())
-
-        if current_file.lower() == "root":
-            raise KnownPublishError(
-                "Workfile does not have a valid file name. \n"
-                "Use the workfile tool to manage the name correctly."
-            )
-
-        # Get frame range
-        first_frame = int(root_node["first_frame"].getValue())
-        last_frame = int(root_node["last_frame"].getValue())
-
-        # get instance data from root
-        root_instance_context = napi.get_node_data(
-            root_node, napi.INSTANCE_DATA_KNOB
-        )
-
-        handle_start = root_instance_context["handleStart"]
-        handle_end = root_instance_context["handleEnd"]
-
-        # Get format
-        format = root_node['format'].value()
-        resolution_width = format.width()
-        resolution_height = format.height()
-        pixel_aspect = format.pixelAspect()
-
-        script_data = {
-            "frameStart": first_frame + handle_start,
-            "frameEnd": last_frame - handle_end,
-            "resolutionWidth": resolution_width,
-            "resolutionHeight": resolution_height,
-            "pixelAspect": pixel_aspect,
-
-            "handleStart": handle_start,
-            "handleEnd": handle_end,
-            "step": 1,
-            "fps": root_node['fps'].value(),
-
-            "currentFile": current_file,
-            "version": int(get_version_from_path(current_file)),
-
-            "host": pyblish.api.current_host(),
-            "hostVersion": nuke.NUKE_VERSION_STRING
-        }
-
-        context.data["scriptData"] = script_data
-        context.data.update(script_data)
-
-        self.log.debug('Context from Nuke script collected')
diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_framerate.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_framerate.py
deleted file mode 100644
index cd77eab0f1..0000000000
--- a/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_framerate.py
+++ /dev/null
@@ -1,19 +0,0 @@
-import nuke
-
-import pyblish.api
-
-
-class CollectFramerate(pyblish.api.ContextPlugin):
-    """Collect framerate."""
-
-    order = pyblish.api.CollectorOrder
-    label = "Collect Framerate"
-    hosts = [
-        "nuke",
-        "nukeassist"
-    ]
-
-    settings_category = "nuke"
-
-    def process(self, context):
-        context.data["fps"] = nuke.root()["fps"].getValue()
diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_gizmo.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_gizmo.py
deleted file mode 100644
index ece9823b37..0000000000
--- a/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_gizmo.py
+++ /dev/null
@@ -1,49 +0,0 @@
-import pyblish.api
-import nuke
-
-
-class CollectGizmo(pyblish.api.InstancePlugin):
-    """Collect Gizmo (group) node instance and its content"""
-
-    order = pyblish.api.CollectorOrder + 0.22
-    label = "Collect Gizmo (group)"
-    hosts = ["nuke"]
-    families = ["gizmo"]
-
-    settings_category = "nuke"
-
-    def process(self, instance):
-
-        gizmo_node = instance.data["transientData"]["node"]
-
-        # add product type to families
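        # (inserted at index 0 so the product type always leads the
        #  families list that downstream plugins filter on)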
-        instance.data["families"].insert(0, instance.data["productType"])
-        # make label nicer
-        instance.data["label"] = gizmo_node.name()
-
-        # Get frame range
-        handle_start = instance.context.data["handleStart"]
-        handle_end = instance.context.data["handleEnd"]
-        first_frame = int(nuke.root()["first_frame"].getValue())
-        last_frame = int(nuke.root()["last_frame"].getValue())
-        families = [instance.data["productType"]] + instance.data["families"]
-
-        # Add version data to instance
-        version_data = {
-            "handleStart": handle_start,
-            "handleEnd": handle_end,
-            "frameStart": first_frame + handle_start,
-            "frameEnd": last_frame - handle_end,
-            "colorspace": nuke.root().knob('workingSpaceLUT').value(),
-            "families": families,
-            "productName": instance.data["productName"],
-            "fps": instance.context.data["fps"]
-        }
-
-        instance.data.update({
-            "versionData": version_data,
-            "frameStart": first_frame,
-            "frameEnd": last_frame
-        })
-        self.log.debug("Gizmo instance collected: `{}`".format(instance))
diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_headless_farm.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_headless_farm.py
deleted file mode 100644
index c00b9a8f5d..0000000000
--- a/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_headless_farm.py
+++ /dev/null
@@ -1,58 +0,0 @@
-import pyblish.api
-
-from ayon_core.pipeline.publish import (
-    AYONPyblishPluginMixin
-)
-
-
-class CollectRenderOnFarm(pyblish.api.ContextPlugin):
-    """Setup instances for render on farm submission."""
-
-    # Needs to be after CollectFromCreateContext
-    order = pyblish.api.CollectorOrder - 0.49
-    label = "Collect Render On Farm"
-    hosts = ["nuke"]
-
-    settings_category = "nuke"
-
-    def process(self, context):
-        if not context.data.get("render_on_farm", False):
-            return
-
-        for instance in context:
-            if instance.data["family"] == "workfile":
-                instance.data["active"] = False
-                continue
-
-            # Filter out all other instances.
-            node = instance.data["transientData"]["node"]
-            if node.name() != instance.context.data["node_name"]:
-                instance.data["active"] = False
-                continue
-
-            instance.data["families"].append("render_on_farm")
-
-            # Enable for farm publishing.
-            instance.data["farm"] = True
-
-            # Skip workfile version incremental save.
-            instance.context.data["increment_script_version"] = False
-
-
-class SetupRenderOnFarm(pyblish.api.InstancePlugin, AYONPyblishPluginMixin):
-    """Setup instance for render on farm submission."""
-
-    order = pyblish.api.CollectorOrder + 0.4999
-    label = "Setup Render On Farm"
-    hosts = ["nuke"]
-    families = ["render_on_farm"]
-
-    def process(self, instance):
-        # Clear the families as we only want the main family, i.e. no review
-        # etc.
-        instance.data["families"] = ["render_on_farm"]
-
-        # Use the workfile instead of the published one.
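        # (the farm job is submitted from the artist's currently open
        #  script, so the Deadline submission must not swap in the
        #  published workfile copy)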
-        publish_attributes = instance.data["publish_attributes"]
-        plugin_attributes = publish_attributes["NukeSubmitDeadline"]
-        plugin_attributes["use_published_workfile"] = False
diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_model.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_model.py
deleted file mode 100644
index f4266bbbcb..0000000000
--- a/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_model.py
+++ /dev/null
@@ -1,48 +0,0 @@
-import pyblish.api
-import nuke
-
-
-class CollectModel(pyblish.api.InstancePlugin):
-    """Collect Model node instance and its content"""
-
-    order = pyblish.api.CollectorOrder + 0.22
-    label = "Collect Model"
-    hosts = ["nuke"]
-    families = ["model"]
-
-    settings_category = "nuke"
-
-    def process(self, instance):
-
-        geo_node = instance.data["transientData"]["node"]
-
-        # add product type to families
-        instance.data["families"].insert(0, instance.data["productType"])
-        # make label nicer
-        instance.data["label"] = geo_node.name()
-
-        # Get frame range
-        handle_start = instance.context.data["handleStart"]
-        handle_end = instance.context.data["handleEnd"]
-        first_frame = int(nuke.root()["first_frame"].getValue())
-        last_frame = int(nuke.root()["last_frame"].getValue())
-        families = [instance.data["productType"]] + instance.data["families"]
-        # Add version data to instance
-        version_data = {
-            "handleStart": handle_start,
-            "handleEnd": handle_end,
-            "frameStart": first_frame + handle_start,
-            "frameEnd": last_frame - handle_end,
-            "colorspace": nuke.root().knob('workingSpaceLUT').value(),
-            "families": families,
-            "productName": instance.data["productName"],
-            "fps": instance.context.data["fps"]
-        }
-
-        instance.data.update({
-            "versionData": version_data,
-            "frameStart": first_frame,
-            "frameEnd": last_frame
-        })
-        self.log.debug("Model instance collected: `{}`".format(instance))
diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_nuke_instance_data.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_nuke_instance_data.py
deleted file mode 100644
index d1392a8460..0000000000
--- a/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_nuke_instance_data.py
+++ /dev/null
@@ -1,57 +0,0 @@
-import nuke
-import pyblish.api
-
-
-class CollectInstanceData(pyblish.api.InstancePlugin):
-    """Collect Nuke instance data"""
-
-    order = pyblish.api.CollectorOrder - 0.49
-    label = "Collect Nuke Instance Data"
-    hosts = ["nuke", "nukeassist"]
-
-    settings_category = "nuke"
-
-    # presets
-    sync_workfile_version_on_families = []
-
-    def process(self, instance):
-        product_type = instance.data["productType"]
-
-        # Get format
-        root = nuke.root()
-        format_ = root['format'].value()
-        resolution_width = format_.width()
-        resolution_height = format_.height()
-        pixel_aspect = format_.pixelAspect()
-
-        # sync workfile version
-        if product_type in self.sync_workfile_version_on_families:
-            self.log.debug(
-                "Syncing version with workfile for '{}'".format(
-                    product_type
-                )
-            )
-            # get version to instance for integration
-            instance.data['version'] = instance.context.data['version']
-
-        instance.data.update({
-            "step": 1,
-            "fps": root['fps'].value(),
-            "resolutionWidth": resolution_width,
-            "resolutionHeight": resolution_height,
-            "pixelAspect": pixel_aspect
-        })
-
-        # add creator attributes to instance
-        creator_attributes = instance.data["creator_attributes"]
-        instance.data.update(creator_attributes)
-
-        # add review family if review activated on instance
-        if instance.data.get("review"):
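            # tagging with the "review" family lets the downstream review
            # and burnin extractors pick this instance up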
instance.data["families"].append("review") - - self.log.debug("Collected instance: {}".format( - instance.data)) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_reads.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_reads.py deleted file mode 100644 index 439374e825..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_reads.py +++ /dev/null @@ -1,124 +0,0 @@ -import os -import re -import nuke -import pyblish.api - - -class CollectNukeReads(pyblish.api.InstancePlugin): - """Collect all read nodes.""" - - order = pyblish.api.CollectorOrder + 0.04 - label = "Collect Source Reads" - hosts = ["nuke", "nukeassist"] - families = ["source"] - - settings_category = "nuke" - - def process(self, instance): - self.log.debug("checking instance: {}".format(instance)) - - node = instance.data["transientData"]["node"] - if node.Class() != "Read": - return - - file_path = node["file"].value() - file_name = os.path.basename(file_path) - items = file_name.split(".") - - if len(items) < 2: - raise ValueError - - ext = items[-1] - - # Get frame range - handle_start = instance.context.data["handleStart"] - handle_end = instance.context.data["handleEnd"] - first_frame = node['first'].value() - last_frame = node['last'].value() - - # colorspace - colorspace = node["colorspace"].value() - if "default" in colorspace: - colorspace = colorspace.replace("default (", "").replace(")", "") - - # # Easier way to sequence - Not tested - # isSequence = True - # if first_frame == last_frame: - # isSequence = False - - isSequence = False - if len(items) > 1: - sequence = items[-2] - hash_regex = re.compile(r'([#*])') - seq_regex = re.compile(r'[%0-9*d]') - hash_match = re.match(hash_regex, sequence) - seq_match = re.match(seq_regex, sequence) - if hash_match or seq_match: - isSequence = True - - # get source path - path = nuke.filename(node) - source_dir = os.path.dirname(path) - self.log.debug('source dir: {}'.format(source_dir)) - - if isSequence: - source_files = [f for f in os.listdir(source_dir) - if ext in f - if items[0] in f] - else: - source_files = file_name - - # Include start and end render frame in label - name = node.name() - label = "{0} ({1}-{2})".format( - name, - int(first_frame), - int(last_frame) - ) - - self.log.debug("collected_frames: {}".format(label)) - - if "representations" not in instance.data: - instance.data["representations"] = [] - - representation = { - 'name': ext, - 'ext': ext, - 'files': source_files, - "stagingDir": source_dir, - "frameStart": "%0{}d".format( - len(str(last_frame))) % first_frame - } - instance.data["representations"].append(representation) - - transfer = node["publish"] if "publish" in node.knobs() else False - instance.data['transfer'] = transfer - - # Add version data to instance - version_data = { - "handleStart": handle_start, - "handleEnd": handle_end, - "frameStart": first_frame + handle_start, - "frameEnd": last_frame - handle_end, - "colorspace": colorspace, - "families": [instance.data["productType"]], - "productName": instance.data["productName"], - "fps": instance.context.data["fps"] - } - - instance.data.update({ - "versionData": version_data, - "path": path, - "stagingDir": source_dir, - "ext": ext, - "label": label, - "frameStart": first_frame, - "frameEnd": last_frame, - "colorspace": colorspace, - "handleStart": handle_start, - "handleEnd": handle_end, - "step": 1, - "fps": int(nuke.root()['fps'].value()) - }) - - self.log.debug("instance.data: {}".format(instance.data)) diff --git 
a/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_slate_node.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_slate_node.py deleted file mode 100644 index bb3b0083ab..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_slate_node.py +++ /dev/null @@ -1,48 +0,0 @@ -import pyblish.api -import nuke - - -class CollectSlate(pyblish.api.InstancePlugin): - """Check if SLATE node is in scene and connected to rendering tree""" - - order = pyblish.api.CollectorOrder + 0.002 - label = "Collect Slate Node" - hosts = ["nuke"] - families = ["render"] - - settings_category = "nuke" - - def process(self, instance): - node = instance.data["transientData"]["node"] - - slate = next( - ( - n_ for n_ in nuke.allNodes() - if "slate" in n_.name().lower() - if not n_["disable"].getValue() and - "publish_instance" not in n_.knobs() # Exclude instance nodes. - ), - None - ) - - if slate: - # check if slate node is connected to write node tree - slate_check = 0 - slate_node = None - while slate_check == 0: - try: - node = node.dependencies()[0] - if slate.name() in node.name(): - slate_node = node - slate_check = 1 - except IndexError: - break - - if slate_node: - instance.data["slateNode"] = slate_node - instance.data["slate"] = True - instance.data["families"].append("slate") - self.log.debug( - "Slate node is in node graph: `{}`".format(slate.name())) - self.log.debug( - "__ instance.data: `{}`".format(instance.data)) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_workfile.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_workfile.py deleted file mode 100644 index e4bd5ed129..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_workfile.py +++ /dev/null @@ -1,44 +0,0 @@ -import os -import nuke -import pyblish.api - - -class CollectWorkfile(pyblish.api.InstancePlugin): - """Collect current script for publish.""" - - order = pyblish.api.CollectorOrder - label = "Collect Workfile" - hosts = ['nuke'] - families = ["workfile"] - - settings_category = "nuke" - - def process(self, instance): # sourcery skip: avoid-builtin-shadow - - script_data = instance.context.data["scriptData"] - current_file = os.path.normpath(nuke.root().name()) - - # creating instances per write node - staging_dir = os.path.dirname(current_file) - base_name = os.path.basename(current_file) - - # creating representation - representation = { - 'name': 'nk', - 'ext': 'nk', - 'files': base_name, - "stagingDir": staging_dir, - } - - # creating instance data - instance.data.update({ - "name": base_name, - "representations": [representation] - }) - - # adding basic script data - instance.data.update(script_data) - - self.log.debug( - "Collected current script version: {}".format(current_file) - ) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_writes.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_writes.py deleted file mode 100644 index 816f493d72..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/collect_writes.py +++ /dev/null @@ -1,402 +0,0 @@ -import os -import nuke -import pyblish.api -from ayon_nuke import api as napi -from ayon_core.pipeline import publish - - -class CollectNukeWrites(pyblish.api.InstancePlugin, - publish.ColormanagedPyblishPluginMixin): - """Collect all write nodes.""" - - order = pyblish.api.CollectorOrder + 0.0021 - label = "Collect Writes" - hosts = ["nuke", "nukeassist"] - families = ["render", "prerender", "image"] - - settings_category = "nuke" - - # 
cache
-    _write_nodes = {}
-    _frame_ranges = {}
-
-    def process(self, instance):
-
-        group_node = instance.data["transientData"]["node"]
-        render_target = instance.data["render_target"]
-
-        write_node = self._write_node_helper(instance)
-
-        if write_node is None:
-            self.log.warning(
                "Created node '{}' is missing write node!".format(
-                    group_node.name()
-                )
-            )
-            return
-
-        # get colorspace and add to version data
-        colorspace = napi.get_colorspace_from_node(write_node)
-
-        if render_target == "frames":
-            self._set_existing_files_data(instance, colorspace)
-
-        elif render_target == "frames_farm":
-            collected_frames = self._set_existing_files_data(
-                instance, colorspace)
-
-            self._set_expected_files(instance, collected_frames)
-
-            self._add_farm_instance_data(instance)
-
-        elif render_target == "farm":
-            self._add_farm_instance_data(instance)
-
-        # set additional instance data
-        self._set_additional_instance_data(instance, render_target, colorspace)
-
-    def _set_existing_files_data(self, instance, colorspace):
-        """Set existing files data to instance data.
-
-        Args:
-            instance (pyblish.api.Instance): pyblish instance
-            colorspace (str): colorspace
-
-        Returns:
-            list: collected frames
-        """
-        collected_frames = self._get_collected_frames(instance)
-
-        representation = self._get_existing_frames_representation(
-            instance, collected_frames
-        )
-
-        # inject colorspace data
-        self.set_representation_colorspace(
-            representation, instance.context,
-            colorspace=colorspace
-        )
-
-        instance.data["representations"].append(representation)
-
-        return collected_frames
-
-    def _set_expected_files(self, instance, collected_frames):
-        """Set expected files to instance data.
-
-        Args:
-            instance (pyblish.api.Instance): pyblish instance
-            collected_frames (list): collected frames
-        """
-        write_node = self._write_node_helper(instance)
-
-        write_file_path = nuke.filename(write_node)
-        output_dir = os.path.dirname(write_file_path)
-
-        instance.data["expectedFiles"] = [
-            os.path.join(output_dir, source_file)
-            for source_file in collected_frames
-        ]
-
-    def _get_frame_range_data(self, instance):
-        """Get frame range data from instance.
-
-        Args:
-            instance (pyblish.api.Instance): pyblish instance
-
-        Returns:
-            tuple: first_frame, last_frame
-        """
-
-        instance_name = instance.data["name"]
-
-        if self._frame_ranges.get(instance_name):
-            # return cached frame range
-            return self._frame_ranges[instance_name]
-
-        write_node = self._write_node_helper(instance)
-
-        # Get frame range from workfile
-        first_frame = int(nuke.root()["first_frame"].getValue())
-        last_frame = int(nuke.root()["last_frame"].getValue())
-
-        # Get frame range from write node if activated
-        if write_node["use_limit"].getValue():
-            first_frame = int(write_node["first"].getValue())
-            last_frame = int(write_node["last"].getValue())
-
-        # add to cache
-        self._frame_ranges[instance_name] = (first_frame, last_frame)
-
-        return first_frame, last_frame
-
-    def _set_additional_instance_data(
-        self, instance, render_target, colorspace
-    ):
-        """Set additional instance data.
-
-        Args:
-            instance (pyblish.api.Instance): pyblish instance
-            render_target (str): render target
-            colorspace (str): colorspace
-        """
-        product_type = instance.data["productType"]
-
-        # add targeted family to families
-        instance.data["families"].append(
-            "{}.{}".format(product_type, render_target)
-        )
-        self.log.debug("Appending render target to families: {}.{}".format(
-            product_type, render_target)
-        )
-
-        write_node = self._write_node_helper(instance)
-
-        # Determine defined file type
-        ext = write_node["file_type"].value()
-
-        # determine defined channel type
-        color_channels = write_node["channels"].value()
-
-        # get frame range data
-        handle_start = instance.context.data["handleStart"]
-        handle_end = instance.context.data["handleEnd"]
-        first_frame, last_frame = self._get_frame_range_data(instance)
-
-        # get output paths
-        write_file_path = nuke.filename(write_node)
-        output_dir = os.path.dirname(write_file_path)
-
-        # TODO: remove this when we have proper colorspace support
-        version_data = {
-            "colorspace": colorspace
-        }
-
-        instance.data.update({
-            "versionData": version_data,
-            "path": write_file_path,
-            "outputDir": output_dir,
-            "ext": ext,
-            "colorspace": colorspace,
-            "color_channels": color_channels
-        })
-
-        if product_type == "render":
-            instance.data.update({
-                "handleStart": handle_start,
-                "handleEnd": handle_end,
-                "frameStart": first_frame + handle_start,
-                "frameEnd": last_frame - handle_end,
-                "frameStartHandle": first_frame,
-                "frameEndHandle": last_frame,
-            })
-        else:
-            instance.data.update({
-                "handleStart": 0,
-                "handleEnd": 0,
-                "frameStart": first_frame,
-                "frameEnd": last_frame,
-                "frameStartHandle": first_frame,
-                "frameEndHandle": last_frame,
-            })
-
-        # TODO temporarily set stagingDir as persistent for backward
-        # compatibility. This is mainly focused on `renders` folders which
-        # were previously not cleaned up (and could be used in read nodes)
-        # this logic should be removed and replaced with custom staging dir
-        instance.data["stagingDir_persistent"] = True
-
-    def _write_node_helper(self, instance):
-        """Helper function to get write node from instance.
-
-        Also sets instance transient data with child nodes.
-
-        Args:
-            instance (pyblish.api.Instance): pyblish instance
-
-        Returns:
-            nuke.Node: write node
-        """
-        instance_name = instance.data["name"]
-
-        if self._write_nodes.get(instance_name):
-            # return cached write node
-            return self._write_nodes[instance_name]
-
-        # get all child nodes from group node
-        child_nodes = napi.get_instance_group_node_childs(instance)
-
-        # set child nodes to instance transient data
-        instance.data["transientData"]["childNodes"] = child_nodes
-
-        write_node = None
-        for node_ in child_nodes:
-            if node_.Class() == "Write":
-                write_node = node_
-
-        if write_node:
-            # for slate frame extraction
-            instance.data["transientData"]["writeNode"] = write_node
-            # add to cache
-            self._write_nodes[instance_name] = write_node
-
-        # may be None when no Write node exists; process() handles that
-        return write_node
-
-    def _get_existing_frames_representation(
-        self,
-        instance,
-        collected_frames
-    ):
-        """Get existing frames representation.
- - Args: - instance (pyblish.api.Instance): pyblish instance - collected_frames (list): collected frames - - Returns: - dict: representation - """ - - first_frame, last_frame = self._get_frame_range_data(instance) - - write_node = self._write_node_helper(instance) - - write_file_path = nuke.filename(write_node) - output_dir = os.path.dirname(write_file_path) - - # Determine defined file type - ext = write_node["file_type"].value() - - representation = { - "name": ext, - "ext": ext, - "stagingDir": output_dir, - "tags": [] - } - - # set slate frame - collected_frames = self._add_slate_frame_to_collected_frames( - instance, - collected_frames, - first_frame, - last_frame - ) - - if len(collected_frames) == 1: - representation['files'] = collected_frames.pop() - else: - representation['files'] = collected_frames - - return representation - - def _get_frame_start_str(self, first_frame, last_frame): - """Get frame start string. - - Args: - first_frame (int): first frame - last_frame (int): last frame - - Returns: - str: frame start string - """ - # convert first frame to string with padding - return ( - "{{:0{}d}}".format(len(str(last_frame))) - ).format(first_frame) - - def _add_slate_frame_to_collected_frames( - self, - instance, - collected_frames, - first_frame, - last_frame - ): - """Add slate frame to collected frames. - - Args: - instance (pyblish.api.Instance): pyblish instance - collected_frames (list): collected frames - first_frame (int): first frame - last_frame (int): last frame - - Returns: - list: collected frames - """ - frame_start_str = self._get_frame_start_str(first_frame, last_frame) - frame_length = int(last_frame - first_frame + 1) - - # this will only run if slate frame is not already - # rendered from previews publishes - if ( - "slate" in instance.data["families"] - and frame_length == len(collected_frames) - ): - frame_slate_str = self._get_frame_start_str( - first_frame - 1, - last_frame - ) - - slate_frame = collected_frames[0].replace( - frame_start_str, frame_slate_str) - collected_frames.insert(0, slate_frame) - - return collected_frames - - def _add_farm_instance_data(self, instance): - """Add farm publishing related instance data. - - Args: - instance (pyblish.api.Instance): pyblish instance - """ - - # make sure rendered sequence on farm will - # be used for extract review - if not instance.data.get("review"): - instance.data["useSequenceForReview"] = False - - # Farm rendering - instance.data.update({ - "transfer": False, - "farm": True # to skip integrate - }) - self.log.info("Farm rendering ON ...") - - def _get_collected_frames(self, instance): - """Get collected frames. 
- - Args: - instance (pyblish.api.Instance): pyblish instance - - Returns: - list: collected frames - """ - - first_frame, last_frame = self._get_frame_range_data(instance) - - write_node = self._write_node_helper(instance) - - write_file_path = nuke.filename(write_node) - output_dir = os.path.dirname(write_file_path) - - # get file path knob - node_file_knob = write_node["file"] - # list file paths based on input frames - expected_paths = list(sorted({ - node_file_knob.evaluate(frame) - for frame in range(first_frame, last_frame + 1) - })) - - # convert only to base names - expected_filenames = { - os.path.basename(filepath) - for filepath in expected_paths - } - - # make sure files are existing at folder - collected_frames = [ - filename - for filename in os.listdir(output_dir) - if filename in expected_filenames - ] - - return collected_frames diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_backdrop.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_backdrop.py deleted file mode 100644 index 8c42920979..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_backdrop.py +++ /dev/null @@ -1,106 +0,0 @@ -import os - -import nuke - -import pyblish.api - -from ayon_core.pipeline import publish -from ayon_nuke.api.lib import ( - maintained_selection, - reset_selection, - select_nodes -) - - -class ExtractBackdropNode(publish.Extractor): - """Extracting content of backdrop nodes - - Will create nuke script only with containing nodes. - Also it will solve Input and Output nodes. - - """ - - order = pyblish.api.ExtractorOrder - label = "Extract Backdrop" - hosts = ["nuke"] - families = ["nukenodes"] - - settings_category = "nuke" - - def process(self, instance): - tmp_nodes = [] - child_nodes = instance.data["transientData"]["childNodes"] - # all connections outside of backdrop - connections_in = instance.data["transientData"]["nodeConnectionsIn"] - connections_out = instance.data["transientData"]["nodeConnectionsOut"] - self.log.debug("_ connections_in: `{}`".format(connections_in)) - self.log.debug("_ connections_out: `{}`".format(connections_out)) - - # Define extract output file path - stagingdir = self.staging_dir(instance) - filename = "{0}.nk".format(instance.name) - path = os.path.join(stagingdir, filename) - - # maintain selection - with maintained_selection(): - # create input child_nodes and name them as passing node (*_INP) - for n, inputs in connections_in.items(): - for i, input in inputs: - inpn = nuke.createNode("Input") - inpn["name"].setValue("{}_{}_INP".format(n.name(), i)) - n.setInput(i, inpn) - inpn.setXYpos(input.xpos(), input.ypos()) - child_nodes.append(inpn) - tmp_nodes.append(inpn) - - reset_selection() - - # connect output node - for n, output in connections_out.items(): - opn = nuke.createNode("Output") - output.setInput( - next((i for i, d in enumerate(output.dependencies()) - if d.name() in n.name()), 0), opn) - opn.setInput(0, n) - opn.autoplace() - child_nodes.append(opn) - tmp_nodes.append(opn) - reset_selection() - - # select child_nodes to copy - reset_selection() - select_nodes(child_nodes) - # create tmp nk file - # save file to the path - nuke.nodeCopy(path) - - # Clean up - for tn in tmp_nodes: - nuke.delete(tn) - - # restore original connections - # reconnect input node - for n, inputs in connections_in.items(): - for i, input in inputs: - n.setInput(i, input) - - # reconnect output node - for n, output in connections_out.items(): - output.setInput( - next((i for i, d in 
enumerate(output.dependencies()) - if d.name() in n.name()), 0), n) - - if "representations" not in instance.data: - instance.data["representations"] = [] - - # create representation - representation = { - 'name': 'nk', - 'ext': 'nk', - 'files': filename, - "stagingDir": stagingdir - } - instance.data["representations"].append(representation) - - self.log.debug("Extracted instance '{}' to: {}".format( - instance.name, path)) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_camera.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_camera.py deleted file mode 100644 index 83914087e3..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_camera.py +++ /dev/null @@ -1,185 +0,0 @@ -import os -import math - -import nuke - -import pyblish.api - -from ayon_core.pipeline import publish -from ayon_nuke.api.lib import maintained_selection - - -class ExtractCamera(publish.Extractor): - """ 3D camera extractor - """ - label = 'Extract Camera' - order = pyblish.api.ExtractorOrder - families = ["camera"] - hosts = ["nuke"] - - settings_category = "nuke" - - # presets - write_geo_knobs = [ - ("file_type", "abc"), - ("storageFormat", "Ogawa"), - ("writeGeometries", False), - ("writePointClouds", False), - ("writeAxes", False) - ] - - def process(self, instance): - camera_node = instance.data["transientData"]["node"] - handle_start = instance.context.data["handleStart"] - handle_end = instance.context.data["handleEnd"] - first_frame = int(nuke.root()["first_frame"].getValue()) - last_frame = int(nuke.root()["last_frame"].getValue()) - step = 1 - output_range = str(nuke.FrameRange(first_frame, last_frame, step)) - - rm_nodes = [] - self.log.debug("Creating additional nodes for 3D Camera Extractor") - product_name = instance.data["productName"] - staging_dir = self.staging_dir(instance) - - # get extension form preset - extension = next((k[1] for k in self.write_geo_knobs - if k[0] == "file_type"), None) - if not extension: - raise RuntimeError( - "Bad config for extension in presets. 
" - "Talk to your supervisor or pipeline admin") - - # create file name and path - filename = product_name + ".{}".format(extension) - file_path = os.path.join(staging_dir, filename).replace("\\", "/") - - with maintained_selection(): - # bake camera with axeses onto word coordinate XYZ - rm_n = bakeCameraWithAxeses( - camera_node, output_range) - rm_nodes.append(rm_n) - - # create scene node - rm_n = nuke.createNode("Scene") - rm_nodes.append(rm_n) - - # create write geo node - wg_n = nuke.createNode("WriteGeo") - wg_n["file"].setValue(file_path) - # add path to write to - for k, v in self.write_geo_knobs: - wg_n[k].setValue(v) - rm_nodes.append(wg_n) - - # write out camera - nuke.execute( - wg_n, - int(first_frame), - int(last_frame) - ) - # erase additional nodes - for n in rm_nodes: - nuke.delete(n) - - # create representation data - if "representations" not in instance.data: - instance.data["representations"] = [] - - representation = { - 'name': extension, - 'ext': extension, - 'files': filename, - "stagingDir": staging_dir, - "frameStart": first_frame, - "frameEnd": last_frame - } - instance.data["representations"].append(representation) - - instance.data.update({ - "path": file_path, - "outputDir": staging_dir, - "ext": extension, - "handleStart": handle_start, - "handleEnd": handle_end, - "frameStart": first_frame + handle_start, - "frameEnd": last_frame - handle_end, - "frameStartHandle": first_frame, - "frameEndHandle": last_frame, - }) - - self.log.debug("Extracted instance '{0}' to: {1}".format( - instance.name, file_path)) - - -def bakeCameraWithAxeses(camera_node, output_range): - """ Baking all perent hierarchy of axeses into camera - with transposition onto word XYZ coordinance - """ - bakeFocal = False - bakeHaperture = False - bakeVaperture = False - - camera_matrix = camera_node['world_matrix'] - - new_cam_n = nuke.createNode("Camera2") - new_cam_n.setInput(0, None) - new_cam_n['rotate'].setAnimated() - new_cam_n['translate'].setAnimated() - - old_focal = camera_node['focal'] - if old_focal.isAnimated() and not (old_focal.animation(0).constant()): - new_cam_n['focal'].setAnimated() - bakeFocal = True - else: - new_cam_n['focal'].setValue(old_focal.value()) - - old_haperture = camera_node['haperture'] - if old_haperture.isAnimated() and not ( - old_haperture.animation(0).constant()): - new_cam_n['haperture'].setAnimated() - bakeHaperture = True - else: - new_cam_n['haperture'].setValue(old_haperture.value()) - - old_vaperture = camera_node['vaperture'] - if old_vaperture.isAnimated() and not ( - old_vaperture.animation(0).constant()): - new_cam_n['vaperture'].setAnimated() - bakeVaperture = True - else: - new_cam_n['vaperture'].setValue(old_vaperture.value()) - - new_cam_n['win_translate'].setValue(camera_node['win_translate'].value()) - new_cam_n['win_scale'].setValue(camera_node['win_scale'].value()) - - for x in nuke.FrameRange(output_range): - math_matrix = nuke.math.Matrix4() - for y in range(camera_matrix.height()): - for z in range(camera_matrix.width()): - matrix_pointer = z + (y * camera_matrix.width()) - math_matrix[matrix_pointer] = camera_matrix.getValueAt( - x, (y + (z * camera_matrix.width()))) - - rot_matrix = nuke.math.Matrix4(math_matrix) - rot_matrix.rotationOnly() - rot = rot_matrix.rotationsZXY() - - new_cam_n['rotate'].setValueAt(math.degrees(rot[0]), x, 0) - new_cam_n['rotate'].setValueAt(math.degrees(rot[1]), x, 1) - new_cam_n['rotate'].setValueAt(math.degrees(rot[2]), x, 2) - new_cam_n['translate'].setValueAt( - camera_matrix.getValueAt(x, 3), x, 
0) - new_cam_n['translate'].setValueAt( - camera_matrix.getValueAt(x, 7), x, 1) - new_cam_n['translate'].setValueAt( - camera_matrix.getValueAt(x, 11), x, 2) - - if bakeFocal: - new_cam_n['focal'].setValueAt(old_focal.getValueAt(x), x) - if bakeHaperture: - new_cam_n['haperture'].setValueAt(old_haperture.getValueAt(x), x) - if bakeVaperture: - new_cam_n['vaperture'].setValueAt(old_vaperture.getValueAt(x), x) - - return new_cam_n diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_gizmo.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_gizmo.py deleted file mode 100644 index 05e3164163..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_gizmo.py +++ /dev/null @@ -1,91 +0,0 @@ -import os -import nuke - -import pyblish.api - -from ayon_core.pipeline import publish -from ayon_nuke.api import utils as pnutils -from ayon_nuke.api.lib import ( - maintained_selection, - reset_selection, - select_nodes -) - - -class ExtractGizmo(publish.Extractor): - """Extracting Gizmo (Group) node - - Will create nuke script only with the Gizmo node. - """ - - order = pyblish.api.ExtractorOrder - label = "Extract Gizmo (group)" - hosts = ["nuke"] - families = ["gizmo"] - - settings_category = "nuke" - - def process(self, instance): - tmp_nodes = [] - orig_grpn = instance.data["transientData"]["node"] - - # Define extract output file path - stagingdir = self.staging_dir(instance) - filename = "{0}.nk".format(instance.name) - path = os.path.join(stagingdir, filename) - - # maintain selection - with maintained_selection(): - orig_grpn_name = orig_grpn.name() - tmp_grpn_name = orig_grpn_name + "_tmp" - # select original group node - select_nodes([orig_grpn]) - - # copy to clipboard - nuke.nodeCopy("%clipboard%") - - # reset selection to none - reset_selection() - - # paste clipboard - nuke.nodePaste("%clipboard%") - - # assign pasted node - copy_grpn = nuke.selectedNode() - copy_grpn.setXYpos((orig_grpn.xpos() + 120), orig_grpn.ypos()) - - # convert gizmos to groups - pnutils.bake_gizmos_recursively(copy_grpn) - - # add to temporary nodes - tmp_nodes.append(copy_grpn) - - # swap names - orig_grpn.setName(tmp_grpn_name) - copy_grpn.setName(orig_grpn_name) - - # create tmp nk file - # save file to the path - nuke.nodeCopy(path) - - # Clean up - for tn in tmp_nodes: - nuke.delete(tn) - - # rename back to original - orig_grpn.setName(orig_grpn_name) - - if "representations" not in instance.data: - instance.data["representations"] = [] - - # create representation - representation = { - 'name': 'gizmo', - 'ext': 'nk', - 'files': filename, - "stagingDir": stagingdir - } - instance.data["representations"].append(representation) - - self.log.debug("Extracted instance '{}' to: {}".format( - instance.name, path)) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_headless_farm.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_headless_farm.py deleted file mode 100644 index 4721fe4462..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_headless_farm.py +++ /dev/null @@ -1,38 +0,0 @@ -import os -from datetime import datetime -import shutil - -import pyblish.api - -from ayon_core.pipeline import registered_host - - -class ExtractRenderOnFarm(pyblish.api.InstancePlugin): - """Copy the workfile to a timestamped copy.""" - - order = pyblish.api.ExtractorOrder + 0.499 - label = "Extract Render On Farm" - hosts = ["nuke"] - families = ["render_on_farm"] - - settings_category = "nuke" - - def process(self, instance): - 
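Stepping back to the camera extractor: the per-frame loop in `bakeCameraWithAxeses` hides some index gymnastics, because the `world_matrix` knob is sampled transposed relative to the row-major layout `nuke.math.Matrix4` expects. A standalone illustration of that flat-index math, with plain Python lists standing in for the knob (no Nuke required; the example values are illustrative):

```python
def to_math_matrix(knob_values, width=4, height=4):
    """Reorder transposed knob samples into row-major order,
    mirroring `matrix_pointer = z + (y * width)` above."""
    flat = [0.0] * (width * height)
    for y in range(height):        # row of the target matrix
        for z in range(width):     # column of the target matrix
            # the knob is read at y + z * width, i.e. transposed
            flat[z + (y * width)] = knob_values[y + (z * width)]
    return flat


# a transform with translation (10, 20, 30), stored transposed
# the way the knob sampling above implies
knob_values = [1, 0, 0, 0,  0, 1, 0, 0,  0, 0, 1, 0,  10, 20, 30, 1]
row_major = to_math_matrix(knob_values)
# translation lives at flat indices 3, 7 and 11, exactly the values
# the extractor keys onto new_cam_n['translate']
print(row_major[3], row_major[7], row_major[11])  # 10 20 30
```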
if not instance.context.data.get("render_on_farm", False): - return - - host = registered_host() - current_datetime = datetime.now() - formatted_timestamp = current_datetime.strftime("%Y%m%d%H%M%S") - base, ext = os.path.splitext(host.current_file()) - - directory = os.path.join(os.path.dirname(base), "farm_submissions") - if not os.path.exists(directory): - os.makedirs(directory) - - filename = "{}_{}{}".format( - os.path.basename(base), formatted_timestamp, ext - ) - path = os.path.join(directory, filename).replace("\\", "/") - instance.context.data["currentFile"] = path - shutil.copy(host.current_file(), path) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_model.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_model.py deleted file mode 100644 index 58b9d4179b..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_model.py +++ /dev/null @@ -1,110 +0,0 @@ -import os -from pprint import pformat -import nuke -import pyblish.api - -from ayon_core.pipeline import publish -from ayon_nuke.api.lib import ( - maintained_selection, - select_nodes -) - - -class ExtractModel(publish.Extractor): - """ 3D model extractor - """ - label = 'Extract Model' - order = pyblish.api.ExtractorOrder - families = ["model"] - hosts = ["nuke"] - - settings_category = "nuke" - - # presets - write_geo_knobs = [ - ("file_type", "abc"), - ("storageFormat", "Ogawa"), - ("writeGeometries", True), - ("writePointClouds", False), - ("writeAxes", False) - ] - - def process(self, instance): - handle_start = instance.context.data["handleStart"] - handle_end = instance.context.data["handleEnd"] - first_frame = int(nuke.root()["first_frame"].getValue()) - last_frame = int(nuke.root()["last_frame"].getValue()) - - self.log.debug("instance.data: `{}`".format( - pformat(instance.data))) - - rm_nodes = [] - model_node = instance.data["transientData"]["node"] - - self.log.debug("Creating additional nodes for Extract Model") - product_name = instance.data["productName"] - staging_dir = self.staging_dir(instance) - - extension = next((k[1] for k in self.write_geo_knobs - if k[0] == "file_type"), None) - if not extension: - raise RuntimeError( - "Bad config for extension in presets. 
" - "Talk to your supervisor or pipeline admin") - - # create file name and path - filename = product_name + ".{}".format(extension) - file_path = os.path.join(staging_dir, filename).replace("\\", "/") - - with maintained_selection(): - # select model node - select_nodes([model_node]) - - # create write geo node - wg_n = nuke.createNode("WriteGeo") - wg_n["file"].setValue(file_path) - # add path to write to - for k, v in self.write_geo_knobs: - wg_n[k].setValue(v) - rm_nodes.append(wg_n) - - # write out model - nuke.execute( - wg_n, - int(first_frame), - int(last_frame) - ) - # erase additional nodes - for n in rm_nodes: - nuke.delete(n) - - self.log.debug("Filepath: {}".format(file_path)) - - # create representation data - if "representations" not in instance.data: - instance.data["representations"] = [] - - representation = { - 'name': extension, - 'ext': extension, - 'files': filename, - "stagingDir": staging_dir, - "frameStart": first_frame, - "frameEnd": last_frame - } - instance.data["representations"].append(representation) - - instance.data.update({ - "path": file_path, - "outputDir": staging_dir, - "ext": extension, - "handleStart": handle_start, - "handleEnd": handle_end, - "frameStart": first_frame + handle_start, - "frameEnd": last_frame - handle_end, - "frameStartHandle": first_frame, - "frameEndHandle": last_frame, - }) - - self.log.debug("Extracted instance '{0}' to: {1}".format( - instance.name, file_path)) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_ouput_node.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_ouput_node.py deleted file mode 100644 index 52072cddc5..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_ouput_node.py +++ /dev/null @@ -1,45 +0,0 @@ -import nuke -import pyblish.api -from ayon_nuke.api.lib import maintained_selection - - -class CreateOutputNode(pyblish.api.ContextPlugin): - """Adding output node for each output write node - So when latly user will want to Load .nk as LifeGroup or Precomp - Nuke will not complain about missing Output node - """ - label = 'Output Node Create' - order = pyblish.api.ExtractorOrder + 0.4 - families = ["workfile"] - hosts = ["nuke"] - - settings_category = "nuke" - - def process(self, context): - # capture selection state - with maintained_selection(): - - active_node = [ - inst.data.get("transientData", {}).get("node") - for inst in context - if inst.data.get("transientData", {}).get("node") - if inst.data.get( - "transientData", {}).get("node").Class() != "Root" - ] - - if active_node: - active_node = active_node.pop() - self.log.debug("Active node: {}".format(active_node)) - active_node['selected'].setValue(True) - - # select only instance render node - output_node = nuke.createNode("Output") - - # deselect all and select the original selection - output_node['selected'].setValue(False) - - # save script - nuke.scriptSave() - - # add node to instance node list - context.data["outputNode"] = output_node diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_output_directory.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_output_directory.py deleted file mode 100644 index 45156ca9ae..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_output_directory.py +++ /dev/null @@ -1,26 +0,0 @@ -import os - -import pyblish.api - - -class ExtractOutputDirectory(pyblish.api.InstancePlugin): - """Extracts the output path for any collection or single output_path.""" - - order = pyblish.api.ExtractorOrder - 0.05 
- label = "Output Directory" - optional = True - - settings_category = "nuke" - - def process(self, instance): - - path = None - - if "output_path" in instance.data.keys(): - path = instance.data["path"] - - if not path: - return - - if not os.path.exists(os.path.dirname(path)): - os.makedirs(os.path.dirname(path)) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_render_local.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_render_local.py deleted file mode 100644 index c865684e7a..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_render_local.py +++ /dev/null @@ -1,218 +0,0 @@ -import os -import shutil - -import pyblish.api -import clique -import nuke -from ayon_nuke import api as napi -from ayon_core.pipeline import publish -from ayon_core.lib import collect_frames - - -class NukeRenderLocal(publish.Extractor, - publish.ColormanagedPyblishPluginMixin): - """Render the current Nuke composition locally. - - Extract the result of savers by starting a comp render - This will run the local render of Fusion. - - Allows to use last published frames and overwrite only specific ones - (set in instance.data.get("frames_to_fix")) - """ - - order = pyblish.api.ExtractorOrder - label = "Render Local" - hosts = ["nuke"] - families = ["render.local", "prerender.local", "image.local"] - - settings_category = "nuke" - - def process(self, instance): - child_nodes = ( - instance.data.get("transientData", {}).get("childNodes") - or instance - ) - - node = None - for x in child_nodes: - if x.Class() == "Write": - node = x - - self.log.debug("instance collected: {}".format(instance.data)) - - node_product_name = instance.data.get("name", None) - - first_frame = instance.data.get("frameStartHandle", None) - last_frame = instance.data.get("frameEndHandle", None) - - filenames = [] - node_file = node["file"] - # Collect expected filepaths for each frame - # - for cases that output is still image is first created set of - # paths which is then sorted and converted to list - expected_paths = list(sorted({ - node_file.evaluate(frame) - for frame in range(first_frame, last_frame + 1) - })) - # Extract only filenames for representation - filenames.extend([ - os.path.basename(filepath) - for filepath in expected_paths - ]) - - # Ensure output directory exists. 
- out_dir = os.path.dirname(expected_paths[0]) - if not os.path.exists(out_dir): - os.makedirs(out_dir) - - frames_to_render = [(first_frame, last_frame)] - - frames_to_fix = instance.data.get("frames_to_fix") - if instance.data.get("last_version_published_files") and frames_to_fix: - frames_to_render = self._get_frames_to_render(frames_to_fix) - anatomy = instance.context.data["anatomy"] - self._copy_last_published(anatomy, instance, out_dir, - filenames) - - for render_first_frame, render_last_frame in frames_to_render: - - self.log.info("Starting render") - self.log.info("Start frame: {}".format(render_first_frame)) - self.log.info("End frame: {}".format(render_last_frame)) - - # Render frames - nuke.execute( - str(node_product_name), - int(render_first_frame), - int(render_last_frame) - ) - - ext = node["file_type"].value() - colorspace = napi.get_colorspace_from_node(node) - - if "representations" not in instance.data: - instance.data["representations"] = [] - - if len(filenames) == 1: - repre = { - 'name': ext, - 'ext': ext, - 'files': filenames[0], - "stagingDir": out_dir - } - else: - repre = { - 'name': ext, - 'ext': ext, - 'frameStart': ( - "{{:0>{}}}" - .format(len(str(last_frame))) - .format(first_frame) - ), - 'files': filenames, - "stagingDir": out_dir - } - - # inject colorspace data - self.set_representation_colorspace( - repre, instance.context, - colorspace=colorspace - ) - - instance.data["representations"].append(repre) - - self.log.debug("Extracted instance '{0}' to: {1}".format( - instance.name, - out_dir - )) - - families = instance.data["families"] - anatomy_data = instance.data["anatomyData"] - # redefinition of families - if "render.local" in families: - instance.data["family"] = "render" - instance.data["productType"] = "render" - families.remove("render.local") - families.insert(0, "render2d") - anatomy_data["family"] = "render" - anatomy_data["product"]["type"] = "render" - elif "prerender.local" in families: - instance.data["family"] = "prerender" - instance.data["productType"] = "prerender" - families.remove("prerender.local") - families.insert(0, "prerender") - anatomy_data["family"] = "prerender" - anatomy_data["product"]["type"] = "prerender" - elif "image.local" in families: - instance.data["family"] = "image" - instance.data["productType"] = "image" - families.remove("image.local") - anatomy_data["family"] = "image" - anatomy_data["product"]["type"] = "image" - instance.data["families"] = families - - collections, remainder = clique.assemble(filenames) - self.log.debug('collections: {}'.format(str(collections))) - - if collections: - collection = collections[0] - instance.data['collection'] = collection - - self.log.info('Finished render') - - self.log.debug("_ instance.data: {}".format(instance.data)) - - def _copy_last_published(self, anatomy, instance, out_dir, - expected_filenames): - """Copies last published files to temporary out_dir. - - These are base of files which will be extended/fixed for specific - frames. - Renames published file to expected file name based on frame, eg. 
- test_project_test_asset_product_v005.1001.exr > new_render.1001.exr - """ - last_published = instance.data["last_version_published_files"] - last_published_and_frames = collect_frames(last_published) - - expected_and_frames = collect_frames(expected_filenames) - frames_and_expected = {v: k for k, v in expected_and_frames.items()} - for file_path, frame in last_published_and_frames.items(): - file_path = anatomy.fill_root(file_path) - if not os.path.exists(file_path): - continue - target_file_name = frames_and_expected.get(frame) - if not target_file_name: - continue - - out_path = os.path.join(out_dir, target_file_name) - self.log.debug("Copying '{}' -> '{}'".format(file_path, out_path)) - shutil.copy(file_path, out_path) - - # TODO shouldn't this be uncommented - # instance.context.data["cleanupFullPaths"].append(out_path) - - def _get_frames_to_render(self, frames_to_fix): - """Return list of frame range tuples to render - - Args: - frames_to_fix (str): specific or range of frames to be rerendered - (1005, 1009-1010) - Returns: - (list): [(1005, 1005), (1009-1010)] - """ - frames_to_render = [] - - for frame_range in frames_to_fix.split(","): - if frame_range.isdigit(): - render_first_frame = frame_range - render_last_frame = frame_range - elif '-' in frame_range: - frames = frame_range.split('-') - render_first_frame = int(frames[0]) - render_last_frame = int(frames[1]) - else: - raise ValueError("Wrong format of frames to fix {}" - .format(frames_to_fix)) - frames_to_render.append((render_first_frame, - render_last_frame)) - return frames_to_render diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_review_data.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_review_data.py deleted file mode 100644 index 856616898b..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_review_data.py +++ /dev/null @@ -1,50 +0,0 @@ -import os -from pprint import pformat -import pyblish.api - -from ayon_core.pipeline import publish - - -class ExtractReviewData(publish.Extractor): - """Extracts review tag into available representation - """ - - order = pyblish.api.ExtractorOrder + 0.01 - # order = pyblish.api.CollectorOrder + 0.499 - label = "Extract Review Data" - - families = ["review"] - hosts = ["nuke"] - - settings_category = "nuke" - - def process(self, instance): - fpath = instance.data["path"] - ext = os.path.splitext(fpath)[-1][1:] - - representations = instance.data.get("representations", []) - - # review can be removed since `ProcessSubmittedJobOnFarm` will create - # reviewable representation if needed - if ( - instance.data.get("farm") - and "review" in instance.data["families"] - ): - instance.data["families"].remove("review") - - # iterate representations and add `review` tag - for repre in representations: - if ext != repre["ext"]: - continue - - if not repre.get("tags"): - repre["tags"] = [] - - if "review" not in repre["tags"]: - repre["tags"].append("review") - - self.log.debug("Matching representation: {}".format( - pformat(repre) - )) - - instance.data["representations"] = representations diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_review_data_lut.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_review_data_lut.py deleted file mode 100644 index d3377807ea..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_review_data_lut.py +++ /dev/null @@ -1,64 +0,0 @@ -import os -import pyblish.api - -from ayon_core.pipeline import publish -from ayon_nuke.api import plugin 
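The `frames_to_fix` grammar parsed by `_get_frames_to_render` above accepts single frames and inclusive ranges. A self-contained sketch of the same parser; note that the plugin keeps single frames as strings, while this version casts everything to `int` so the tuples are homogeneous:

```python
def parse_frames_to_fix(frames_to_fix):
    """Parse '1005,1009-1010' into [(1005, 1005), (1009, 1010)]."""
    frames_to_render = []
    for token in frames_to_fix.split(","):
        token = token.strip()
        if token.isdigit():
            first = last = int(token)
        elif "-" in token:
            start, end = token.split("-", 1)
            first, last = int(start), int(end)
        else:
            raise ValueError(
                "Wrong format of frames to fix {}".format(frames_to_fix))
        frames_to_render.append((first, last))
    return frames_to_render


print(parse_frames_to_fix("1005, 1009-1010"))  # [(1005, 1005), (1009, 1010)]
```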
-from ayon_nuke.api.lib import maintained_selection - - -class ExtractReviewDataLut(publish.Extractor): - """Extracts movie and thumbnail with baked in luts - - must be run after extract_render_local.py - - """ - - order = pyblish.api.ExtractorOrder + 0.005 - label = "Extract Review Data Lut" - - families = ["review"] - hosts = ["nuke"] - - settings_category = "nuke" - - def process(self, instance): - self.log.debug("Creating staging dir...") - if "representations" in instance.data: - staging_dir = instance.data[ - "representations"][0]["stagingDir"].replace("\\", "/") - instance.data["stagingDir"] = staging_dir - instance.data["representations"][0]["tags"] = ["review"] - else: - instance.data["representations"] = [] - # get output path - render_path = instance.data['path'] - staging_dir = os.path.normpath(os.path.dirname(render_path)) - instance.data["stagingDir"] = staging_dir - - self.log.debug( - "StagingDir `{0}`...".format(instance.data["stagingDir"])) - - # generate data - with maintained_selection(): - exporter = plugin.ExporterReviewLut( - self, instance - ) - data = exporter.generate_lut() - - # assign to representations - instance.data["lutPath"] = os.path.join( - exporter.stagingDir, exporter.file).replace("\\", "/") - instance.data["representations"] += data["representations"] - - # review can be removed since `ProcessSubmittedJobOnFarm` will create - # reviewable representation if needed - if ( - instance.data.get("farm") - and "review" in instance.data["families"] - ): - instance.data["families"].remove("review") - - self.log.debug( - "_ lutPath: {}".format(instance.data["lutPath"])) - self.log.debug( - "_ representations: {}".format(instance.data["representations"])) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_review_intermediates.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_review_intermediates.py deleted file mode 100644 index 48c9988c5b..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_review_intermediates.py +++ /dev/null @@ -1,161 +0,0 @@ -import os -import re -from pprint import pformat -import pyblish.api - -from ayon_core.pipeline import publish -from ayon_nuke.api import plugin -from ayon_nuke.api.lib import maintained_selection - - -class ExtractReviewIntermediates(publish.Extractor): - """Extracting intermediate videos or sequences with - thumbnail for transcoding. 
- - must be run after extract_render_local.py - - """ - - order = pyblish.api.ExtractorOrder + 0.01 - label = "Extract Review Intermediates" - - families = ["review"] - hosts = ["nuke"] - - settings_category = "nuke" - - # presets - viewer_lut_raw = None - outputs = {} - - def process(self, instance): - # TODO 'families' should not be included for filtering of outputs - families = set(instance.data["families"]) - - # Add product type to families - families.add(instance.data["productType"]) - - task_type = instance.context.data["taskType"] - product_name = instance.data["productName"] - self.log.debug("Creating staging dir...") - - if "representations" not in instance.data: - instance.data["representations"] = [] - - staging_dir = os.path.normpath( - os.path.dirname(instance.data["path"])) - - instance.data["stagingDir"] = staging_dir - - self.log.debug( - "StagingDir `{0}`...".format(instance.data["stagingDir"])) - - self.log.debug("Outputs: {}".format(self.outputs)) - - # generate data - with maintained_selection(): - generated_repres = [] - for o_data in self.outputs: - o_name = o_data["name"] - self.log.debug( - "o_name: {}, o_data: {}".format(o_name, pformat(o_data))) - f_product_types = o_data["filter"]["product_types"] - f_task_types = o_data["filter"]["task_types"] - product_names = o_data["filter"]["product_names"] - - self.log.debug( - "f_product_types `{}` > families: {}".format( - f_product_types, families)) - - self.log.debug( - "f_task_types `{}` > task_type: {}".format( - f_task_types, task_type)) - - self.log.debug( - "product_names `{}` > product: {}".format( - product_names, product_name)) - - # test if family found in context - # using intersection to make sure all defined - # families are present in combination - if ( - f_product_types - and not families.intersection(f_product_types) - ): - continue - - # test task types from filter - if f_task_types and task_type not in f_task_types: - continue - - # test products from filter - if product_names and not any( - re.search(p, product_name) for p in product_names - ): - continue - - self.log.debug( - "Baking output `{}` with settings: {}".format( - o_name, o_data) - ) - - # check if settings have more then one preset - # so we dont need to add outputName to representation - # in case there is only one preset - multiple_presets = len(self.outputs) > 1 - - # adding bake presets to instance data for other plugins - if not instance.data.get("bakePresets"): - instance.data["bakePresets"] = {} - # add preset to bakePresets - instance.data["bakePresets"][o_name] = o_data - - # create exporter instance - exporter = plugin.ExporterReviewMov( - self, instance, o_name, o_data["extension"], - multiple_presets) - - delete = not o_data.get("publish", False) - - if instance.data.get("farm"): - if "review" in instance.data["families"]: - instance.data["families"].remove("review") - - data = exporter.generate_mov( - farm=True, delete=delete, **o_data - ) - - self.log.debug( - "_ data: {}".format(data)) - - if not instance.data.get("bakingNukeScripts"): - instance.data["bakingNukeScripts"] = [] - - instance.data["bakingNukeScripts"].append({ - "bakeRenderPath": data.get("bakeRenderPath"), - "bakeScriptPath": data.get("bakeScriptPath"), - "bakeWriteNodeName": data.get("bakeWriteNodeName") - }) - else: - data = exporter.generate_mov(delete=delete, **o_data) - - # add representation generated by exporter - generated_repres.extend(data["representations"]) - self.log.debug( - "__ generated_repres: {}".format(generated_repres)) - - if 
generated_repres: - # assign to representations - instance.data["representations"] += generated_repres - instance.data["useSequenceForReview"] = False - else: - instance.data["families"].remove("review") - self.log.debug( - "Removing `review` from families. " - "Not available baking profile." - ) - self.log.debug(instance.data["families"]) - - self.log.debug( - "_ representations: {}".format( - instance.data["representations"])) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_script_save.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_script_save.py deleted file mode 100644 index ea584b6529..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_script_save.py +++ /dev/null @@ -1,16 +0,0 @@ -import nuke -import pyblish.api - - -class ExtractScriptSave(pyblish.api.InstancePlugin): - """Save current Nuke workfile script""" - label = 'Script Save' - order = pyblish.api.ExtractorOrder - 0.1 - hosts = ["nuke"] - - settings_category = "nuke" - - def process(self, instance): - - self.log.debug('Saving current script') - nuke.scriptSave() diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_slate_frame.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_slate_frame.py deleted file mode 100644 index 47750ea637..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/extract_slate_frame.py +++ /dev/null @@ -1,366 +0,0 @@ -import os -from pprint import pformat -import nuke -import copy - -import pyblish.api -import six - -from ayon_core.pipeline import publish -from ayon_nuke.api import ( - maintained_selection, - duplicate_node, - get_view_process_node -) - - -class ExtractSlateFrame(publish.Extractor): - """Extracts movie and thumbnail with baked in luts - - must be run after extract_render_local.py - - """ - - order = pyblish.api.ExtractorOrder + 0.011 - label = "Extract Slate Frame" - - families = ["slate"] - hosts = ["nuke"] - - settings_category = "nuke" - - # Settings values - key_value_mapping = { - "f_submission_note": { - "enabled": True, "template": "{comment}" - }, - "f_submitting_for": { - "enabled": True, "template": "{intent[value]}" - }, - "f_vfx_scope_of_work": { - "enabled": False, "template": "" - } - } - - def process(self, instance): - - if "representations" not in instance.data: - instance.data["representations"] = [] - - self._create_staging_dir(instance) - - with maintained_selection(): - self.log.debug("instance: {}".format(instance)) - self.log.debug("instance.data[families]: {}".format( - instance.data["families"])) - - if instance.data.get("bakePresets"): - for o_name, o_data in instance.data["bakePresets"].items(): - self.log.debug("_ o_name: {}, o_data: {}".format( - o_name, pformat(o_data))) - self.render_slate( - instance, - o_name, - o_data["bake_viewer_process"], - o_data["bake_viewer_input_process"] - ) - else: - # backward compatibility - self.render_slate(instance) - - # also render image to sequence - self._render_slate_to_sequence(instance) - - def _create_staging_dir(self, instance): - - self.log.debug("Creating staging dir...") - - staging_dir = os.path.normpath( - os.path.dirname(instance.data["path"])) - - instance.data["stagingDir"] = staging_dir - - self.log.debug( - "StagingDir `{0}`...".format(instance.data["stagingDir"])) - - def _check_frames_exists(self, instance): - # rendering path from group write node - fpath = instance.data["path"] - - # instance frame range with handles - first = instance.data["frameStartHandle"] - last = 
instance.data["frameEndHandle"] - - padding = fpath.count('#') - - test_path_template = fpath - if padding: - repl_string = "#" * padding - test_path_template = fpath.replace( - repl_string, "%0{}d".format(padding)) - - for frame in range(first, last + 1): - test_file = test_path_template % frame - if not os.path.exists(test_file): - self.log.debug("__ test_file: `{}`".format(test_file)) - return None - - return True - - def render_slate( - self, - instance, - output_name=None, - bake_viewer_process=True, - bake_viewer_input_process=True - ): - """Slate frame renderer - - Args: - instance (PyblishInstance): Pyblish instance with product data - output_name (str, optional): - Slate variation name. Defaults to None. - bake_viewer_process (bool, optional): - Switch for viewer profile baking. Defaults to True. - bake_viewer_input_process (bool, optional): - Switch for input process node baking. Defaults to True. - """ - slate_node = instance.data["slateNode"] - - # rendering path from group write node - fpath = instance.data["path"] - - # instance frame range with handles - first_frame = instance.data["frameStartHandle"] - last_frame = instance.data["frameEndHandle"] - - # fill slate node with comments - self.add_comment_slate_node(instance, slate_node) - - # solve output name if any is set - _output_name = output_name or "" - if _output_name: - _output_name = "_" + _output_name - - slate_first_frame = first_frame - 1 - - collection = instance.data.get("collection", None) - - if collection: - # get path - fname = os.path.basename(collection.format( - "{head}{padding}{tail}")) - fhead = collection.format("{head}") - else: - fname = os.path.basename(fpath) - fhead = os.path.splitext(fname)[0] + "." - - if "#" in fhead: - fhead = fhead.replace("#", "")[:-1] - - self.log.debug("__ first_frame: {}".format(first_frame)) - self.log.debug("__ slate_first_frame: {}".format(slate_first_frame)) - - above_slate_node = slate_node.dependencies().pop() - # fallback if files does not exists - if self._check_frames_exists(instance): - # Read node - r_node = nuke.createNode("Read") - r_node["file"].setValue(fpath) - r_node["first"].setValue(first_frame) - r_node["origfirst"].setValue(first_frame) - r_node["last"].setValue(last_frame) - r_node["origlast"].setValue(last_frame) - r_node["colorspace"].setValue(instance.data["colorspace"]) - previous_node = r_node - temporary_nodes = [previous_node] - - # adding copy metadata node for correct frame metadata - cm_node = nuke.createNode("CopyMetaData") - cm_node.setInput(0, previous_node) - cm_node.setInput(1, above_slate_node) - previous_node = cm_node - temporary_nodes.append(cm_node) - - else: - previous_node = above_slate_node - temporary_nodes = [] - - # only create colorspace baking if toggled on - if bake_viewer_process: - if bake_viewer_input_process: - # get input process and connect it to baking - ipn = get_view_process_node() - if ipn is not None: - ipn.setInput(0, previous_node) - previous_node = ipn - temporary_nodes.append(ipn) - - # add duplicate slate node and connect to previous - duply_slate_node = duplicate_node(slate_node) - duply_slate_node.setInput(0, previous_node) - previous_node = duply_slate_node - temporary_nodes.append(duply_slate_node) - - # add viewer display transformation node - dag_node = nuke.createNode("OCIODisplay") - dag_node.setInput(0, previous_node) - previous_node = dag_node - temporary_nodes.append(dag_node) - - else: - # add duplicate slate node and connect to previous - duply_slate_node = duplicate_node(slate_node) - 
duply_slate_node.setInput(0, previous_node) - previous_node = duply_slate_node - temporary_nodes.append(duply_slate_node) - - # create write node - write_node = nuke.createNode("Write") - file = fhead[:-1] + _output_name + "_slate.png" - path = os.path.join( - instance.data["stagingDir"], file).replace("\\", "/") - - # add slate path to `slateFrames` instance data attr - if not instance.data.get("slateFrames"): - instance.data["slateFrames"] = {} - - instance.data["slateFrames"][output_name or "*"] = path - - # create write node - write_node["file"].setValue(path) - write_node["file_type"].setValue("png") - write_node["raw"].setValue(1) - write_node.setInput(0, previous_node) - temporary_nodes.append(write_node) - - # Render frames - nuke.execute( - write_node.name(), int(slate_first_frame), int(slate_first_frame)) - - # Clean up - for node in temporary_nodes: - nuke.delete(node) - - def _render_slate_to_sequence(self, instance): - # set slate frame - first_frame = instance.data["frameStartHandle"] - last_frame = instance.data["frameEndHandle"] - slate_first_frame = first_frame - 1 - - # render slate as sequence frame - nuke.execute( - instance.data["name"], - int(slate_first_frame), - int(slate_first_frame) - ) - - # Add file to representation files - # - get write node - write_node = instance.data["transientData"]["writeNode"] - # - evaluate filepaths for first frame and slate frame - first_filename = os.path.basename( - write_node["file"].evaluate(first_frame)) - slate_filename = os.path.basename( - write_node["file"].evaluate(slate_first_frame)) - - # Find matching representation based on first filename - matching_repre = None - is_sequence = None - for repre in instance.data["representations"]: - files = repre["files"] - if ( - not isinstance(files, six.string_types) - and first_filename in files - ): - matching_repre = repre - is_sequence = True - break - - elif files == first_filename: - matching_repre = repre - is_sequence = False - break - - if not matching_repre: - self.log.info( - "Matching representation was not found." - " Representation files were not filled with slate." 
- ) - return - - # Add frame to matching representation files - if not is_sequence: - matching_repre["files"] = [first_filename, slate_filename] - elif slate_filename not in matching_repre["files"]: - matching_repre["files"].insert(0, slate_filename) - matching_repre["frameStart"] = ( - "{{:0>{}}}" - .format(len(str(last_frame))) - .format(slate_first_frame) - ) - self.log.debug( - "__ matching_repre: {}".format(pformat(matching_repre))) - - data = matching_repre.get("data", {}) - data["slateFrames"] = 1 - matching_repre["data"] = data - - self.log.info("Added slate frame to representation files") - - def add_comment_slate_node(self, instance, node): - - comment = instance.data["comment"] - intent = instance.context.data.get("intent") - if not isinstance(intent, dict): - intent = { - "label": intent, - "value": intent - } - - fill_data = copy.deepcopy(instance.data["anatomyData"]) - fill_data.update({ - "custom": copy.deepcopy( - instance.data.get("customData") or {} - ), - "comment": comment, - "intent": intent - }) - - for key, _values in self.key_value_mapping.items(): - if not _values["enabled"]: - self.log.debug("Key \"{}\" is disabled".format(key)) - continue - - template = _values["template"] - try: - value = template.format(**fill_data) - - except ValueError: - self.log.warning( - "Couldn't fill template \"{}\" with data: {}".format( - template, fill_data - ), - exc_info=True - ) - continue - - except KeyError: - self.log.warning( - ( - "Template contains unknown key." - " Template \"{}\" Data: {}" - ).format(template, fill_data), - exc_info=True - ) - continue - - try: - node[key].setValue(value) - self.log.debug("Change key \"{}\" to value \"{}\"".format( - key, value - )) - except NameError: - self.log.warning(( - "Failed to set value \"{0}\" on node attribute \"{0}\"" - ).format(value)) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/help/validate_asset_context.xml b/server_addon/nuke/client/ayon_nuke/plugins/publish/help/validate_asset_context.xml deleted file mode 100644 index 1e7d340a13..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/help/validate_asset_context.xml +++ /dev/null @@ -1,31 +0,0 @@ - - - - Folder path - -## Publishing to a different folder context - -There are publish instances present which are publishing into a different folder than your current context. - -Usually this is not what you want but there can be cases where you might want to publish into another folder/shot or task. - -If that's the case you can disable the validation on the instance to ignore it. - -The wrong node's name is: \`{node_name}\` - -### Correct context keys and values: - -\`{correct_values}\` - -### Wrong keys and values: - -\`{wrong_values}\`. - - -## How to repair? - -1. Use \"Repair\" button. -2. Hit Reload button on the publisher. - - - diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/help/validate_backdrop.xml b/server_addon/nuke/client/ayon_nuke/plugins/publish/help/validate_backdrop.xml deleted file mode 100644 index ab1b650773..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/help/validate_backdrop.xml +++ /dev/null @@ -1,36 +0,0 @@ - - - - Found multiple outputs - -## Invalid output amount - -Backdrop is having more than one outgoing connections. - -### How to repair? - -1. Use button `Center node in node graph` and navigate to the backdrop. -2. Reorganize nodes the way only one outgoing connection is present. -3. Hit reload button on the publisher. - - -### How could this happen? 
- -More than one of the nodes found above the backdrop is linked downstream, or a node has more than one output connection linked downstream. - - - - Empty backdrop - -## Invalid empty backdrop - -Backdrop is empty and no nodes are found above it. - -### How to repair? - -1. Use button `Center node in node graph` and navigate to the backdrop. -2. Add any node above it or delete it. -3. Hit reload button on the publisher. - - - \ No newline at end of file diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/help/validate_gizmo.xml b/server_addon/nuke/client/ayon_nuke/plugins/publish/help/validate_gizmo.xml deleted file mode 100644 index f39a41a4f9..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/help/validate_gizmo.xml +++ /dev/null @@ -1,36 +0,0 @@ - - - - Found multiple outputs - -## Invalid amount of Output nodes - -Group node `{node_name}` has more than one Output node. - -### How to repair? - -1. Use button `Open Group`. -2. Remove redundant Output node. -3. Hit reload button on the publisher. - - -### How could this happen? - -Perhaps you accidentally created more than one Output node. - - - - Missing Input nodes - -## Missing Input nodes - -Make sure there is at least one connected Input node inside the group node with name `{node_name}`. - -### How to repair? - -1. Use button `Open Group`. -2. Add at least one Input node and connect it to other nodes. -3. Hit reload button on the publisher. - - - \ No newline at end of file diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/help/validate_knobs.xml b/server_addon/nuke/client/ayon_nuke/plugins/publish/help/validate_knobs.xml deleted file mode 100644 index 76c184f653..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/help/validate_knobs.xml +++ /dev/null @@ -1,18 +0,0 @@ - - - - Knobs value - -## Invalid node's knobs values - -The following node knobs need to be repaired: - -{invalid_items} - -### How to repair? - -1. Use Repair button. -2. Hit Reload button on the publisher. - - - \ No newline at end of file diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/help/validate_output_resolution.xml b/server_addon/nuke/client/ayon_nuke/plugins/publish/help/validate_output_resolution.xml deleted file mode 100644 index 08a88a993e..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/help/validate_output_resolution.xml +++ /dev/null @@ -1,16 +0,0 @@ - - - - Output format - -## Invalid format setting - -Either the Reformat node inside the render group is missing, or the Reformat node's output format knob is not set to `root.format`. - -### How to repair? - -1. Use Repair button. -2. Hit Reload button on the publisher. - - - \ No newline at end of file diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/help/validate_proxy_mode.xml b/server_addon/nuke/client/ayon_nuke/plugins/publish/help/validate_proxy_mode.xml deleted file mode 100644 index 6fe5d5d43e..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/help/validate_proxy_mode.xml +++ /dev/null @@ -1,16 +0,0 @@ - - - - Proxy mode - -## Invalid proxy mode value - -Nuke is set to use Proxy. This is not supported by the publisher. - -### How to repair? - -1. Use Repair button. -2. Hit Reload button on the publisher.
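These help files are addressed from validators by an error key and filled through `formatting_data` (the XML markup itself was stripped in this diff view; the `multiple_outputs` key is taken from `ValidateBackdrop` later in this diff). A condensed sketch of the routing pattern, with a hypothetical plugin name and an illustrative output-counting check:

```python
import pyblish.api
from ayon_core.pipeline.publish import PublishXmlValidationError


class ValidateGroupOutputs(pyblish.api.InstancePlugin):
    """Sketch only: route a failure to one section of the help XML."""

    order = pyblish.api.ValidatorOrder
    label = "Validate Group Outputs"
    hosts = ["nuke"]
    families = ["gizmo"]

    def process(self, instance):
        node = instance.data["transientData"]["node"]
        outputs = [
            child for child in node.nodes()
            if child.Class() == "Output"
        ]
        if len(outputs) > 1:
            raise PublishXmlValidationError(
                self,
                "Group node {} has multiple Output nodes".format(
                    node.name()),
                # the key selects the matching error section of this
                # plugin's help XML; the dict fills its {node_name}
                "multiple_outputs",
                formatting_data={"node_name": node.name()},
            )
```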
- - - \ No newline at end of file diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/help/validate_rendered_frames.xml b/server_addon/nuke/client/ayon_nuke/plugins/publish/help/validate_rendered_frames.xml deleted file mode 100644 index 434081c269..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/help/validate_rendered_frames.xml +++ /dev/null @@ -1,17 +0,0 @@ - - - - Rendered Frames - -## Missing Rendered Frames - -Render node "{node_name}" is set to "Use existing frames", but frames are missing. - -### How to repair? - -1. Use Repair button. -2. Set a different target. -3. Hit Reload button on the publisher. - - - \ No newline at end of file diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/help/validate_script_attributes.xml b/server_addon/nuke/client/ayon_nuke/plugins/publish/help/validate_script_attributes.xml deleted file mode 100644 index 871fc629ce..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/help/validate_script_attributes.xml +++ /dev/null @@ -1,18 +0,0 @@ - - - - Script attributes - -## Invalid Script attributes - -The following script root attributes need to be fixed: - -{failed_attributes} - -### How to repair? - -1. Use Repair. -2. Hit Reload button on the publisher. - - - \ No newline at end of file diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/help/validate_write_nodes.xml b/server_addon/nuke/client/ayon_nuke/plugins/publish/help/validate_write_nodes.xml deleted file mode 100644 index 96aa6e4494..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/help/validate_write_nodes.xml +++ /dev/null @@ -1,32 +0,0 @@ - - - - Knobs values - - ## Invalid node's knobs values - - The following write node knobs need to be repaired: - - {xml_msg} - - ### How to repair? - - 1. Use Repair button. - 2. Hit Reload button on the publisher. - - - - Legacy knob types - - ## Knobs are in obsolete configuration - - Settings need to be fixed. - - ### How to repair? - - Contact your supervisor or fix it in project settings at - 'project_settings/nuke/imageio/nodes/required_nodes' at knobs. - Each '__legacy__' type has to be defined according to its type.
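The knob-centric validators behind these help texts boil down to diffing current knob values against required settings and reporting the differences through placeholders like `{invalid_items}` and `{xml_msg}`. A plain-data sketch of that diff (names hypothetical; in the real plugins the current values come from the Nuke node and the expected ones from project settings):

```python
def invalid_knobs(node_knobs, required_knobs):
    """Return (knob, current, expected) triples for mismatched knobs."""
    return [
        (knob, node_knobs.get(knob), expected)
        for knob, expected in required_knobs.items()
        if node_knobs.get(knob) != expected
    ]


print(invalid_knobs(
    {"file_type": "exr", "channels": "rgb"},
    {"file_type": "exr", "channels": "rgba"},
))  # [('channels', 'rgb', 'rgba')]
```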
- - - \ No newline at end of file diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/increment_script_version.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/increment_script_version.py deleted file mode 100644 index 36659aa2d2..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/increment_script_version.py +++ /dev/null @@ -1,26 +0,0 @@ -import nuke -import pyblish.api - - -class IncrementScriptVersion(pyblish.api.ContextPlugin): - """Increment current script version.""" - - order = pyblish.api.IntegratorOrder + 0.9 - label = "Increment Script Version" - optional = True - families = ["workfile"] - hosts = ["nuke"] - - settings_category = "nuke" - - def process(self, context): - if not context.data.get("increment_script_version", True): - return - - assert all(result["success"] for result in context.data["results"]), ( - "Publishing not successful so version is not increased.") - - from ayon_core.lib import version_up - path = context.data["currentFile"] - nuke.scriptSaveAs(version_up(path)) - self.log.info('Incrementing script version') diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/remove_ouput_node.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/remove_ouput_node.py deleted file mode 100644 index 4c17cb5f56..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/remove_ouput_node.py +++ /dev/null @@ -1,24 +0,0 @@ -import nuke -import pyblish.api - - -class RemoveOutputNode(pyblish.api.ContextPlugin): - """Removing output node for each output write node - - """ - label = 'Output Node Remove' - order = pyblish.api.IntegratorOrder + 0.4 - families = ["workfile"] - hosts = ["nuke"] - - settings_category = "nuke" - - def process(self, context): - try: - output_node = context.data["outputNode"] - name = output_node["name"].value() - self.log.info("Removing output node: '{}'".format(name)) - - nuke.delete(output_node) - except Exception: - return diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/validate_asset_context.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/validate_asset_context.py deleted file mode 100644 index 903648fd1b..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/validate_asset_context.py +++ /dev/null @@ -1,114 +0,0 @@ -# -*- coding: utf-8 -*- -"""Validate if instance folder is the same as context folder.""" -from __future__ import absolute_import - -import pyblish.api - -from ayon_core.pipeline.publish import ( - RepairAction, - ValidateContentsOrder, - PublishXmlValidationError, - OptionalPyblishPluginMixin -) -from ayon_nuke.api import SelectInstanceNodeAction - - -class ValidateCorrectAssetContext( - pyblish.api.InstancePlugin, - OptionalPyblishPluginMixin -): - """Validator to check if instance folder context match context folder. - - When working in per-shot style you always publish data in context of - current folder (shot). This validator checks if this is so. It is optional - so it can be disabled when needed. - - Checking `folderPath` and `task` keys. - """ - order = ValidateContentsOrder - label = "Validate Folder context" - hosts = ["nuke"] - actions = [ - RepairAction, - SelectInstanceNodeAction - ] - optional = True - - settings_category = "nuke" - - @classmethod - def apply_settings(cls, project_settings): - """Apply deprecated settings from project settings. 
- """ - nuke_publish = project_settings["nuke"]["publish"] - if "ValidateCorrectAssetName" in nuke_publish: - settings = nuke_publish["ValidateCorrectAssetName"] - else: - settings = nuke_publish["ValidateCorrectAssetContext"] - - cls.enabled = settings["enabled"] - cls.optional = settings["optional"] - cls.active = settings["active"] - - def process(self, instance): - if not self.is_active(instance.data): - return - - invalid_keys = self.get_invalid(instance) - - if not invalid_keys: - return - - message_values = { - "node_name": instance.data["transientData"]["node"].name(), - "correct_values": ", ".join([ - "{} > {}".format(_key, instance.context.data[_key]) - for _key in invalid_keys - ]), - "wrong_values": ", ".join([ - "{} > {}".format(_key, instance.data.get(_key)) - for _key in invalid_keys - ]) - } - - msg = ( - "Instance `{node_name}` has wrong context keys:\n" - "Correct: `{correct_values}` | Wrong: `{wrong_values}`").format( - **message_values) - - self.log.debug(msg) - - raise PublishXmlValidationError( - self, msg, formatting_data=message_values - ) - - @classmethod - def get_invalid(cls, instance): - """Get invalid keys from instance data and context data.""" - - invalid_keys = [] - testing_keys = ["folderPath", "task"] - for _key in testing_keys: - if _key not in instance.data: - invalid_keys.append(_key) - continue - if instance.data[_key] != instance.context.data[_key]: - invalid_keys.append(_key) - - return invalid_keys - - @classmethod - def repair(cls, instance): - """Repair instance data with context data.""" - invalid_keys = cls.get_invalid(instance) - - create_context = instance.context.data["create_context"] - - instance_id = instance.data.get("instance_id") - created_instance = create_context.get_instance_by_id( - instance_id - ) - for _key in invalid_keys: - created_instance[_key] = instance.context.data[_key] - - create_context.save_changes() diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/validate_backdrop.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/validate_backdrop.py deleted file mode 100644 index f7b94e0c82..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/validate_backdrop.py +++ /dev/null @@ -1,101 +0,0 @@ -import nuke -import pyblish -from ayon_nuke import api as napi - -from ayon_core.pipeline.publish import ( - ValidateContentsOrder, - PublishXmlValidationError, - OptionalPyblishPluginMixin -) - -class SelectCenterInNodeGraph(pyblish.api.Action): - """ - Centering failed instance node in node grap - """ - - label = "Center node in node graph" - icon = "wrench" - on = "failed" - - def process(self, context, plugin): - - # Get the errored instances - failed = [] - for result in context.data["results"]: - if (result["error"] is not None and result["instance"] is not None - and result["instance"] not in failed): - failed.append(result["instance"]) - - # Apply pyblish.logic to get the instances for the plug-in - instances = pyblish.api.instances_by_plugin(failed, plugin) - - all_xC = [] - all_yC = [] - - # maintain selection - with napi.maintained_selection(): - # collect all failed nodes xpos and ypos - for instance in instances: - bdn = instance.data["transientData"]["node"] - xC = bdn.xpos() + bdn.screenWidth() / 2 - yC = bdn.ypos() + bdn.screenHeight() / 2 - - all_xC.append(xC) - all_yC.append(yC) - - self.log.debug("all_xC: `{}`".format(all_xC)) - self.log.debug("all_yC: `{}`".format(all_yC)) - - # zoom to nodes in node graph - nuke.zoom(2, [min(all_xC), min(all_yC)]) - - -class ValidateBackdrop( - 
pyblish.api.InstancePlugin, - OptionalPyblishPluginMixin -): - """ Validate amount of nodes on backdrop node in case user - forgotten to add nodes above the publishing backdrop node. - """ - - order = ValidateContentsOrder - optional = True - families = ["nukenodes"] - label = "Validate Backdrop" - hosts = ["nuke"] - actions = [SelectCenterInNodeGraph] - - settings_category = "nuke" - - def process(self, instance): - if not self.is_active(instance.data): - return - - child_nodes = instance.data["transientData"]["childNodes"] - connections_out = instance.data["transientData"]["nodeConnectionsOut"] - - msg_multiple_outputs = ( - "Only one outcoming connection from " - "\"{}\" is allowed").format(instance.data["name"]) - - if len(connections_out.keys()) > 1: - raise PublishXmlValidationError( - self, - msg_multiple_outputs, - "multiple_outputs" - ) - - msg_no_nodes = "No content on backdrop node: \"{}\"".format( - instance.data["name"]) - - self.log.debug( - "Amount of nodes on instance: {}".format( - len(child_nodes)) - ) - - if child_nodes == []: - raise PublishXmlValidationError( - self, - msg_no_nodes, - "no_nodes" - ) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/validate_exposed_knobs.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/validate_exposed_knobs.py deleted file mode 100644 index d1b7c146fb..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/validate_exposed_knobs.py +++ /dev/null @@ -1,82 +0,0 @@ -import pyblish.api - -from ayon_core.pipeline.publish import get_errored_instances_from_context -from ayon_nuke.api.lib import link_knobs -from ayon_core.pipeline.publish import ( - OptionalPyblishPluginMixin, - PublishValidationError -) - - -class RepairExposedKnobs(pyblish.api.Action): - label = "Repair" - on = "failed" - icon = "wrench" - - def process(self, context, plugin): - instances = get_errored_instances_from_context(context) - - for instance in instances: - child_nodes = ( - instance.data.get("transientData", {}).get("childNodes") - or instance - ) - - write_group_node = instance.data["transientData"]["node"] - # get write node from inside of group - write_node = None - for x in child_nodes: - if x.Class() == "Write": - write_node = x - - product_type = instance.data["productType"] - plugin_name = plugin.product_types_mapping[product_type] - nuke_settings = instance.context.data["project_settings"]["nuke"] - create_settings = nuke_settings["create"][plugin_name] - exposed_knobs = create_settings["exposed_knobs"] - link_knobs(exposed_knobs, write_node, write_group_node) - - -class ValidateExposedKnobs( - OptionalPyblishPluginMixin, - pyblish.api.InstancePlugin -): - """ Validate write node exposed knobs. - - Compare exposed linked knobs to settings. 
- """ - - order = pyblish.api.ValidatorOrder - optional = True - families = ["render", "prerender", "image"] - label = "Validate Exposed Knobs" - actions = [RepairExposedKnobs] - hosts = ["nuke"] - - settings_category = "nuke" - - product_types_mapping = { - "render": "CreateWriteRender", - "prerender": "CreateWritePrerender", - "image": "CreateWriteImage" - } - - def process(self, instance): - if not self.is_active(instance.data): - return - - product_type = instance.data["productType"] - plugin = self.product_types_mapping[product_type] - group_node = instance.data["transientData"]["node"] - nuke_settings = instance.context.data["project_settings"]["nuke"] - create_settings = nuke_settings["create"][plugin] - exposed_knobs = create_settings.get("exposed_knobs", []) - unexposed_knobs = [] - for knob in exposed_knobs: - if knob not in group_node.knobs(): - unexposed_knobs.append(knob) - - if unexposed_knobs: - raise PublishValidationError( - "Missing exposed knobs: {}".format(unexposed_knobs) - ) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/validate_gizmo.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/validate_gizmo.py deleted file mode 100644 index 55249ae931..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/validate_gizmo.py +++ /dev/null @@ -1,72 +0,0 @@ -import pyblish -from ayon_core.pipeline import PublishXmlValidationError -from ayon_nuke import api as napi -import nuke - - -class OpenFailedGroupNode(pyblish.api.Action): - """ - Centering failed instance node in node grap - """ - - label = "Open Group" - icon = "wrench" - on = "failed" - - def process(self, context, plugin): - - # Get the errored instances - failed = [] - for result in context.data["results"]: - if (result["error"] is not None and result["instance"] is not None - and result["instance"] not in failed): - failed.append(result["instance"]) - - # Apply pyblish.logic to get the instances for the plug-in - instances = pyblish.api.instances_by_plugin(failed, plugin) - - # maintain selection - with napi.maintained_selection(): - # collect all failed nodes xpos and ypos - for instance in instances: - grpn = instance.data["transientData"]["node"] - nuke.showDag(grpn) - - -class ValidateGizmo(pyblish.api.InstancePlugin): - """Validate amount of output nodes in gizmo (group) node""" - - order = pyblish.api.ValidatorOrder - optional = True - families = ["gizmo"] - label = "Validate Gizmo (group)" - hosts = ["nuke"] - actions = [OpenFailedGroupNode] - - settings_category = "nuke" - - def process(self, instance): - grpn = instance.data["transientData"]["node"] - - with grpn: - connections_out = nuke.allNodes('Output') - if len(connections_out) > 1: - msg_multiple_outputs = ( - "Only one outcoming connection from " - "\"{}\" is allowed").format(instance.data["name"]) - - raise PublishXmlValidationError( - self, msg_multiple_outputs, "multiple_outputs", - {"node_name": grpn["name"].value()} - ) - - connections_in = nuke.allNodes('Input') - if len(connections_in) == 0: - msg_missing_inputs = ( - "At least one Input node has to be inside Group: " - "\"{}\"").format(instance.data["name"]) - - raise PublishXmlValidationError( - self, msg_missing_inputs, "no_inputs", - {"node_name": grpn["name"].value()} - ) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/validate_knobs.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/validate_knobs.py deleted file mode 100644 index ea03bd94b2..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/validate_knobs.py +++ 
/dev/null @@ -1,133 +0,0 @@ -import json - -import nuke -import six -import pyblish.api - -from ayon_core.pipeline.publish import ( - RepairContextAction, - PublishXmlValidationError, -) - - -class ValidateKnobs(pyblish.api.ContextPlugin): - """Ensure knobs are consistent. - - Knobs to validate and their values comes from the - - Controlled by plugin settings that require json in following structure: - "ValidateKnobs": { - "enabled": true, - "knobs": { - "family": { - "knob_name": knob_value - } - } - } - """ - - order = pyblish.api.ValidatorOrder - label = "Validate Knobs" - hosts = ["nuke"] - actions = [RepairContextAction] - optional = True - - settings_category = "nuke" - - knobs = "{}" - - def process(self, context): - invalid = self.get_invalid(context, compute=True) - if invalid: - invalid_items = [ - ( - "Node __{node_name}__ with knob _{label}_ " - "expecting _{expected}_, " - "but is set to _{current}_" - ).format(**i) - for i in invalid - ] - raise PublishXmlValidationError( - self, - "Found knobs with invalid values:\n{}".format(invalid), - formatting_data={ - "invalid_items": "\n".join(invalid_items)} - ) - - @classmethod - def get_invalid(cls, context, compute=False): - invalid = context.data.get("invalid_knobs", []) - if compute: - invalid = cls.get_invalid_knobs(context) - - return invalid - - @classmethod - def get_invalid_knobs(cls, context): - invalid_knobs = [] - - for instance in context: - # Load fresh knobs data for each instance - settings_knobs = json.loads(cls.knobs) - - # Filter families. - families = [instance.data["productType"]] - families += instance.data.get("families", []) - - # Get all knobs to validate. - knobs = {} - for family in families: - # check if dot in family - if "." in family: - family = family.split(".")[0] - - # avoid families not in settings - if family not in settings_knobs: - continue - - # get presets of knobs - for preset in settings_knobs[family]: - knobs[preset] = settings_knobs[family][preset] - - # Get invalid knobs. 
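# [editor's note] Not part of the original diff: a minimal runnable sketch of
# the JSON shape the "knobs" setting is documented to hold in the class
# docstring above; the family and knob names are hypothetical examples.
import json

example_knobs = '{"render": {"postage_stamp": false}}'
# Parses to {family: {knob_name: knob_value}}; get_invalid_knobs() flattens this
# per instance family and then checks each matching knob on every node,
# including nodes nested inside Group nodes.
assert json.loads(example_knobs) == {"render": {"postage_stamp": False}}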
- nodes = [] - - for node in nuke.allNodes(): - nodes.append(node) - if node.Class() == "Group": - node.begin() - nodes.extend(iter(nuke.allNodes())) - node.end() - - for node in nodes: - for knob in node.knobs(): - if knob not in knobs.keys(): - continue - - expected = knobs[knob] - if node[knob].value() != expected: - invalid_knobs.append( - { - "node_name": node.name(), - "knob": node[knob], - "name": node[knob].name(), - "label": node[knob].label(), - "expected": expected, - "current": node[knob].value() - } - ) - - context.data["invalid_knobs"] = invalid_knobs - return invalid_knobs - - @classmethod - def repair(cls, instance): - invalid = cls.get_invalid(instance) - for data in invalid: - # TODO: will need to improve type definitions - # with the new settings for knob types - if isinstance(data["expected"], six.text_type): - data["knob"].setValue(str(data["expected"])) - continue - - data["knob"].setValue(data["expected"]) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/validate_output_resolution.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/validate_output_resolution.py deleted file mode 100644 index 440cb8b758..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/validate_output_resolution.py +++ /dev/null @@ -1,114 +0,0 @@ -import pyblish.api - -from ayon_nuke import api as napi -from ayon_core.pipeline.publish import RepairAction -from ayon_core.pipeline import ( - PublishXmlValidationError, - OptionalPyblishPluginMixin -) - -import nuke - - -class ValidateOutputResolution( - OptionalPyblishPluginMixin, - pyblish.api.InstancePlugin -): - """Validates Output Resolution. - - It is making sure the resolution of write's input is the same as - Format definition of script in Root node. - """ - - order = pyblish.api.ValidatorOrder - optional = True - families = ["render"] - label = "Validate Write resolution" - hosts = ["nuke"] - actions = [RepairAction] - - settings_category = "nuke" - - missing_msg = "Missing Reformat node in render group node" - resolution_msg = "Reformat is set to wrong format" - - def process(self, instance): - if not self.is_active(instance.data): - return - - invalid = self.get_invalid(instance) - if invalid: - raise PublishXmlValidationError(self, invalid) - - @classmethod - def get_reformat(cls, instance): - child_nodes = ( - instance.data.get("transientData", {}).get("childNodes") - or instance - ) - - reformat = None - for inode in child_nodes: - if inode.Class() != "Reformat": - continue - reformat = inode - - return reformat - - @classmethod - def get_invalid(cls, instance): - def _check_resolution(instance, reformat): - root_width = instance.data["resolutionWidth"] - root_height = instance.data["resolutionHeight"] - - write_width = reformat.format().width() - write_height = reformat.format().height() - - if (root_width != write_width) or (root_height != write_height): - return None - else: - return True - - # check if reformat is in render node - reformat = cls.get_reformat(instance) - if not reformat: - return cls.missing_msg - - # check if reformat is set to correct root format - correct_format = _check_resolution(instance, reformat) - if not correct_format: - return cls.resolution_msg - - @classmethod - def repair(cls, instance): - child_nodes = ( - instance.data.get("transientData", {}).get("childNodes") - or instance - ) - - invalid = cls.get_invalid(instance) - grp_node = instance.data["transientData"]["node"] - - if cls.missing_msg == invalid: - # make sure we are inside of the group node - with grp_node: - # 
find input node and select it - _input = None - for inode in child_nodes: - if inode.Class() != "Input": - continue - _input = inode - - # add reformat node under it - with napi.maintained_selection(): - _input['selected'].setValue(True) - _rfn = nuke.createNode("Reformat", "name Reformat01") - _rfn["resize"].setValue(0) - _rfn["black_outside"].setValue(1) - - cls.log.info("Adding reformat node") - - if cls.resolution_msg == invalid: - reformat = cls.get_reformat(instance) - reformat["format"].setValue(nuke.root()["format"].value()) - cls.log.info("Fixing reformat to root.format") diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/validate_proxy_mode.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/validate_proxy_mode.py deleted file mode 100644 index 1eb858b17e..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/validate_proxy_mode.py +++ /dev/null @@ -1,38 +0,0 @@ -import pyblish -import nuke -from ayon_core.pipeline import PublishXmlValidationError - - -class FixProxyMode(pyblish.api.Action): - """ - Togger off proxy switch OFF - """ - - label = "Repair" - icon = "wrench" - on = "failed" - - def process(self, context, plugin): - rootNode = nuke.root() - rootNode["proxy"].setValue(False) - - -class ValidateProxyMode(pyblish.api.ContextPlugin): - """Validate active proxy mode""" - - order = pyblish.api.ValidatorOrder - label = "Validate Proxy Mode" - hosts = ["nuke"] - actions = [FixProxyMode] - - settings_category = "nuke" - - def process(self, context): - - rootNode = nuke.root() - isProxy = rootNode["proxy"].value() - - if isProxy: - raise PublishXmlValidationError( - self, "Proxy mode should be toggled OFF" - ) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/validate_rendered_frames.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/validate_rendered_frames.py deleted file mode 100644 index 20b7f6a6ac..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/validate_rendered_frames.py +++ /dev/null @@ -1,139 +0,0 @@ -import pyblish.api -import clique - -from ayon_core.pipeline import PublishXmlValidationError -from ayon_core.pipeline.publish import get_errored_instances_from_context - - -class RepairActionBase(pyblish.api.Action): - on = "failed" - icon = "wrench" - - @staticmethod - def get_instance(context, plugin): - # Get the errored instances - return get_errored_instances_from_context(context, plugin=plugin) - - def repair_knob(self, context, instances, state): - create_context = context.data["create_context"] - for instance in instances: - # Reset the render knob - instance_id = instance.data.get("instance_id") - created_instance = create_context.get_instance_by_id( - instance_id - ) - created_instance.creator_attributes["render_target"] = state - self.log.info("Rendering toggled to `{}`".format(state)) - - create_context.save_changes() - - -class RepairCollectionActionToLocal(RepairActionBase): - label = "Repair - rerender with \"Local\"" - - def process(self, context, plugin): - instances = self.get_instance(context, plugin) - self.repair_knob(context, instances, "local") - - -class RepairCollectionActionToFarm(RepairActionBase): - label = "Repair - rerender with \"On farm\"" - - def process(self, context, plugin): - instances = self.get_instance(context, plugin) - self.repair_knob(context, instances, "farm") - - -class ValidateRenderedFrames(pyblish.api.InstancePlugin): - """ Validates file output. 
""" - - order = pyblish.api.ValidatorOrder + 0.1 - families = ["render", "prerender", "still"] - - label = "Validate rendered frame" - hosts = ["nuke", "nukestudio"] - actions = [RepairCollectionActionToLocal, RepairCollectionActionToFarm] - - settings_category = "nuke" - - def process(self, instance): - node = instance.data["transientData"]["node"] - - f_data = { - "node_name": node.name() - } - - for repre in instance.data["representations"]: - - if not repre.get("files"): - msg = ("no frames were collected, " - "you need to render them.\n" - "Check properties of write node (group) and" - "select 'Local' option in 'Publish' dropdown.") - self.log.error(msg) - raise PublishXmlValidationError( - self, msg, formatting_data=f_data) - - if isinstance(repre["files"], str): - return - - collections, remainder = clique.assemble(repre["files"]) - self.log.debug("collections: {}".format(str(collections))) - self.log.debug("remainder: {}".format(str(remainder))) - - collection = collections[0] - - f_start_h = instance.data["frameStartHandle"] - f_end_h = instance.data["frameEndHandle"] - - frame_length = int(f_end_h - f_start_h + 1) - - if frame_length != 1: - if len(collections) != 1: - msg = "There are multiple collections in the folder" - self.log.error(msg) - raise PublishXmlValidationError( - self, msg, formatting_data=f_data) - - if not collection.is_contiguous(): - msg = "Some frames appear to be missing" - self.log.error(msg) - raise PublishXmlValidationError( - self, msg, formatting_data=f_data) - - collected_frames_len = len(collection.indexes) - coll_start = min(collection.indexes) - coll_end = max(collection.indexes) - - self.log.debug("frame_length: {}".format(frame_length)) - self.log.debug("collected_frames_len: {}".format( - collected_frames_len)) - self.log.debug("f_start_h-f_end_h: {}-{}".format( - f_start_h, f_end_h)) - self.log.debug( - "coll_start-coll_end: {}-{}".format(coll_start, coll_end)) - - self.log.debug( - "len(collection.indexes): {}".format(collected_frames_len) - ) - - if ("slate" in instance.data["families"]) \ - and (frame_length != collected_frames_len): - collected_frames_len -= 1 - f_start_h += 1 - - if ( - collected_frames_len != frame_length - and coll_start <= f_start_h - and coll_end >= f_end_h - ): - raise PublishXmlValidationError( - self, ( - "{} missing frames. Use repair to " - "render all frames" - ).format(__name__), formatting_data=f_data - ) - - instance.data["collection"] = collection - - return diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/validate_script_attributes.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/validate_script_attributes.py deleted file mode 100644 index 617d8d835b..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/validate_script_attributes.py +++ /dev/null @@ -1,103 +0,0 @@ -from copy import deepcopy -import pyblish.api -from ayon_core.pipeline import ( - PublishXmlValidationError, - OptionalPyblishPluginMixin -) -from ayon_core.pipeline.publish import RepairAction -from ayon_nuke.api.lib import ( - WorkfileSettings -) - - -class ValidateScriptAttributes( - OptionalPyblishPluginMixin, - pyblish.api.InstancePlugin -): - """ Validates file output. 
""" - - order = pyblish.api.ValidatorOrder + 0.1 - families = ["workfile"] - label = "Validate script attributes" - hosts = ["nuke"] - optional = True - actions = [RepairAction] - - settings_category = "nuke" - - def process(self, instance): - if not self.is_active(instance.data): - return - - script_data = deepcopy(instance.context.data["scriptData"]) - - src_folder_attributes = instance.data["folderEntity"]["attrib"] - - # These attributes will be checked - attributes = [ - "fps", - "frameStart", - "frameEnd", - "resolutionWidth", - "resolutionHeight", - "handleStart", - "handleEnd" - ] - - # get only defined attributes from folder data - folder_attributes = { - attr: src_folder_attributes[attr] - for attr in attributes - if attr in src_folder_attributes - } - # fix frame values to include handles - folder_attributes["fps"] = float("{0:.4f}".format( - folder_attributes["fps"])) - script_data["fps"] = float("{0:.4f}".format( - script_data["fps"])) - - # Compare folder's values Nukescript X Database - not_matching = [] - for attr in attributes: - self.log.debug( - "Folder vs Script attribute \"{}\": {}, {}".format( - attr, - folder_attributes[attr], - script_data[attr] - ) - ) - if folder_attributes[attr] != script_data[attr]: - not_matching.append({ - "name": attr, - "expected": folder_attributes[attr], - "actual": script_data[attr] - }) - - # Raise error if not matching - if not_matching: - msg = "Following attributes are not set correctly: \n{}" - attrs_wrong_str = "\n".join([ - ( - "`{0}` is set to `{1}`, " - "but should be set to `{2}`" - ).format(at["name"], at["actual"], at["expected"]) - for at in not_matching - ]) - attrs_wrong_html = "
".join([ - ( - "-- __{0}__ is set to __{1}__, " - "but should be set to __{2}__" - ).format(at["name"], at["actual"], at["expected"]) - for at in not_matching - ]) - raise PublishXmlValidationError( - self, msg.format(attrs_wrong_str), - formatting_data={ - "failed_attributes": attrs_wrong_html - } - ) - - @classmethod - def repair(cls, instance): - cls.log.debug("__ repairing instance: {}".format(instance)) - WorkfileSettings().set_context_settings() diff --git a/server_addon/nuke/client/ayon_nuke/plugins/publish/validate_write_nodes.py b/server_addon/nuke/client/ayon_nuke/plugins/publish/validate_write_nodes.py deleted file mode 100644 index d642a4314c..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/publish/validate_write_nodes.py +++ /dev/null @@ -1,156 +0,0 @@ -from collections import defaultdict - -import pyblish.api -from ayon_core.pipeline.publish import get_errored_instances_from_context -from ayon_nuke.api.lib import ( - get_write_node_template_attr, - set_node_knobs_from_settings, - color_gui_to_int -) - -from ayon_core.pipeline.publish import ( - PublishXmlValidationError, - OptionalPyblishPluginMixin -) - - -class RepairNukeWriteNodeAction(pyblish.api.Action): - label = "Repair" - on = "failed" - icon = "wrench" - - def process(self, context, plugin): - instances = get_errored_instances_from_context(context) - - for instance in instances: - child_nodes = ( - instance.data.get("transientData", {}).get("childNodes") - or instance - ) - - write_group_node = instance.data["transientData"]["node"] - # get write node from inside of group - write_node = None - for x in child_nodes: - if x.Class() == "Write": - write_node = x - - correct_data = get_write_node_template_attr(write_group_node) - - set_node_knobs_from_settings(write_node, correct_data["knobs"]) - - self.log.debug("Node attributes were fixed") - - -class ValidateNukeWriteNode( - OptionalPyblishPluginMixin, - pyblish.api.InstancePlugin -): - """ Validate Write node's knobs. - - Compare knobs on write node inside the render group - with settings. At the moment supporting only `file` knob. - """ - - order = pyblish.api.ValidatorOrder - optional = True - families = ["render"] - label = "Validate write node" - actions = [RepairNukeWriteNodeAction] - hosts = ["nuke"] - - settings_category = "nuke" - - def process(self, instance): - if not self.is_active(instance.data): - return - - child_nodes = ( - instance.data.get("transientData", {}).get("childNodes") - or instance - ) - - write_group_node = instance.data["transientData"]["node"] - - # get write node from inside of group - write_node = None - for x in child_nodes: - if x.Class() == "Write": - write_node = x - - if write_node is None: - return - - correct_data = get_write_node_template_attr(write_group_node) - - check = [] - - # Collect key values of same type in a list. 
- values_by_name = defaultdict(list) - for knob_data in correct_data["knobs"]: - knob_type = knob_data["type"] - knob_value = knob_data[knob_type] - - values_by_name[knob_data["name"]].append(knob_value) - - for knob_data in correct_data["knobs"]: - knob_type = knob_data["type"] - - if ( - knob_type == "__legacy__" - ): - raise PublishXmlValidationError( - self, ( - "Please update data in settings 'project_settings" - "/nuke/imageio/nodes/required_nodes'" - ), - key="legacy" - ) - - key = knob_data["name"] - values = values_by_name[key] - node_value = write_node[key].value() - - # fix type differences - fixed_values = [] - for value in values: - if type(node_value) in (int, float): - try: - if isinstance(value, list): - value = color_gui_to_int(value) - else: - value = float(value) - node_value = float(node_value) - except ValueError: - value = str(value) - else: - value = str(value) - node_value = str(node_value) - - fixed_values.append(value) - - if ( - node_value not in fixed_values - and key != "file" - and key != "tile_color" - ): - check.append([key, fixed_values, write_node[key].value()]) - - if check: - self._make_error(check) - - def _make_error(self, check): - # sourcery skip: merge-assign-and-aug-assign, move-assign-in-block - dbg_msg = "Write node's knobs values are not correct!\n" - msg_add = "Knob '{0}' > Expected: `{1}` > Current: `{2}`" - - details = [ - msg_add.format(item[0], item[1], item[2]) - for item in check - ] - xml_msg = "
".join(details) - dbg_msg += "\n\t".join(details) - - raise PublishXmlValidationError( - self, dbg_msg, formatting_data={"xml_msg": xml_msg} - ) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/workfile_build/create_placeholder.py b/server_addon/nuke/client/ayon_nuke/plugins/workfile_build/create_placeholder.py deleted file mode 100644 index 4d43d59bad..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/workfile_build/create_placeholder.py +++ /dev/null @@ -1,428 +0,0 @@ -import nuke - -from ayon_core.pipeline.workfile.workfile_template_builder import ( - CreatePlaceholderItem, - PlaceholderCreateMixin, -) -from ayon_nuke.api.lib import ( - find_free_space_to_paste_nodes, - get_extreme_positions, - get_group_io_nodes, - imprint, - refresh_node, - refresh_nodes, - reset_selection, - get_names_from_nodes, - get_nodes_by_names, - select_nodes, - duplicate_node, - node_tempfile, -) -from ayon_nuke.api.workfile_template_builder import ( - NukePlaceholderPlugin -) - - -class NukePlaceholderCreatePlugin( - NukePlaceholderPlugin, PlaceholderCreateMixin -): - identifier = "nuke.create" - label = "Nuke create" - - def _parse_placeholder_node_data(self, node): - placeholder_data = super( - NukePlaceholderCreatePlugin, self - )._parse_placeholder_node_data(node) - - node_knobs = node.knobs() - nb_children = 0 - if "nb_children" in node_knobs: - nb_children = int(node_knobs["nb_children"].getValue()) - placeholder_data["nb_children"] = nb_children - - siblings = [] - if "siblings" in node_knobs: - siblings = node_knobs["siblings"].values() - placeholder_data["siblings"] = siblings - - node_full_name = node.fullName() - placeholder_data["group_name"] = node_full_name.rpartition(".")[0] - placeholder_data["last_loaded"] = [] - placeholder_data["delete"] = False - return placeholder_data - - def _before_instance_create(self, placeholder): - placeholder.data["nodes_init"] = nuke.allNodes() - - def collect_placeholders(self): - output = [] - scene_placeholders = self._collect_scene_placeholders() - for node_name, node in scene_placeholders.items(): - plugin_identifier_knob = node.knob("plugin_identifier") - if ( - plugin_identifier_knob is None - or plugin_identifier_knob.getValue() != self.identifier - ): - continue - - placeholder_data = self._parse_placeholder_node_data(node) - - output.append( - CreatePlaceholderItem(node_name, placeholder_data, self) - ) - - return output - - def populate_placeholder(self, placeholder): - self.populate_create_placeholder(placeholder) - - def repopulate_placeholder(self, placeholder): - self.populate_create_placeholder(placeholder) - - def get_placeholder_options(self, options=None): - return self.get_create_plugin_options(options) - - def post_placeholder_process(self, placeholder, failed): - """Cleanup placeholder after load of its corresponding representations. - - Args: - placeholder (PlaceholderItem): Item which was just used to load - representation. - failed (bool): Loading of representation failed. 
- """ - # deselect all selected nodes - placeholder_node = nuke.toNode(placeholder.scene_identifier) - - # getting the latest nodes added - nodes_init = placeholder.data["nodes_init"] - nodes_created = list(set(nuke.allNodes()) - set(nodes_init)) - self.log.debug("Created nodes: {}".format(nodes_created)) - if not nodes_created: - return - - placeholder.data["delete"] = True - - nodes_created = self._move_to_placeholder_group( - placeholder, nodes_created - ) - placeholder.data["last_created"] = nodes_created - refresh_nodes(nodes_created) - - # positioning of the created nodes - min_x, min_y, _, _ = get_extreme_positions(nodes_created) - for node in nodes_created: - xpos = (node.xpos() - min_x) + placeholder_node.xpos() - ypos = (node.ypos() - min_y) + placeholder_node.ypos() - node.setXYpos(xpos, ypos) - refresh_nodes(nodes_created) - - # fix the problem of z_order for backdrops - self._fix_z_order(placeholder) - - if placeholder.data.get("keep_placeholder"): - self._imprint_siblings(placeholder) - - if placeholder.data["nb_children"] == 0: - # save initial nodes positions and dimensions, update them - # and set inputs and outputs of created nodes - - if placeholder.data.get("keep_placeholder"): - self._imprint_inits() - self._update_nodes(placeholder, nuke.allNodes(), nodes_created) - - self._set_created_connections(placeholder) - - elif placeholder.data["siblings"]: - # create copies of placeholder siblings for the new created nodes, - # set their inputs and outputs and update all nodes positions and - # dimensions and siblings names - - siblings = get_nodes_by_names(placeholder.data["siblings"]) - refresh_nodes(siblings) - copies = self._create_sib_copies(placeholder) - new_nodes = list(copies.values()) # copies nodes - self._update_nodes(new_nodes, nodes_created) - placeholder_node.removeKnob(placeholder_node.knob("siblings")) - new_nodes_name = get_names_from_nodes(new_nodes) - imprint(placeholder_node, {"siblings": new_nodes_name}) - self._set_copies_connections(placeholder, copies) - - self._update_nodes( - nuke.allNodes(), - new_nodes + nodes_created, - 20 - ) - - new_siblings = get_names_from_nodes(new_nodes) - placeholder.data["siblings"] = new_siblings - - else: - # if the placeholder doesn't have siblings, the created - # nodes will be placed in a free space - - xpointer, ypointer = find_free_space_to_paste_nodes( - nodes_created, direction="bottom", offset=200 - ) - node = nuke.createNode("NoOp") - reset_selection() - nuke.delete(node) - for node in nodes_created: - xpos = (node.xpos() - min_x) + xpointer - ypos = (node.ypos() - min_y) + ypointer - node.setXYpos(xpos, ypos) - - placeholder.data["nb_children"] += 1 - reset_selection() - - # go back to root group - nuke.root().begin() - - def _move_to_placeholder_group(self, placeholder, nodes_created): - """ - opening the placeholder's group and copying created nodes in it. 
- - Returns : - nodes_created (list): the new list of pasted nodes - """ - groups_name = placeholder.data["group_name"] - reset_selection() - select_nodes(nodes_created) - if groups_name: - with node_tempfile() as filepath: - nuke.nodeCopy(filepath) - for node in nuke.selectedNodes(): - nuke.delete(node) - group = nuke.toNode(groups_name) - group.begin() - nuke.nodePaste(filepath) - nodes_created = nuke.selectedNodes() - return nodes_created - - def _fix_z_order(self, placeholder): - """Fix the problem of z_order when a backdrop is create.""" - - nodes_created = placeholder.data["last_created"] - created_backdrops = [] - bd_orders = set() - for node in nodes_created: - if isinstance(node, nuke.BackdropNode): - created_backdrops.append(node) - bd_orders.add(node.knob("z_order").getValue()) - - if not bd_orders: - return - - sib_orders = set() - for node_name in placeholder.data["siblings"]: - node = nuke.toNode(node_name) - if isinstance(node, nuke.BackdropNode): - sib_orders.add(node.knob("z_order").getValue()) - - if not sib_orders: - return - - min_order = min(bd_orders) - max_order = max(sib_orders) - for backdrop_node in created_backdrops: - z_order = backdrop_node.knob("z_order").getValue() - backdrop_node.knob("z_order").setValue( - z_order + max_order - min_order + 1) - - def _imprint_siblings(self, placeholder): - """ - - add siblings names to placeholder attributes (nodes created with it) - - add Id to the attributes of all the other nodes - """ - - created_nodes = placeholder.data["last_created"] - created_nodes_set = set(created_nodes) - - for node in created_nodes: - node_knobs = node.knobs() - - if ( - "is_placeholder" not in node_knobs - or ( - "is_placeholder" in node_knobs - and node.knob("is_placeholder").value() - ) - ): - siblings = list(created_nodes_set - {node}) - siblings_name = get_names_from_nodes(siblings) - siblings = {"siblings": siblings_name} - imprint(node, siblings) - - def _imprint_inits(self): - """Add initial positions and dimensions to the attributes""" - - for node in nuke.allNodes(): - refresh_node(node) - imprint(node, {"x_init": node.xpos(), "y_init": node.ypos()}) - node.knob("x_init").setVisible(False) - node.knob("y_init").setVisible(False) - width = node.screenWidth() - height = node.screenHeight() - if "bdwidth" in node.knobs(): - imprint(node, {"w_init": width, "h_init": height}) - node.knob("w_init").setVisible(False) - node.knob("h_init").setVisible(False) - refresh_node(node) - - def _update_nodes( - self, placeholder, nodes, considered_nodes, offset_y=None - ): - """Adjust backdrop nodes dimensions and positions. - - Considering some nodes sizes. 
- - Args: - nodes (list): list of nodes to update - considered_nodes (list): list of nodes to consider while updating - positions and dimensions - offset (int): distance between copies - """ - - placeholder_node = nuke.toNode(placeholder.scene_identifier) - - min_x, min_y, max_x, max_y = get_extreme_positions(considered_nodes) - - diff_x = diff_y = 0 - contained_nodes = [] # for backdrops - - if offset_y is None: - width_ph = placeholder_node.screenWidth() - height_ph = placeholder_node.screenHeight() - diff_y = max_y - min_y - height_ph - diff_x = max_x - min_x - width_ph - contained_nodes = [placeholder_node] - min_x = placeholder_node.xpos() - min_y = placeholder_node.ypos() - else: - siblings = get_nodes_by_names(placeholder.data["siblings"]) - minX, _, maxX, _ = get_extreme_positions(siblings) - diff_y = max_y - min_y + 20 - diff_x = abs(max_x - min_x - maxX + minX) - contained_nodes = considered_nodes - - if diff_y <= 0 and diff_x <= 0: - return - - for node in nodes: - refresh_node(node) - - if ( - node == placeholder_node - or node in considered_nodes - ): - continue - - if ( - not isinstance(node, nuke.BackdropNode) - or ( - isinstance(node, nuke.BackdropNode) - and not set(contained_nodes) <= set(node.getNodes()) - ) - ): - if offset_y is None and node.xpos() >= min_x: - node.setXpos(node.xpos() + diff_x) - - if node.ypos() >= min_y: - node.setYpos(node.ypos() + diff_y) - - else: - width = node.screenWidth() - height = node.screenHeight() - node.knob("bdwidth").setValue(width + diff_x) - node.knob("bdheight").setValue(height + diff_y) - - refresh_node(node) - - def _set_created_connections(self, placeholder): - """ - set inputs and outputs of created nodes""" - - placeholder_node = nuke.toNode(placeholder.scene_identifier) - input_node, output_node = get_group_io_nodes( - placeholder.data["last_created"] - ) - for node in placeholder_node.dependent(): - for idx in range(node.inputs()): - if node.input(idx) == placeholder_node and output_node: - node.setInput(idx, output_node) - - for node in placeholder_node.dependencies(): - for idx in range(placeholder_node.inputs()): - if placeholder_node.input(idx) == node and input_node: - input_node.setInput(0, node) - - def _create_sib_copies(self, placeholder): - """ creating copies of the palce_holder siblings (the ones who were - created with it) for the new nodes added - - Returns : - copies (dict) : with copied nodes names and their copies - """ - - copies = {} - siblings = get_nodes_by_names(placeholder.data["siblings"]) - for node in siblings: - new_node = duplicate_node(node) - - x_init = int(new_node.knob("x_init").getValue()) - y_init = int(new_node.knob("y_init").getValue()) - new_node.setXYpos(x_init, y_init) - if isinstance(new_node, nuke.BackdropNode): - w_init = new_node.knob("w_init").getValue() - h_init = new_node.knob("h_init").getValue() - new_node.knob("bdwidth").setValue(w_init) - new_node.knob("bdheight").setValue(h_init) - refresh_node(node) - - if "repre_id" in node.knobs().keys(): - node.removeKnob(node.knob("repre_id")) - copies[node.name()] = new_node - return copies - - def _set_copies_connections(self, placeholder, copies): - """Set inputs and outputs of the copies. - - Args: - copies (dict): Copied nodes by their names. 
- """ - - last_input, last_output = get_group_io_nodes( - placeholder.data["last_created"] - ) - siblings = get_nodes_by_names(placeholder.data["siblings"]) - siblings_input, siblings_output = get_group_io_nodes(siblings) - copy_input = copies[siblings_input.name()] - copy_output = copies[siblings_output.name()] - - for node_init in siblings: - if node_init == siblings_output: - continue - - node_copy = copies[node_init.name()] - for node in node_init.dependent(): - for idx in range(node.inputs()): - if node.input(idx) != node_init: - continue - - if node in siblings: - copies[node.name()].setInput(idx, node_copy) - else: - last_input.setInput(0, node_copy) - - for node in node_init.dependencies(): - for idx in range(node_init.inputs()): - if node_init.input(idx) != node: - continue - - if node_init == siblings_input: - copy_input.setInput(idx, node) - elif node in siblings: - node_copy.setInput(idx, copies[node.name()]) - else: - node_copy.setInput(idx, last_output) - - siblings_input.setInput(0, copy_output) diff --git a/server_addon/nuke/client/ayon_nuke/plugins/workfile_build/load_placeholder.py b/server_addon/nuke/client/ayon_nuke/plugins/workfile_build/load_placeholder.py deleted file mode 100644 index 68bc10e41b..0000000000 --- a/server_addon/nuke/client/ayon_nuke/plugins/workfile_build/load_placeholder.py +++ /dev/null @@ -1,455 +0,0 @@ -import nuke - -from ayon_core.pipeline.workfile.workfile_template_builder import ( - LoadPlaceholderItem, - PlaceholderLoadMixin, -) -from ayon_nuke.api.lib import ( - find_free_space_to_paste_nodes, - get_extreme_positions, - get_group_io_nodes, - imprint, - refresh_node, - refresh_nodes, - reset_selection, - get_names_from_nodes, - get_nodes_by_names, - select_nodes, - duplicate_node, - node_tempfile, -) -from ayon_nuke.api.workfile_template_builder import ( - NukePlaceholderPlugin -) - - -class NukePlaceholderLoadPlugin(NukePlaceholderPlugin, PlaceholderLoadMixin): - identifier = "nuke.load" - label = "Nuke load" - - def _parse_placeholder_node_data(self, node): - placeholder_data = super( - NukePlaceholderLoadPlugin, self - )._parse_placeholder_node_data(node) - - node_knobs = node.knobs() - nb_children = 0 - if "nb_children" in node_knobs: - nb_children = int(node_knobs["nb_children"].getValue()) - placeholder_data["nb_children"] = nb_children - - siblings = [] - if "siblings" in node_knobs: - siblings = node_knobs["siblings"].values() - placeholder_data["siblings"] = siblings - - node_full_name = node.fullName() - placeholder_data["group_name"] = node_full_name.rpartition(".")[0] - placeholder_data["last_loaded"] = [] - placeholder_data["delete"] = False - return placeholder_data - - def _get_loaded_repre_ids(self): - loaded_representation_ids = self.builder.get_shared_populate_data( - "loaded_representation_ids" - ) - if loaded_representation_ids is None: - loaded_representation_ids = set() - for node in nuke.allNodes(): - if "repre_id" in node.knobs(): - loaded_representation_ids.add( - node.knob("repre_id").getValue() - ) - - self.builder.set_shared_populate_data( - "loaded_representation_ids", loaded_representation_ids - ) - return loaded_representation_ids - - def _before_placeholder_load(self, placeholder): - placeholder.data["nodes_init"] = nuke.allNodes() - - def _before_repre_load(self, placeholder, representation): - placeholder.data["last_repre_id"] = representation["id"] - - def collect_placeholders(self): - output = [] - scene_placeholders = self._collect_scene_placeholders() - for node_name, node in scene_placeholders.items(): - 
plugin_identifier_knob = node.knob("plugin_identifier") - if ( - plugin_identifier_knob is None - or plugin_identifier_knob.getValue() != self.identifier - ): - continue - - placeholder_data = self._parse_placeholder_node_data(node) - # TODO do data validations and maybe updgrades if are invalid - output.append( - LoadPlaceholderItem(node_name, placeholder_data, self) - ) - - return output - - def populate_placeholder(self, placeholder): - self.populate_load_placeholder(placeholder) - - def repopulate_placeholder(self, placeholder): - repre_ids = self._get_loaded_repre_ids() - self.populate_load_placeholder(placeholder, repre_ids) - - def get_placeholder_options(self, options=None): - return self.get_load_plugin_options(options) - - def post_placeholder_process(self, placeholder, failed): - """Cleanup placeholder after load of its corresponding representations. - - Args: - placeholder (PlaceholderItem): Item which was just used to load - representation. - failed (bool): Loading of representation failed. - """ - # deselect all selected nodes - placeholder_node = nuke.toNode(placeholder.scene_identifier) - - # getting the latest nodes added - # TODO get from shared populate data! - nodes_init = placeholder.data["nodes_init"] - nodes_loaded = list(set(nuke.allNodes()) - set(nodes_init)) - self.log.debug("Loaded nodes: {}".format(nodes_loaded)) - if not nodes_loaded: - return - - placeholder.data["delete"] = True - - nodes_loaded = self._move_to_placeholder_group( - placeholder, nodes_loaded - ) - placeholder.data["last_loaded"] = nodes_loaded - refresh_nodes(nodes_loaded) - - # positioning of the loaded nodes - min_x, min_y, _, _ = get_extreme_positions(nodes_loaded) - for node in nodes_loaded: - xpos = (node.xpos() - min_x) + placeholder_node.xpos() - ypos = (node.ypos() - min_y) + placeholder_node.ypos() - node.setXYpos(xpos, ypos) - refresh_nodes(nodes_loaded) - - # fix the problem of z_order for backdrops - self._fix_z_order(placeholder) - - if placeholder.data.get("keep_placeholder"): - self._imprint_siblings(placeholder) - - if placeholder.data["nb_children"] == 0: - # save initial nodes positions and dimensions, update them - # and set inputs and outputs of loaded nodes - if placeholder.data.get("keep_placeholder"): - self._imprint_inits() - self._update_nodes(placeholder, nuke.allNodes(), nodes_loaded) - - self._set_loaded_connections(placeholder) - - elif placeholder.data["siblings"]: - # create copies of placeholder siblings for the new loaded nodes, - # set their inputs and outputs and update all nodes positions and - # dimensions and siblings names - - siblings = get_nodes_by_names(placeholder.data["siblings"]) - refresh_nodes(siblings) - copies = self._create_sib_copies(placeholder) - new_nodes = list(copies.values()) # copies nodes - self._update_nodes(new_nodes, nodes_loaded) - placeholder_node.removeKnob(placeholder_node.knob("siblings")) - new_nodes_name = get_names_from_nodes(new_nodes) - imprint(placeholder_node, {"siblings": new_nodes_name}) - self._set_copies_connections(placeholder, copies) - - self._update_nodes( - nuke.allNodes(), - new_nodes + nodes_loaded, - 20 - ) - - new_siblings = get_names_from_nodes(new_nodes) - placeholder.data["siblings"] = new_siblings - - else: - # if the placeholder doesn't have siblings, the loaded - # nodes will be placed in a free space - - xpointer, ypointer = find_free_space_to_paste_nodes( - nodes_loaded, direction="bottom", offset=200 - ) - node = nuke.createNode("NoOp") - reset_selection() - nuke.delete(node) - for node in 
nodes_loaded: - xpos = (node.xpos() - min_x) + xpointer - ypos = (node.ypos() - min_y) + ypointer - node.setXYpos(xpos, ypos) - - placeholder.data["nb_children"] += 1 - reset_selection() - - # go back to root group - nuke.root().begin() - - def _move_to_placeholder_group(self, placeholder, nodes_loaded): - """ - opening the placeholder's group and copying loaded nodes in it. - - Returns : - nodes_loaded (list): the new list of pasted nodes - """ - - groups_name = placeholder.data["group_name"] - reset_selection() - select_nodes(nodes_loaded) - if groups_name: - with node_tempfile() as filepath: - nuke.nodeCopy(filepath) - for node in nuke.selectedNodes(): - nuke.delete(node) - group = nuke.toNode(groups_name) - group.begin() - nuke.nodePaste(filepath) - nodes_loaded = nuke.selectedNodes() - return nodes_loaded - - def _fix_z_order(self, placeholder): - """Fix the problem of z_order when a backdrop is loaded.""" - - nodes_loaded = placeholder.data["last_loaded"] - loaded_backdrops = [] - bd_orders = set() - for node in nodes_loaded: - if isinstance(node, nuke.BackdropNode): - loaded_backdrops.append(node) - bd_orders.add(node.knob("z_order").getValue()) - - if not bd_orders: - return - - sib_orders = set() - for node_name in placeholder.data["siblings"]: - node = nuke.toNode(node_name) - if isinstance(node, nuke.BackdropNode): - sib_orders.add(node.knob("z_order").getValue()) - - if not sib_orders: - return - - min_order = min(bd_orders) - max_order = max(sib_orders) - for backdrop_node in loaded_backdrops: - z_order = backdrop_node.knob("z_order").getValue() - backdrop_node.knob("z_order").setValue( - z_order + max_order - min_order + 1) - - def _imprint_siblings(self, placeholder): - """ - - add siblings names to placeholder attributes (nodes loaded with it) - - add Id to the attributes of all the other nodes - """ - - loaded_nodes = placeholder.data["last_loaded"] - loaded_nodes_set = set(loaded_nodes) - data = {"repre_id": str(placeholder.data["last_repre_id"])} - - for node in loaded_nodes: - node_knobs = node.knobs() - if "builder_type" not in node_knobs: - # save the id of representation for all imported nodes - imprint(node, data) - node.knob("repre_id").setVisible(False) - refresh_node(node) - continue - - if ( - "is_placeholder" not in node_knobs - or ( - "is_placeholder" in node_knobs - and node.knob("is_placeholder").value() - ) - ): - siblings = list(loaded_nodes_set - {node}) - siblings_name = get_names_from_nodes(siblings) - siblings = {"siblings": siblings_name} - imprint(node, siblings) - - def _imprint_inits(self): - """Add initial positions and dimensions to the attributes""" - - for node in nuke.allNodes(): - refresh_node(node) - imprint(node, {"x_init": node.xpos(), "y_init": node.ypos()}) - node.knob("x_init").setVisible(False) - node.knob("y_init").setVisible(False) - width = node.screenWidth() - height = node.screenHeight() - if "bdwidth" in node.knobs(): - imprint(node, {"w_init": width, "h_init": height}) - node.knob("w_init").setVisible(False) - node.knob("h_init").setVisible(False) - refresh_node(node) - - def _update_nodes( - self, placeholder, nodes, considered_nodes, offset_y=None - ): - """Adjust backdrop nodes dimensions and positions. - - Considering some nodes sizes. 
- - Args: - nodes (list): list of nodes to update - considered_nodes (list): list of nodes to consider while updating - positions and dimensions - offset (int): distance between copies - """ - - placeholder_node = nuke.toNode(placeholder.scene_identifier) - - min_x, min_y, max_x, max_y = get_extreme_positions(considered_nodes) - - diff_x = diff_y = 0 - contained_nodes = [] # for backdrops - - if offset_y is None: - width_ph = placeholder_node.screenWidth() - height_ph = placeholder_node.screenHeight() - diff_y = max_y - min_y - height_ph - diff_x = max_x - min_x - width_ph - contained_nodes = [placeholder_node] - min_x = placeholder_node.xpos() - min_y = placeholder_node.ypos() - else: - siblings = get_nodes_by_names(placeholder.data["siblings"]) - minX, _, maxX, _ = get_extreme_positions(siblings) - diff_y = max_y - min_y + 20 - diff_x = abs(max_x - min_x - maxX + minX) - contained_nodes = considered_nodes - - if diff_y <= 0 and diff_x <= 0: - return - - for node in nodes: - refresh_node(node) - - if ( - node == placeholder_node - or node in considered_nodes - ): - continue - - if ( - not isinstance(node, nuke.BackdropNode) - or ( - isinstance(node, nuke.BackdropNode) - and not set(contained_nodes) <= set(node.getNodes()) - ) - ): - if offset_y is None and node.xpos() >= min_x: - node.setXpos(node.xpos() + diff_x) - - if node.ypos() >= min_y: - node.setYpos(node.ypos() + diff_y) - - else: - width = node.screenWidth() - height = node.screenHeight() - node.knob("bdwidth").setValue(width + diff_x) - node.knob("bdheight").setValue(height + diff_y) - - refresh_node(node) - - def _set_loaded_connections(self, placeholder): - """ - set inputs and outputs of loaded nodes""" - - placeholder_node = nuke.toNode(placeholder.scene_identifier) - input_node, output_node = get_group_io_nodes( - placeholder.data["last_loaded"] - ) - for node in placeholder_node.dependent(): - for idx in range(node.inputs()): - if node.input(idx) == placeholder_node and output_node: - node.setInput(idx, output_node) - - for node in placeholder_node.dependencies(): - for idx in range(placeholder_node.inputs()): - if placeholder_node.input(idx) == node and input_node: - input_node.setInput(0, node) - - def _create_sib_copies(self, placeholder): - """ creating copies of the palce_holder siblings (the ones who were - loaded with it) for the new nodes added - - Returns : - copies (dict) : with copied nodes names and their copies - """ - - copies = {} - siblings = get_nodes_by_names(placeholder.data["siblings"]) - for node in siblings: - new_node = duplicate_node(node) - - x_init = int(new_node.knob("x_init").getValue()) - y_init = int(new_node.knob("y_init").getValue()) - new_node.setXYpos(x_init, y_init) - if isinstance(new_node, nuke.BackdropNode): - w_init = new_node.knob("w_init").getValue() - h_init = new_node.knob("h_init").getValue() - new_node.knob("bdwidth").setValue(w_init) - new_node.knob("bdheight").setValue(h_init) - refresh_node(node) - - if "repre_id" in node.knobs().keys(): - node.removeKnob(node.knob("repre_id")) - copies[node.name()] = new_node - return copies - - def _set_copies_connections(self, placeholder, copies): - """Set inputs and outputs of the copies. - - Args: - copies (dict): Copied nodes by their names. 
- """ - - last_input, last_output = get_group_io_nodes( - placeholder.data["last_loaded"] - ) - siblings = get_nodes_by_names(placeholder.data["siblings"]) - siblings_input, siblings_output = get_group_io_nodes(siblings) - copy_input = copies[siblings_input.name()] - copy_output = copies[siblings_output.name()] - - for node_init in siblings: - if node_init == siblings_output: - continue - - node_copy = copies[node_init.name()] - for node in node_init.dependent(): - for idx in range(node.inputs()): - if node.input(idx) != node_init: - continue - - if node in siblings: - copies[node.name()].setInput(idx, node_copy) - else: - last_input.setInput(0, node_copy) - - for node in node_init.dependencies(): - for idx in range(node_init.inputs()): - if node_init.input(idx) != node: - continue - - if node_init == siblings_input: - copy_input.setInput(idx, node) - elif node in siblings: - node_copy.setInput(idx, copies[node.name()]) - else: - node_copy.setInput(idx, last_output) - - siblings_input.setInput(0, copy_output) diff --git a/server_addon/nuke/client/ayon_nuke/startup/__init__.py b/server_addon/nuke/client/ayon_nuke/startup/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/server_addon/nuke/client/ayon_nuke/startup/clear_rendered.py b/server_addon/nuke/client/ayon_nuke/startup/clear_rendered.py deleted file mode 100644 index 8072aae14f..0000000000 --- a/server_addon/nuke/client/ayon_nuke/startup/clear_rendered.py +++ /dev/null @@ -1,12 +0,0 @@ -import os - -from ayon_core.lib import Logger - - -def clear_rendered(dir_path): - log = Logger.get_logger(__name__) - - for _f in os.listdir(dir_path): - _f_path = os.path.join(dir_path, _f) - log.info("Removing: `{}`".format(_f_path)) - os.remove(_f_path) diff --git a/server_addon/nuke/client/ayon_nuke/startup/custom_write_node.py b/server_addon/nuke/client/ayon_nuke/startup/custom_write_node.py deleted file mode 100644 index 5b0f240a49..0000000000 --- a/server_addon/nuke/client/ayon_nuke/startup/custom_write_node.py +++ /dev/null @@ -1,153 +0,0 @@ -""" AYON custom script for setting up write nodes for non-publish """ -import os -import nuke -import nukescripts -from ayon_core.pipeline import Anatomy, get_current_project_name -from ayon_nuke.api.lib import ( - set_node_knobs_from_settings, - get_nuke_imageio_settings -) - - -temp_rendering_path_template = ( - "{work}/renders/nuke/{subset}/{subset}.{frame}.{ext}") - -knobs_setting = { - "knobs": [ - { - "type": "text", - "name": "file_type", - "value": "exr" - }, - { - "type": "text", - "name": "datatype", - "value": "16 bit half" - }, - { - "type": "text", - "name": "compression", - "value": "Zip (1 scanline)" - }, - { - "type": "bool", - "name": "autocrop", - "value": True - }, - { - "type": "color_gui", - "name": "tile_color", - "value": [ - 186, - 35, - 35, - 255 - ] - }, - { - "type": "text", - "name": "channels", - "value": "rgb" - }, - { - "type": "bool", - "name": "create_directories", - "value": True - } - ] -} - - -class WriteNodeKnobSettingPanel(nukescripts.PythonPanel): - """ Write Node's Knobs Settings Panel """ - def __init__(self): - nukescripts.PythonPanel.__init__(self, "Set Knobs Value(Write Node)") - - preset_name, _ = self.get_node_knobs_setting() - # create knobs - - self.selected_preset_name = nuke.Enumeration_Knob( - 'preset_selector', 'presets', preset_name) - # add knobs to panel - self.addKnob(self.selected_preset_name) - - def process(self): - """ Process the panel values. 
""" - write_selected_nodes = [ - selected_nodes for selected_nodes in nuke.selectedNodes() - if selected_nodes.Class() == "Write"] - - selected_preset = self.selected_preset_name.value() - ext = None - knobs = knobs_setting["knobs"] - preset_name, node_knobs_presets = ( - self.get_node_knobs_setting(selected_preset) - ) - - if selected_preset and preset_name: - if not node_knobs_presets: - nuke.message( - "No knobs value found in subset group.." - "\nDefault setting will be used..") - else: - knobs = node_knobs_presets - - ext_knob_list = [knob for knob in knobs if knob["name"] == "file_type"] - if not ext_knob_list: - nuke.message( - "ERROR: No file type found in the subset's knobs." - "\nPlease add one to complete setting up the node") - return - else: - for knob in ext_knob_list: - ext = knob["value"] - - anatomy = Anatomy(get_current_project_name()) - - frame_padding = anatomy.templates_obj.frame_padding - for write_node in write_selected_nodes: - # data for mapping the path - # TODO add more fill data - product_name = write_node["name"].value() - data = { - "work": os.getenv("AYON_WORKDIR"), - "subset": product_name, - "product": { - "name": product_name, - }, - "frame": "#" * frame_padding, - "ext": ext - } - file_path = temp_rendering_path_template.format(**data) - file_path = file_path.replace("\\", "/") - write_node["file"].setValue(file_path) - set_node_knobs_from_settings(write_node, knobs) - - def get_node_knobs_setting(self, selected_preset=None): - preset_name = [] - knobs_nodes = [] - settings = [ - node_settings for node_settings - in get_nuke_imageio_settings()["nodes"]["override_nodes"] - if node_settings["nuke_node_class"] == "Write" - and node_settings["subsets"] - ] - if not settings: - return - - for i, _ in enumerate(settings): - if selected_preset in settings[i]["subsets"]: - knobs_nodes = settings[i]["knobs"] - - for setting in settings: - # TODO change 'subsets' to 'product_names' in settings - for product_name in setting["subsets"]: - preset_name.append(product_name) - - return preset_name, knobs_nodes - - -def main(): - p_ = WriteNodeKnobSettingPanel() - if p_.showModalDialog(): - print(p_.process()) diff --git a/server_addon/nuke/client/ayon_nuke/startup/frame_setting_for_read_nodes.py b/server_addon/nuke/client/ayon_nuke/startup/frame_setting_for_read_nodes.py deleted file mode 100644 index 3e1430c3b1..0000000000 --- a/server_addon/nuke/client/ayon_nuke/startup/frame_setting_for_read_nodes.py +++ /dev/null @@ -1,47 +0,0 @@ -""" AYON custom script for resetting read nodes start frame values """ - -import nuke -import nukescripts - - -class FrameSettingsPanel(nukescripts.PythonPanel): - """ Frame Settings Panel """ - def __init__(self): - nukescripts.PythonPanel.__init__(self, "Set Frame Start (Read Node)") - - # create knobs - self.frame = nuke.Int_Knob( - 'frame', 'Frame Number') - self.selected = nuke.Boolean_Knob("selection") - # add knobs to panel - self.addKnob(self.selected) - self.addKnob(self.frame) - - # set values - self.selected.setValue(False) - self.frame.setValue(nuke.root().firstFrame()) - - def process(self): - """ Process the panel values. 
""" - # get values - frame = self.frame.value() - if self.selected.value(): - # selected nodes processing - if not nuke.selectedNodes(): - return - for rn_ in nuke.selectedNodes(): - if rn_.Class() != "Read": - continue - rn_["frame_mode"].setValue("start_at") - rn_["frame"].setValue(str(frame)) - else: - # all nodes processing - for rn_ in nuke.allNodes(filter="Read"): - rn_["frame_mode"].setValue("start_at") - rn_["frame"].setValue(str(frame)) - - -def main(): - p_ = FrameSettingsPanel() - if p_.showModalDialog(): - print(p_.process()) diff --git a/server_addon/nuke/client/ayon_nuke/startup/menu.py b/server_addon/nuke/client/ayon_nuke/startup/menu.py deleted file mode 100644 index c3dd8cda8f..0000000000 --- a/server_addon/nuke/client/ayon_nuke/startup/menu.py +++ /dev/null @@ -1,5 +0,0 @@ -from ayon_core.pipeline import install_host -from ayon_nuke.api import NukeHost - -host = NukeHost() -install_host(host) diff --git a/server_addon/nuke/client/ayon_nuke/startup/write_to_read.py b/server_addon/nuke/client/ayon_nuke/startup/write_to_read.py deleted file mode 100644 index 8a8ffb8d3d..0000000000 --- a/server_addon/nuke/client/ayon_nuke/startup/write_to_read.py +++ /dev/null @@ -1,151 +0,0 @@ -import re -import os -import glob -import nuke -from ayon_core.lib import Logger -log = Logger.get_logger(__name__) - -SINGLE_FILE_FORMATS = ['avi', 'mp4', 'mxf', 'mov', 'mpg', 'mpeg', 'wmv', 'm4v', - 'm2v'] - - -def evaluate_filepath_new( - k_value, k_eval, project_dir, first_frame, allow_relative): - - # get combined relative path - combined_relative_path = None - if k_eval is not None and project_dir is not None: - combined_relative_path = os.path.abspath( - os.path.join(project_dir, k_eval)) - combined_relative_path = combined_relative_path.replace('\\', '/') - filetype = combined_relative_path.split('.')[-1] - frame_number = re.findall(r'\d+', combined_relative_path)[-1] - basename = combined_relative_path[: combined_relative_path.rfind( - frame_number)] - filepath_glob = basename + '*' + filetype - glob_search_results = glob.glob(filepath_glob) - if len(glob_search_results) <= 0: - combined_relative_path = None - - try: - # k_value = k_value % first_frame - if os.path.isdir(os.path.basename(k_value)): - # doesn't check for file, only parent dir - filepath = k_value - elif os.path.exists(k_eval): - filepath = k_eval - elif not isinstance(project_dir, type(None)) and \ - not isinstance(combined_relative_path, type(None)): - filepath = combined_relative_path - - filepath = os.path.abspath(filepath) - except Exception as E: - log.error("Cannot create Read node. Perhaps it needs to be \ - rendered first :) Error: `{}`".format(E)) - return None - - filepath = filepath.replace('\\', '/') - # assumes last number is a sequence counter - current_frame = re.findall(r'\d+', filepath)[-1] - padding = len(current_frame) - basename = filepath[: filepath.rfind(current_frame)] - filetype = filepath.split('.')[-1] - - # sequence or not? - if filetype in SINGLE_FILE_FORMATS: - pass - else: - # Image sequence needs hashes - # to do still with no number not handled - filepath = basename + '#' * padding + '.' + filetype - - # relative path? 
make it relative again - if allow_relative: - if (not isinstance(project_dir, type(None))) and project_dir != "": - filepath = filepath.replace(project_dir, '.') - - # get first and last frame from disk - frames = [] - firstframe = 0 - lastframe = 0 - filepath_glob = basename + '*' + filetype - glob_search_results = glob.glob(filepath_glob) - for f in glob_search_results: - frame = re.findall(r'\d+', f)[-1] - frames.append(frame) - frames = sorted(frames) - firstframe = frames[0] - lastframe = frames[len(frames) - 1] - - if int(lastframe) < 0: - lastframe = firstframe - - return filepath, firstframe, lastframe - - -def create_read_node(ndata, comp_start): - read = nuke.createNode('Read', 'file "' + ndata['filepath'] + '"') - read.knob('colorspace').setValue(int(ndata['colorspace'])) - read.knob('raw').setValue(ndata['rawdata']) - read.knob('first').setValue(int(ndata['firstframe'])) - read.knob('last').setValue(int(ndata['lastframe'])) - read.knob('origfirst').setValue(int(ndata['firstframe'])) - read.knob('origlast').setValue(int(ndata['lastframe'])) - if comp_start == int(ndata['firstframe']): - read.knob('frame_mode').setValue("1") - read.knob('frame').setValue(str(comp_start)) - else: - read.knob('frame_mode').setValue("0") - read.knob('xpos').setValue(ndata['new_xpos']) - read.knob('ypos').setValue(ndata['new_ypos']) - nuke.inputs(read, 0) - return - - -def write_to_read(gn, - allow_relative=False): - - comp_start = nuke.Root().knob('first_frame').value() - project_dir = nuke.Root().knob('project_directory').getValue() - if not os.path.exists(project_dir): - project_dir = nuke.Root().knob('project_directory').evaluate() - - group_read_nodes = [] - with gn: - height = gn.screenHeight() # get group height and position - new_xpos = int(gn.knob('xpos').value()) - new_ypos = int(gn.knob('ypos').value()) + height + 20 - group_writes = [n for n in nuke.allNodes() if n.Class() == "Write"] - if group_writes != []: - # there can be only 1 write node, taking first - n = group_writes[0] - - if n.knob('file') is not None: - myfile, firstFrame, lastFrame = evaluate_filepath_new( - n.knob('file').getValue(), - n.knob('file').evaluate(), - project_dir, - comp_start, - allow_relative - ) - if not myfile: - return - - # get node data - ndata = { - 'filepath': myfile, - 'firstframe': int(firstFrame), - 'lastframe': int(lastFrame), - 'new_xpos': new_xpos, - 'new_ypos': new_ypos, - 'colorspace': n.knob('colorspace').getValue(), - 'rawdata': n.knob('raw').value(), - 'write_frame_mode': str(n.knob('frame_mode').value()), - 'write_frame': n.knob('frame').value() - } - group_read_nodes.append(ndata) - - # create reads in one go - for oneread in group_read_nodes: - # create read node - create_read_node(oneread, comp_start) diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/__init__.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/__init__.py deleted file mode 100644 index 03f3b29ee7..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. -# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. 
-# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -# Copyright 2007 Google Inc. All Rights Reserved. - -__version__ = '3.20.1' diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/any_pb2.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/any_pb2.py deleted file mode 100644 index 9121193d11..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/any_pb2.py +++ /dev/null @@ -1,26 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/protobuf/any.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x19google/protobuf/any.proto\x12\x0fgoogle.protobuf\"&\n\x03\x41ny\x12\x10\n\x08type_url\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x0c\x42v\n\x13\x63om.google.protobufB\x08\x41nyProtoP\x01Z,google.golang.org/protobuf/types/known/anypb\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') - -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.any_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\010AnyProtoP\001Z,google.golang.org/protobuf/types/known/anypb\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' - _ANY._serialized_start=46 - _ANY._serialized_end=84 -# @@protoc_insertion_point(module_scope) diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/api_pb2.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/api_pb2.py deleted file mode 100644 index 1721b10a75..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/api_pb2.py +++ /dev/null @@ -1,32 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/protobuf/api.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.protobuf import source_context_pb2 as google_dot_protobuf_dot_source__context__pb2 -from google.protobuf import type_pb2 as google_dot_protobuf_dot_type__pb2 - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x19google/protobuf/api.proto\x12\x0fgoogle.protobuf\x1a$google/protobuf/source_context.proto\x1a\x1agoogle/protobuf/type.proto\"\x81\x02\n\x03\x41pi\x12\x0c\n\x04name\x18\x01 \x01(\t\x12(\n\x07methods\x18\x02 \x03(\x0b\x32\x17.google.protobuf.Method\x12(\n\x07options\x18\x03 \x03(\x0b\x32\x17.google.protobuf.Option\x12\x0f\n\x07version\x18\x04 \x01(\t\x12\x36\n\x0esource_context\x18\x05 \x01(\x0b\x32\x1e.google.protobuf.SourceContext\x12&\n\x06mixins\x18\x06 \x03(\x0b\x32\x16.google.protobuf.Mixin\x12\'\n\x06syntax\x18\x07 \x01(\x0e\x32\x17.google.protobuf.Syntax\"\xd5\x01\n\x06Method\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x18\n\x10request_type_url\x18\x02 \x01(\t\x12\x19\n\x11request_streaming\x18\x03 \x01(\x08\x12\x19\n\x11response_type_url\x18\x04 \x01(\t\x12\x1a\n\x12response_streaming\x18\x05 \x01(\x08\x12(\n\x07options\x18\x06 \x03(\x0b\x32\x17.google.protobuf.Option\x12\'\n\x06syntax\x18\x07 \x01(\x0e\x32\x17.google.protobuf.Syntax\"#\n\x05Mixin\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04root\x18\x02 \x01(\tBv\n\x13\x63om.google.protobufB\x08\x41piProtoP\x01Z,google.golang.org/protobuf/types/known/apipb\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') - -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.api_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\010ApiProtoP\001Z,google.golang.org/protobuf/types/known/apipb\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' - _API._serialized_start=113 - _API._serialized_end=370 - _METHOD._serialized_start=373 - _METHOD._serialized_end=586 - _MIXIN._serialized_start=588 - _MIXIN._serialized_end=623 -# @@protoc_insertion_point(module_scope) diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/compiler/__init__.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/compiler/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/compiler/plugin_pb2.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/compiler/plugin_pb2.py deleted file mode 100644 index 715a891370..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/compiler/plugin_pb2.py +++ /dev/null @@ -1,35 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/protobuf/compiler/plugin.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2 - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n%google/protobuf/compiler/plugin.proto\x12\x18google.protobuf.compiler\x1a google/protobuf/descriptor.proto\"F\n\x07Version\x12\r\n\x05major\x18\x01 \x01(\x05\x12\r\n\x05minor\x18\x02 \x01(\x05\x12\r\n\x05patch\x18\x03 \x01(\x05\x12\x0e\n\x06suffix\x18\x04 \x01(\t\"\xba\x01\n\x14\x43odeGeneratorRequest\x12\x18\n\x10\x66ile_to_generate\x18\x01 \x03(\t\x12\x11\n\tparameter\x18\x02 \x01(\t\x12\x38\n\nproto_file\x18\x0f \x03(\x0b\x32$.google.protobuf.FileDescriptorProto\x12;\n\x10\x63ompiler_version\x18\x03 \x01(\x0b\x32!.google.protobuf.compiler.Version\"\xc1\x02\n\x15\x43odeGeneratorResponse\x12\r\n\x05\x65rror\x18\x01 \x01(\t\x12\x1a\n\x12supported_features\x18\x02 \x01(\x04\x12\x42\n\x04\x66ile\x18\x0f \x03(\x0b\x32\x34.google.protobuf.compiler.CodeGeneratorResponse.File\x1a\x7f\n\x04\x46ile\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x17\n\x0finsertion_point\x18\x02 \x01(\t\x12\x0f\n\x07\x63ontent\x18\x0f \x01(\t\x12?\n\x13generated_code_info\x18\x10 \x01(\x0b\x32\".google.protobuf.GeneratedCodeInfo\"8\n\x07\x46\x65\x61ture\x12\x10\n\x0c\x46\x45\x41TURE_NONE\x10\x00\x12\x1b\n\x17\x46\x45\x41TURE_PROTO3_OPTIONAL\x10\x01\x42W\n\x1c\x63om.google.protobuf.compilerB\x0cPluginProtosZ)google.golang.org/protobuf/types/pluginpb') - -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.compiler.plugin_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'\n\034com.google.protobuf.compilerB\014PluginProtosZ)google.golang.org/protobuf/types/pluginpb' - _VERSION._serialized_start=101 - _VERSION._serialized_end=171 - _CODEGENERATORREQUEST._serialized_start=174 - _CODEGENERATORREQUEST._serialized_end=360 - _CODEGENERATORRESPONSE._serialized_start=363 - _CODEGENERATORRESPONSE._serialized_end=684 - _CODEGENERATORRESPONSE_FILE._serialized_start=499 - _CODEGENERATORRESPONSE_FILE._serialized_end=626 - _CODEGENERATORRESPONSE_FEATURE._serialized_start=628 - _CODEGENERATORRESPONSE_FEATURE._serialized_end=684 -# @@protoc_insertion_point(module_scope) diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/descriptor.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/descriptor.py deleted file mode 100644 index ad70be9a11..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/descriptor.py +++ /dev/null @@ -1,1224 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. -# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. 
-# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -"""Descriptors essentially contain exactly the information found in a .proto -file, in types that make this information accessible in Python. -""" - -__author__ = 'robinson@google.com (Will Robinson)' - -import threading -import warnings - -from google.protobuf.internal import api_implementation - -_USE_C_DESCRIPTORS = False -if api_implementation.Type() == 'cpp': - # Used by MakeDescriptor in cpp mode - import binascii - import os - from google.protobuf.pyext import _message - _USE_C_DESCRIPTORS = True - - -class Error(Exception): - """Base error for this module.""" - - -class TypeTransformationError(Error): - """Error transforming between python proto type and corresponding C++ type.""" - - -if _USE_C_DESCRIPTORS: - # This metaclass allows to override the behavior of code like - # isinstance(my_descriptor, FieldDescriptor) - # and make it return True when the descriptor is an instance of the extension - # type written in C++. - class DescriptorMetaclass(type): - def __instancecheck__(cls, obj): - if super(DescriptorMetaclass, cls).__instancecheck__(obj): - return True - if isinstance(obj, cls._C_DESCRIPTOR_CLASS): - return True - return False -else: - # The standard metaclass; nothing changes. - DescriptorMetaclass = type - - -class _Lock(object): - """Wrapper class of threading.Lock(), which is allowed by 'with'.""" - - def __new__(cls): - self = object.__new__(cls) - self._lock = threading.Lock() # pylint: disable=protected-access - return self - - def __enter__(self): - self._lock.acquire() - - def __exit__(self, exc_type, exc_value, exc_tb): - self._lock.release() - - -_lock = threading.Lock() - - -def _Deprecated(name): - if _Deprecated.count > 0: - _Deprecated.count -= 1 - warnings.warn( - 'Call to deprecated create function %s(). Note: Create unlinked ' - 'descriptors is going to go away. Please use get/find descriptors from ' - 'generated code or query the descriptor_pool.' - % name, - category=DeprecationWarning, stacklevel=3) - - -# Deprecated warnings will print 100 times at most which should be enough for -# users to notice and do not cause timeout. -_Deprecated.count = 100 - - -_internal_create_key = object() - - -class DescriptorBase(metaclass=DescriptorMetaclass): - - """Descriptors base class. - - This class is the base of all descriptor classes. 
It provides common options - related functionality. - - Attributes: - has_options: True if the descriptor has non-default options. Usually it - is not necessary to read this -- just call GetOptions() which will - happily return the default instance. However, it's sometimes useful - for efficiency, and also useful inside the protobuf implementation to - avoid some bootstrapping issues. - """ - - if _USE_C_DESCRIPTORS: - # The class, or tuple of classes, that are considered as "virtual - # subclasses" of this descriptor class. - _C_DESCRIPTOR_CLASS = () - - def __init__(self, options, serialized_options, options_class_name): - """Initialize the descriptor given its options message and the name of the - class of the options message. The name of the class is required in case - the options message is None and has to be created. - """ - self._options = options - self._options_class_name = options_class_name - self._serialized_options = serialized_options - - # Does this descriptor have non-default options? - self.has_options = (options is not None) or (serialized_options is not None) - - def _SetOptions(self, options, options_class_name): - """Sets the descriptor's options - - This function is used in generated proto2 files to update descriptor - options. It must not be used outside proto2. - """ - self._options = options - self._options_class_name = options_class_name - - # Does this descriptor have non-default options? - self.has_options = options is not None - - def GetOptions(self): - """Retrieves descriptor options. - - This method returns the options set or creates the default options for the - descriptor. - """ - if self._options: - return self._options - - from google.protobuf import descriptor_pb2 - try: - options_class = getattr(descriptor_pb2, - self._options_class_name) - except AttributeError: - raise RuntimeError('Unknown options class name %s!' % - (self._options_class_name)) - - with _lock: - if self._serialized_options is None: - self._options = options_class() - else: - self._options = _ParseOptions(options_class(), - self._serialized_options) - - return self._options - - -class _NestedDescriptorBase(DescriptorBase): - """Common class for descriptors that can be nested.""" - - def __init__(self, options, options_class_name, name, full_name, - file, containing_type, serialized_start=None, - serialized_end=None, serialized_options=None): - """Constructor. - - Args: - options: Protocol message options or None - to use default message options. - options_class_name (str): The class name of the above options. - name (str): Name of this protocol message type. - full_name (str): Fully-qualified name of this protocol message type, - which will include protocol "package" name and the name of any - enclosing types. - file (FileDescriptor): Reference to file info. - containing_type: if provided, this is a nested descriptor, with this - descriptor as parent, otherwise None. - serialized_start: The start index (inclusive) in block in the - file.serialized_pb that describes this descriptor. - serialized_end: The end index (exclusive) in block in the - file.serialized_pb that describes this descriptor. - serialized_options: Protocol message serialized options or None. - """ - super(_NestedDescriptorBase, self).__init__( - options, serialized_options, options_class_name) - - self.name = name - # TODO(falk): Add function to calculate full_name instead of having it in - # memory? 
- self.full_name = full_name - self.file = file - self.containing_type = containing_type - - self._serialized_start = serialized_start - self._serialized_end = serialized_end - - def CopyToProto(self, proto): - """Copies this to the matching proto in descriptor_pb2. - - Args: - proto: An empty proto instance from descriptor_pb2. - - Raises: - Error: If self couldn't be serialized, due to to few constructor - arguments. - """ - if (self.file is not None and - self._serialized_start is not None and - self._serialized_end is not None): - proto.ParseFromString(self.file.serialized_pb[ - self._serialized_start:self._serialized_end]) - else: - raise Error('Descriptor does not contain serialization.') - - -class Descriptor(_NestedDescriptorBase): - - """Descriptor for a protocol message type. - - Attributes: - name (str): Name of this protocol message type. - full_name (str): Fully-qualified name of this protocol message type, - which will include protocol "package" name and the name of any - enclosing types. - containing_type (Descriptor): Reference to the descriptor of the type - containing us, or None if this is top-level. - fields (list[FieldDescriptor]): Field descriptors for all fields in - this type. - fields_by_number (dict(int, FieldDescriptor)): Same - :class:`FieldDescriptor` objects as in :attr:`fields`, but indexed - by "number" attribute in each FieldDescriptor. - fields_by_name (dict(str, FieldDescriptor)): Same - :class:`FieldDescriptor` objects as in :attr:`fields`, but indexed by - "name" attribute in each :class:`FieldDescriptor`. - nested_types (list[Descriptor]): Descriptor references - for all protocol message types nested within this one. - nested_types_by_name (dict(str, Descriptor)): Same Descriptor - objects as in :attr:`nested_types`, but indexed by "name" attribute - in each Descriptor. - enum_types (list[EnumDescriptor]): :class:`EnumDescriptor` references - for all enums contained within this type. - enum_types_by_name (dict(str, EnumDescriptor)): Same - :class:`EnumDescriptor` objects as in :attr:`enum_types`, but - indexed by "name" attribute in each EnumDescriptor. - enum_values_by_name (dict(str, EnumValueDescriptor)): Dict mapping - from enum value name to :class:`EnumValueDescriptor` for that value. - extensions (list[FieldDescriptor]): All extensions defined directly - within this message type (NOT within a nested type). - extensions_by_name (dict(str, FieldDescriptor)): Same FieldDescriptor - objects as :attr:`extensions`, but indexed by "name" attribute of each - FieldDescriptor. - is_extendable (bool): Does this type define any extension ranges? - oneofs (list[OneofDescriptor]): The list of descriptors for oneof fields - in this message. - oneofs_by_name (dict(str, OneofDescriptor)): Same objects as in - :attr:`oneofs`, but indexed by "name" attribute. - file (FileDescriptor): Reference to file descriptor. 
- - """ - - if _USE_C_DESCRIPTORS: - _C_DESCRIPTOR_CLASS = _message.Descriptor - - def __new__( - cls, - name=None, - full_name=None, - filename=None, - containing_type=None, - fields=None, - nested_types=None, - enum_types=None, - extensions=None, - options=None, - serialized_options=None, - is_extendable=True, - extension_ranges=None, - oneofs=None, - file=None, # pylint: disable=redefined-builtin - serialized_start=None, - serialized_end=None, - syntax=None, - create_key=None): - _message.Message._CheckCalledFromGeneratedFile() - return _message.default_pool.FindMessageTypeByName(full_name) - - # NOTE(tmarek): The file argument redefining a builtin is nothing we can - # fix right now since we don't know how many clients already rely on the - # name of the argument. - def __init__(self, name, full_name, filename, containing_type, fields, - nested_types, enum_types, extensions, options=None, - serialized_options=None, - is_extendable=True, extension_ranges=None, oneofs=None, - file=None, serialized_start=None, serialized_end=None, # pylint: disable=redefined-builtin - syntax=None, create_key=None): - """Arguments to __init__() are as described in the description - of Descriptor fields above. - - Note that filename is an obsolete argument, that is not used anymore. - Please use file.name to access this as an attribute. - """ - if create_key is not _internal_create_key: - _Deprecated('Descriptor') - - super(Descriptor, self).__init__( - options, 'MessageOptions', name, full_name, file, - containing_type, serialized_start=serialized_start, - serialized_end=serialized_end, serialized_options=serialized_options) - - # We have fields in addition to fields_by_name and fields_by_number, - # so that: - # 1. Clients can index fields by "order in which they're listed." - # 2. Clients can easily iterate over all fields with the terse - # syntax: for f in descriptor.fields: ... - self.fields = fields - for field in self.fields: - field.containing_type = self - self.fields_by_number = dict((f.number, f) for f in fields) - self.fields_by_name = dict((f.name, f) for f in fields) - self._fields_by_camelcase_name = None - - self.nested_types = nested_types - for nested_type in nested_types: - nested_type.containing_type = self - self.nested_types_by_name = dict((t.name, t) for t in nested_types) - - self.enum_types = enum_types - for enum_type in self.enum_types: - enum_type.containing_type = self - self.enum_types_by_name = dict((t.name, t) for t in enum_types) - self.enum_values_by_name = dict( - (v.name, v) for t in enum_types for v in t.values) - - self.extensions = extensions - for extension in self.extensions: - extension.extension_scope = self - self.extensions_by_name = dict((f.name, f) for f in extensions) - self.is_extendable = is_extendable - self.extension_ranges = extension_ranges - self.oneofs = oneofs if oneofs is not None else [] - self.oneofs_by_name = dict((o.name, o) for o in self.oneofs) - for oneof in self.oneofs: - oneof.containing_type = self - self.syntax = syntax or "proto2" - - @property - def fields_by_camelcase_name(self): - """Same FieldDescriptor objects as in :attr:`fields`, but indexed by - :attr:`FieldDescriptor.camelcase_name`. - """ - if self._fields_by_camelcase_name is None: - self._fields_by_camelcase_name = dict( - (f.camelcase_name, f) for f in self.fields) - return self._fields_by_camelcase_name - - def EnumValueName(self, enum, value): - """Returns the string name of an enum value. - - This is just a small helper method to simplify a common operation. 
- - Args: - enum: string name of the Enum. - value: int, value of the enum. - - Returns: - string name of the enum value. - - Raises: - KeyError if either the Enum doesn't exist or the value is not a valid - value for the enum. - """ - return self.enum_types_by_name[enum].values_by_number[value].name - - def CopyToProto(self, proto): - """Copies this to a descriptor_pb2.DescriptorProto. - - Args: - proto: An empty descriptor_pb2.DescriptorProto. - """ - # This function is overridden to give a better doc comment. - super(Descriptor, self).CopyToProto(proto) - - -# TODO(robinson): We should have aggressive checking here, -# for example: -# * If you specify a repeated field, you should not be allowed -# to specify a default value. -# * [Other examples here as needed]. -# -# TODO(robinson): for this and other *Descriptor classes, we -# might also want to lock things down aggressively (e.g., -# prevent clients from setting the attributes). Having -# stronger invariants here in general will reduce the number -# of runtime checks we must do in reflection.py... -class FieldDescriptor(DescriptorBase): - - """Descriptor for a single field in a .proto file. - - Attributes: - name (str): Name of this field, exactly as it appears in .proto. - full_name (str): Name of this field, including containing scope. This is - particularly relevant for extensions. - index (int): Dense, 0-indexed index giving the order that this - field textually appears within its message in the .proto file. - number (int): Tag number declared for this field in the .proto file. - - type (int): (One of the TYPE_* constants below) Declared type. - cpp_type (int): (One of the CPPTYPE_* constants below) C++ type used to - represent this field. - - label (int): (One of the LABEL_* constants below) Tells whether this - field is optional, required, or repeated. - has_default_value (bool): True if this field has a default value defined, - otherwise false. - default_value (Varies): Default value of this field. Only - meaningful for non-repeated scalar fields. Repeated fields - should always set this to [], and non-repeated composite - fields should always set this to None. - - containing_type (Descriptor): Descriptor of the protocol message - type that contains this field. Set by the Descriptor constructor - if we're passed into one. - Somewhat confusingly, for extension fields, this is the - descriptor of the EXTENDED message, not the descriptor - of the message containing this field. (See is_extension and - extension_scope below). - message_type (Descriptor): If a composite field, a descriptor - of the message type contained in this field. Otherwise, this is None. - enum_type (EnumDescriptor): If this field contains an enum, a - descriptor of that enum. Otherwise, this is None. - - is_extension: True iff this describes an extension field. - extension_scope (Descriptor): Only meaningful if is_extension is True. - Gives the message that immediately contains this extension field. - Will be None iff we're a top-level (file-level) extension field. - - options (descriptor_pb2.FieldOptions): Protocol message field options or - None to use default field options. - - containing_oneof (OneofDescriptor): If the field is a member of a oneof - union, contains its descriptor. Otherwise, None. - - file (FileDescriptor): Reference to file descriptor. - """ - - # Must be consistent with C++ FieldDescriptor::Type enum in - # descriptor.h. - # - # TODO(robinson): Find a way to eliminate this repetition. 
- TYPE_DOUBLE = 1 - TYPE_FLOAT = 2 - TYPE_INT64 = 3 - TYPE_UINT64 = 4 - TYPE_INT32 = 5 - TYPE_FIXED64 = 6 - TYPE_FIXED32 = 7 - TYPE_BOOL = 8 - TYPE_STRING = 9 - TYPE_GROUP = 10 - TYPE_MESSAGE = 11 - TYPE_BYTES = 12 - TYPE_UINT32 = 13 - TYPE_ENUM = 14 - TYPE_SFIXED32 = 15 - TYPE_SFIXED64 = 16 - TYPE_SINT32 = 17 - TYPE_SINT64 = 18 - MAX_TYPE = 18 - - # Must be consistent with C++ FieldDescriptor::CppType enum in - # descriptor.h. - # - # TODO(robinson): Find a way to eliminate this repetition. - CPPTYPE_INT32 = 1 - CPPTYPE_INT64 = 2 - CPPTYPE_UINT32 = 3 - CPPTYPE_UINT64 = 4 - CPPTYPE_DOUBLE = 5 - CPPTYPE_FLOAT = 6 - CPPTYPE_BOOL = 7 - CPPTYPE_ENUM = 8 - CPPTYPE_STRING = 9 - CPPTYPE_MESSAGE = 10 - MAX_CPPTYPE = 10 - - _PYTHON_TO_CPP_PROTO_TYPE_MAP = { - TYPE_DOUBLE: CPPTYPE_DOUBLE, - TYPE_FLOAT: CPPTYPE_FLOAT, - TYPE_ENUM: CPPTYPE_ENUM, - TYPE_INT64: CPPTYPE_INT64, - TYPE_SINT64: CPPTYPE_INT64, - TYPE_SFIXED64: CPPTYPE_INT64, - TYPE_UINT64: CPPTYPE_UINT64, - TYPE_FIXED64: CPPTYPE_UINT64, - TYPE_INT32: CPPTYPE_INT32, - TYPE_SFIXED32: CPPTYPE_INT32, - TYPE_SINT32: CPPTYPE_INT32, - TYPE_UINT32: CPPTYPE_UINT32, - TYPE_FIXED32: CPPTYPE_UINT32, - TYPE_BYTES: CPPTYPE_STRING, - TYPE_STRING: CPPTYPE_STRING, - TYPE_BOOL: CPPTYPE_BOOL, - TYPE_MESSAGE: CPPTYPE_MESSAGE, - TYPE_GROUP: CPPTYPE_MESSAGE - } - - # Must be consistent with C++ FieldDescriptor::Label enum in - # descriptor.h. - # - # TODO(robinson): Find a way to eliminate this repetition. - LABEL_OPTIONAL = 1 - LABEL_REQUIRED = 2 - LABEL_REPEATED = 3 - MAX_LABEL = 3 - - # Must be consistent with C++ constants kMaxNumber, kFirstReservedNumber, - # and kLastReservedNumber in descriptor.h - MAX_FIELD_NUMBER = (1 << 29) - 1 - FIRST_RESERVED_FIELD_NUMBER = 19000 - LAST_RESERVED_FIELD_NUMBER = 19999 - - if _USE_C_DESCRIPTORS: - _C_DESCRIPTOR_CLASS = _message.FieldDescriptor - - def __new__(cls, name, full_name, index, number, type, cpp_type, label, - default_value, message_type, enum_type, containing_type, - is_extension, extension_scope, options=None, - serialized_options=None, - has_default_value=True, containing_oneof=None, json_name=None, - file=None, create_key=None): # pylint: disable=redefined-builtin - _message.Message._CheckCalledFromGeneratedFile() - if is_extension: - return _message.default_pool.FindExtensionByName(full_name) - else: - return _message.default_pool.FindFieldByName(full_name) - - def __init__(self, name, full_name, index, number, type, cpp_type, label, - default_value, message_type, enum_type, containing_type, - is_extension, extension_scope, options=None, - serialized_options=None, - has_default_value=True, containing_oneof=None, json_name=None, - file=None, create_key=None): # pylint: disable=redefined-builtin - """The arguments are as described in the description of FieldDescriptor - attributes above. - - Note that containing_type may be None, and may be set later if necessary - (to deal with circular references between message types, for example). - Likewise for extension_scope. 
- """ - if create_key is not _internal_create_key: - _Deprecated('FieldDescriptor') - - super(FieldDescriptor, self).__init__( - options, serialized_options, 'FieldOptions') - self.name = name - self.full_name = full_name - self.file = file - self._camelcase_name = None - if json_name is None: - self.json_name = _ToJsonName(name) - else: - self.json_name = json_name - self.index = index - self.number = number - self.type = type - self.cpp_type = cpp_type - self.label = label - self.has_default_value = has_default_value - self.default_value = default_value - self.containing_type = containing_type - self.message_type = message_type - self.enum_type = enum_type - self.is_extension = is_extension - self.extension_scope = extension_scope - self.containing_oneof = containing_oneof - if api_implementation.Type() == 'cpp': - if is_extension: - self._cdescriptor = _message.default_pool.FindExtensionByName(full_name) - else: - self._cdescriptor = _message.default_pool.FindFieldByName(full_name) - else: - self._cdescriptor = None - - @property - def camelcase_name(self): - """Camelcase name of this field. - - Returns: - str: the name in CamelCase. - """ - if self._camelcase_name is None: - self._camelcase_name = _ToCamelCase(self.name) - return self._camelcase_name - - @property - def has_presence(self): - """Whether the field distinguishes between unpopulated and default values. - - Raises: - RuntimeError: singular field that is not linked with message nor file. - """ - if self.label == FieldDescriptor.LABEL_REPEATED: - return False - if (self.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE or - self.containing_oneof): - return True - if hasattr(self.file, 'syntax'): - return self.file.syntax == 'proto2' - if hasattr(self.message_type, 'syntax'): - return self.message_type.syntax == 'proto2' - raise RuntimeError( - 'has_presence is not ready to use because field %s is not' - ' linked with message type nor file' % self.full_name) - - @staticmethod - def ProtoTypeToCppProtoType(proto_type): - """Converts from a Python proto type to a C++ Proto Type. - - The Python ProtocolBuffer classes specify both the 'Python' datatype and the - 'C++' datatype - and they're not the same. This helper method should - translate from one to another. - - Args: - proto_type: the Python proto type (descriptor.FieldDescriptor.TYPE_*) - Returns: - int: descriptor.FieldDescriptor.CPPTYPE_*, the C++ type. - Raises: - TypeTransformationError: when the Python proto type isn't known. - """ - try: - return FieldDescriptor._PYTHON_TO_CPP_PROTO_TYPE_MAP[proto_type] - except KeyError: - raise TypeTransformationError('Unknown proto_type: %s' % proto_type) - - -class EnumDescriptor(_NestedDescriptorBase): - - """Descriptor for an enum defined in a .proto file. - - Attributes: - name (str): Name of the enum type. - full_name (str): Full name of the type, including package name - and any enclosing type(s). - - values (list[EnumValueDescriptor]): List of the values - in this enum. - values_by_name (dict(str, EnumValueDescriptor)): Same as :attr:`values`, - but indexed by the "name" field of each EnumValueDescriptor. - values_by_number (dict(int, EnumValueDescriptor)): Same as :attr:`values`, - but indexed by the "number" field of each EnumValueDescriptor. - containing_type (Descriptor): Descriptor of the immediate containing - type of this enum, or None if this is an enum defined at the - top level in a .proto file. Set by Descriptor's constructor - if we're passed into one. - file (FileDescriptor): Reference to file descriptor. 
- options (descriptor_pb2.EnumOptions): Enum options message or - None to use default enum options. - """ - - if _USE_C_DESCRIPTORS: - _C_DESCRIPTOR_CLASS = _message.EnumDescriptor - - def __new__(cls, name, full_name, filename, values, - containing_type=None, options=None, - serialized_options=None, file=None, # pylint: disable=redefined-builtin - serialized_start=None, serialized_end=None, create_key=None): - _message.Message._CheckCalledFromGeneratedFile() - return _message.default_pool.FindEnumTypeByName(full_name) - - def __init__(self, name, full_name, filename, values, - containing_type=None, options=None, - serialized_options=None, file=None, # pylint: disable=redefined-builtin - serialized_start=None, serialized_end=None, create_key=None): - """Arguments are as described in the attribute description above. - - Note that filename is an obsolete argument, that is not used anymore. - Please use file.name to access this as an attribute. - """ - if create_key is not _internal_create_key: - _Deprecated('EnumDescriptor') - - super(EnumDescriptor, self).__init__( - options, 'EnumOptions', name, full_name, file, - containing_type, serialized_start=serialized_start, - serialized_end=serialized_end, serialized_options=serialized_options) - - self.values = values - for value in self.values: - value.type = self - self.values_by_name = dict((v.name, v) for v in values) - # Values are reversed to ensure that the first alias is retained. - self.values_by_number = dict((v.number, v) for v in reversed(values)) - - def CopyToProto(self, proto): - """Copies this to a descriptor_pb2.EnumDescriptorProto. - - Args: - proto (descriptor_pb2.EnumDescriptorProto): An empty descriptor proto. - """ - # This function is overridden to give a better doc comment. - super(EnumDescriptor, self).CopyToProto(proto) - - -class EnumValueDescriptor(DescriptorBase): - - """Descriptor for a single value within an enum. - - Attributes: - name (str): Name of this value. - index (int): Dense, 0-indexed index giving the order that this - value appears textually within its enum in the .proto file. - number (int): Actual number assigned to this enum value. - type (EnumDescriptor): :class:`EnumDescriptor` to which this value - belongs. Set by :class:`EnumDescriptor`'s constructor if we're - passed into one. - options (descriptor_pb2.EnumValueOptions): Enum value options message or - None to use default enum value options options. - """ - - if _USE_C_DESCRIPTORS: - _C_DESCRIPTOR_CLASS = _message.EnumValueDescriptor - - def __new__(cls, name, index, number, - type=None, # pylint: disable=redefined-builtin - options=None, serialized_options=None, create_key=None): - _message.Message._CheckCalledFromGeneratedFile() - # There is no way we can build a complete EnumValueDescriptor with the - # given parameters (the name of the Enum is not known, for example). - # Fortunately generated files just pass it to the EnumDescriptor() - # constructor, which will ignore it, so returning None is good enough. 
- return None - - def __init__(self, name, index, number, - type=None, # pylint: disable=redefined-builtin - options=None, serialized_options=None, create_key=None): - """Arguments are as described in the attribute description above.""" - if create_key is not _internal_create_key: - _Deprecated('EnumValueDescriptor') - - super(EnumValueDescriptor, self).__init__( - options, serialized_options, 'EnumValueOptions') - self.name = name - self.index = index - self.number = number - self.type = type - - -class OneofDescriptor(DescriptorBase): - """Descriptor for a oneof field. - - Attributes: - name (str): Name of the oneof field. - full_name (str): Full name of the oneof field, including package name. - index (int): 0-based index giving the order of the oneof field inside - its containing type. - containing_type (Descriptor): :class:`Descriptor` of the protocol message - type that contains this field. Set by the :class:`Descriptor` constructor - if we're passed into one. - fields (list[FieldDescriptor]): The list of field descriptors this - oneof can contain. - """ - - if _USE_C_DESCRIPTORS: - _C_DESCRIPTOR_CLASS = _message.OneofDescriptor - - def __new__( - cls, name, full_name, index, containing_type, fields, options=None, - serialized_options=None, create_key=None): - _message.Message._CheckCalledFromGeneratedFile() - return _message.default_pool.FindOneofByName(full_name) - - def __init__( - self, name, full_name, index, containing_type, fields, options=None, - serialized_options=None, create_key=None): - """Arguments are as described in the attribute description above.""" - if create_key is not _internal_create_key: - _Deprecated('OneofDescriptor') - - super(OneofDescriptor, self).__init__( - options, serialized_options, 'OneofOptions') - self.name = name - self.full_name = full_name - self.index = index - self.containing_type = containing_type - self.fields = fields - - -class ServiceDescriptor(_NestedDescriptorBase): - - """Descriptor for a service. - - Attributes: - name (str): Name of the service. - full_name (str): Full name of the service, including package name. - index (int): 0-indexed index giving the order that this services - definition appears within the .proto file. - methods (list[MethodDescriptor]): List of methods provided by this - service. - methods_by_name (dict(str, MethodDescriptor)): Same - :class:`MethodDescriptor` objects as in :attr:`methods_by_name`, but - indexed by "name" attribute in each :class:`MethodDescriptor`. - options (descriptor_pb2.ServiceOptions): Service options message or - None to use default service options. - file (FileDescriptor): Reference to file info. 
- """ - - if _USE_C_DESCRIPTORS: - _C_DESCRIPTOR_CLASS = _message.ServiceDescriptor - - def __new__( - cls, - name=None, - full_name=None, - index=None, - methods=None, - options=None, - serialized_options=None, - file=None, # pylint: disable=redefined-builtin - serialized_start=None, - serialized_end=None, - create_key=None): - _message.Message._CheckCalledFromGeneratedFile() # pylint: disable=protected-access - return _message.default_pool.FindServiceByName(full_name) - - def __init__(self, name, full_name, index, methods, options=None, - serialized_options=None, file=None, # pylint: disable=redefined-builtin - serialized_start=None, serialized_end=None, create_key=None): - if create_key is not _internal_create_key: - _Deprecated('ServiceDescriptor') - - super(ServiceDescriptor, self).__init__( - options, 'ServiceOptions', name, full_name, file, - None, serialized_start=serialized_start, - serialized_end=serialized_end, serialized_options=serialized_options) - self.index = index - self.methods = methods - self.methods_by_name = dict((m.name, m) for m in methods) - # Set the containing service for each method in this service. - for method in self.methods: - method.containing_service = self - - def FindMethodByName(self, name): - """Searches for the specified method, and returns its descriptor. - - Args: - name (str): Name of the method. - Returns: - MethodDescriptor or None: the descriptor for the requested method, if - found. - """ - return self.methods_by_name.get(name, None) - - def CopyToProto(self, proto): - """Copies this to a descriptor_pb2.ServiceDescriptorProto. - - Args: - proto (descriptor_pb2.ServiceDescriptorProto): An empty descriptor proto. - """ - # This function is overridden to give a better doc comment. - super(ServiceDescriptor, self).CopyToProto(proto) - - -class MethodDescriptor(DescriptorBase): - - """Descriptor for a method in a service. - - Attributes: - name (str): Name of the method within the service. - full_name (str): Full name of method. - index (int): 0-indexed index of the method inside the service. - containing_service (ServiceDescriptor): The service that contains this - method. - input_type (Descriptor): The descriptor of the message that this method - accepts. - output_type (Descriptor): The descriptor of the message that this method - returns. - client_streaming (bool): Whether this method uses client streaming. - server_streaming (bool): Whether this method uses server streaming. - options (descriptor_pb2.MethodOptions or None): Method options message, or - None to use default method options. - """ - - if _USE_C_DESCRIPTORS: - _C_DESCRIPTOR_CLASS = _message.MethodDescriptor - - def __new__(cls, - name, - full_name, - index, - containing_service, - input_type, - output_type, - client_streaming=False, - server_streaming=False, - options=None, - serialized_options=None, - create_key=None): - _message.Message._CheckCalledFromGeneratedFile() # pylint: disable=protected-access - return _message.default_pool.FindMethodByName(full_name) - - def __init__(self, - name, - full_name, - index, - containing_service, - input_type, - output_type, - client_streaming=False, - server_streaming=False, - options=None, - serialized_options=None, - create_key=None): - """The arguments are as described in the description of MethodDescriptor - attributes above. - - Note that containing_service may be None, and may be set later if necessary. 
- """ - if create_key is not _internal_create_key: - _Deprecated('MethodDescriptor') - - super(MethodDescriptor, self).__init__( - options, serialized_options, 'MethodOptions') - self.name = name - self.full_name = full_name - self.index = index - self.containing_service = containing_service - self.input_type = input_type - self.output_type = output_type - self.client_streaming = client_streaming - self.server_streaming = server_streaming - - def CopyToProto(self, proto): - """Copies this to a descriptor_pb2.MethodDescriptorProto. - - Args: - proto (descriptor_pb2.MethodDescriptorProto): An empty descriptor proto. - - Raises: - Error: If self couldn't be serialized, due to too few constructor - arguments. - """ - if self.containing_service is not None: - from google.protobuf import descriptor_pb2 - service_proto = descriptor_pb2.ServiceDescriptorProto() - self.containing_service.CopyToProto(service_proto) - proto.CopyFrom(service_proto.method[self.index]) - else: - raise Error('Descriptor does not contain a service.') - - -class FileDescriptor(DescriptorBase): - """Descriptor for a file. Mimics the descriptor_pb2.FileDescriptorProto. - - Note that :attr:`enum_types_by_name`, :attr:`extensions_by_name`, and - :attr:`dependencies` fields are only set by the - :py:mod:`google.protobuf.message_factory` module, and not by the generated - proto code. - - Attributes: - name (str): Name of file, relative to root of source tree. - package (str): Name of the package - syntax (str): string indicating syntax of the file (can be "proto2" or - "proto3") - serialized_pb (bytes): Byte string of serialized - :class:`descriptor_pb2.FileDescriptorProto`. - dependencies (list[FileDescriptor]): List of other :class:`FileDescriptor` - objects this :class:`FileDescriptor` depends on. - public_dependencies (list[FileDescriptor]): A subset of - :attr:`dependencies`, which were declared as "public". - message_types_by_name (dict(str, Descriptor)): Mapping from message names - to their :class:`Descriptor`. - enum_types_by_name (dict(str, EnumDescriptor)): Mapping from enum names to - their :class:`EnumDescriptor`. - extensions_by_name (dict(str, FieldDescriptor)): Mapping from extension - names declared at file scope to their :class:`FieldDescriptor`. - services_by_name (dict(str, ServiceDescriptor)): Mapping from services' - names to their :class:`ServiceDescriptor`. - pool (DescriptorPool): The pool this descriptor belongs to. When not - passed to the constructor, the global default pool is used. - """ - - if _USE_C_DESCRIPTORS: - _C_DESCRIPTOR_CLASS = _message.FileDescriptor - - def __new__(cls, name, package, options=None, - serialized_options=None, serialized_pb=None, - dependencies=None, public_dependencies=None, - syntax=None, pool=None, create_key=None): - # FileDescriptor() is called from various places, not only from generated - # files, to register dynamic proto files and messages. 
- # pylint: disable=g-explicit-bool-comparison - if serialized_pb == b'': - # Cpp generated code must be linked in if serialized_pb is '' - try: - return _message.default_pool.FindFileByName(name) - except KeyError: - raise RuntimeError('Please link in cpp generated lib for %s' % (name)) - elif serialized_pb: - return _message.default_pool.AddSerializedFile(serialized_pb) - else: - return super(FileDescriptor, cls).__new__(cls) - - def __init__(self, name, package, options=None, - serialized_options=None, serialized_pb=None, - dependencies=None, public_dependencies=None, - syntax=None, pool=None, create_key=None): - """Constructor.""" - if create_key is not _internal_create_key: - _Deprecated('FileDescriptor') - - super(FileDescriptor, self).__init__( - options, serialized_options, 'FileOptions') - - if pool is None: - from google.protobuf import descriptor_pool - pool = descriptor_pool.Default() - self.pool = pool - self.message_types_by_name = {} - self.name = name - self.package = package - self.syntax = syntax or "proto2" - self.serialized_pb = serialized_pb - - self.enum_types_by_name = {} - self.extensions_by_name = {} - self.services_by_name = {} - self.dependencies = (dependencies or []) - self.public_dependencies = (public_dependencies or []) - - def CopyToProto(self, proto): - """Copies this to a descriptor_pb2.FileDescriptorProto. - - Args: - proto: An empty descriptor_pb2.FileDescriptorProto. - """ - proto.ParseFromString(self.serialized_pb) - - -def _ParseOptions(message, string): - """Parses serialized options. - - This helper function is used to parse serialized options in generated - proto2 files. It must not be used outside proto2. - """ - message.ParseFromString(string) - return message - - -def _ToCamelCase(name): - """Converts name to camel-case and returns it.""" - capitalize_next = False - result = [] - - for c in name: - if c == '_': - if result: - capitalize_next = True - elif capitalize_next: - result.append(c.upper()) - capitalize_next = False - else: - result += c - - # Lower-case the first letter. - if result and result[0].isupper(): - result[0] = result[0].lower() - return ''.join(result) - - -def _OptionsOrNone(descriptor_proto): - """Returns the value of the field `options`, or None if it is not set.""" - if descriptor_proto.HasField('options'): - return descriptor_proto.options - else: - return None - - -def _ToJsonName(name): - """Converts name to Json name and returns it.""" - capitalize_next = False - result = [] - - for c in name: - if c == '_': - capitalize_next = True - elif capitalize_next: - result.append(c.upper()) - capitalize_next = False - else: - result += c - - return ''.join(result) - - -def MakeDescriptor(desc_proto, package='', build_file_if_cpp=True, - syntax=None): - """Make a protobuf Descriptor given a DescriptorProto protobuf. - - Handles nested descriptors. Note that this is limited to the scope of defining - a message inside of another message. Composite fields can currently only be - resolved if the message is defined in the same scope as the field. - - Args: - desc_proto: The descriptor_pb2.DescriptorProto protobuf message. - package: Optional package name for the new message Descriptor (string). - build_file_if_cpp: Update the C++ descriptor pool if api matches. - Set to False on recursion, so no duplicates are created. - syntax: The syntax/semantics that should be used. Set to "proto3" to get - proto3 field presence semantics. - Returns: - A Descriptor for protobuf messages. 
- """ - if api_implementation.Type() == 'cpp' and build_file_if_cpp: - # The C++ implementation requires all descriptors to be backed by the same - # definition in the C++ descriptor pool. To do this, we build a - # FileDescriptorProto with the same definition as this descriptor and build - # it into the pool. - from google.protobuf import descriptor_pb2 - file_descriptor_proto = descriptor_pb2.FileDescriptorProto() - file_descriptor_proto.message_type.add().MergeFrom(desc_proto) - - # Generate a random name for this proto file to prevent conflicts with any - # imported ones. We need to specify a file name so the descriptor pool - # accepts our FileDescriptorProto, but it is not important what that file - # name is actually set to. - proto_name = binascii.hexlify(os.urandom(16)).decode('ascii') - - if package: - file_descriptor_proto.name = os.path.join(package.replace('.', '/'), - proto_name + '.proto') - file_descriptor_proto.package = package - else: - file_descriptor_proto.name = proto_name + '.proto' - - _message.default_pool.Add(file_descriptor_proto) - result = _message.default_pool.FindFileByName(file_descriptor_proto.name) - - if _USE_C_DESCRIPTORS: - return result.message_types_by_name[desc_proto.name] - - full_message_name = [desc_proto.name] - if package: full_message_name.insert(0, package) - - # Create Descriptors for enum types - enum_types = {} - for enum_proto in desc_proto.enum_type: - full_name = '.'.join(full_message_name + [enum_proto.name]) - enum_desc = EnumDescriptor( - enum_proto.name, full_name, None, [ - EnumValueDescriptor(enum_val.name, ii, enum_val.number, - create_key=_internal_create_key) - for ii, enum_val in enumerate(enum_proto.value)], - create_key=_internal_create_key) - enum_types[full_name] = enum_desc - - # Create Descriptors for nested types - nested_types = {} - for nested_proto in desc_proto.nested_type: - full_name = '.'.join(full_message_name + [nested_proto.name]) - # Nested types are just those defined inside of the message, not all types - # used by fields in the message, so no loops are possible here. 
- nested_desc = MakeDescriptor(nested_proto, - package='.'.join(full_message_name), - build_file_if_cpp=False, - syntax=syntax) - nested_types[full_name] = nested_desc - - fields = [] - for field_proto in desc_proto.field: - full_name = '.'.join(full_message_name + [field_proto.name]) - enum_desc = None - nested_desc = None - if field_proto.json_name: - json_name = field_proto.json_name - else: - json_name = None - if field_proto.HasField('type_name'): - type_name = field_proto.type_name - full_type_name = '.'.join(full_message_name + - [type_name[type_name.rfind('.')+1:]]) - if full_type_name in nested_types: - nested_desc = nested_types[full_type_name] - elif full_type_name in enum_types: - enum_desc = enum_types[full_type_name] - # Else type_name references a non-local type, which isn't implemented - field = FieldDescriptor( - field_proto.name, full_name, field_proto.number - 1, - field_proto.number, field_proto.type, - FieldDescriptor.ProtoTypeToCppProtoType(field_proto.type), - field_proto.label, None, nested_desc, enum_desc, None, False, None, - options=_OptionsOrNone(field_proto), has_default_value=False, - json_name=json_name, create_key=_internal_create_key) - fields.append(field) - - desc_name = '.'.join(full_message_name) - return Descriptor(desc_proto.name, desc_name, None, None, fields, - list(nested_types.values()), list(enum_types.values()), [], - options=_OptionsOrNone(desc_proto), - create_key=_internal_create_key) diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/descriptor_database.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/descriptor_database.py deleted file mode 100644 index 073eddc711..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/descriptor_database.py +++ /dev/null @@ -1,177 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. -# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
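
Note: the MakeDescriptor helper deleted above is also available from the upstream google.protobuf package, which the client code now imports instead of this vendored copy. A minimal usage sketch of the behavior its docstring describes, assuming a protobuf release contemporary with the vendored sources (the 'example' package and 'Pair' message are made up for illustration):

    from google.protobuf import descriptor_pb2
    from google.protobuf.descriptor import MakeDescriptor

    # Build a DescriptorProto by hand: a message 'Pair' with two string fields.
    proto = descriptor_pb2.DescriptorProto()
    proto.name = 'Pair'
    for number, field_name in enumerate(('key', 'value'), start=1):
        field = proto.field.add()
        field.name = field_name
        field.number = number
        field.type = descriptor_pb2.FieldDescriptorProto.TYPE_STRING
        field.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL

    # MakeDescriptor resolves the proto into a runtime Descriptor; under the
    # C++ implementation it first registers a randomly named synthetic .proto
    # file in the default pool, as the api_implementation.Type() branch above
    # shows.
    desc = MakeDescriptor(proto, package='example')
    assert desc.full_name == 'example.Pair'
    assert [f.name for f in desc.fields] == ['key', 'value']
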
- -"""Provides a container for DescriptorProtos.""" - -__author__ = 'matthewtoia@google.com (Matt Toia)' - -import warnings - - -class Error(Exception): - pass - - -class DescriptorDatabaseConflictingDefinitionError(Error): - """Raised when a proto is added with the same name & different descriptor.""" - - -class DescriptorDatabase(object): - """A container accepting FileDescriptorProtos and maps DescriptorProtos.""" - - def __init__(self): - self._file_desc_protos_by_file = {} - self._file_desc_protos_by_symbol = {} - - def Add(self, file_desc_proto): - """Adds the FileDescriptorProto and its types to this database. - - Args: - file_desc_proto: The FileDescriptorProto to add. - Raises: - DescriptorDatabaseConflictingDefinitionError: if an attempt is made to - add a proto with the same name but different definition than an - existing proto in the database. - """ - proto_name = file_desc_proto.name - if proto_name not in self._file_desc_protos_by_file: - self._file_desc_protos_by_file[proto_name] = file_desc_proto - elif self._file_desc_protos_by_file[proto_name] != file_desc_proto: - raise DescriptorDatabaseConflictingDefinitionError( - '%s already added, but with different descriptor.' % proto_name) - else: - return - - # Add all the top-level descriptors to the index. - package = file_desc_proto.package - for message in file_desc_proto.message_type: - for name in _ExtractSymbols(message, package): - self._AddSymbol(name, file_desc_proto) - for enum in file_desc_proto.enum_type: - self._AddSymbol(('.'.join((package, enum.name))), file_desc_proto) - for enum_value in enum.value: - self._file_desc_protos_by_symbol[ - '.'.join((package, enum_value.name))] = file_desc_proto - for extension in file_desc_proto.extension: - self._AddSymbol(('.'.join((package, extension.name))), file_desc_proto) - for service in file_desc_proto.service: - self._AddSymbol(('.'.join((package, service.name))), file_desc_proto) - - def FindFileByName(self, name): - """Finds the file descriptor proto by file name. - - Typically the file name is a relative path ending to a .proto file. The - proto with the given name will have to have been added to this database - using the Add method or else an error will be raised. - - Args: - name: The file name to find. - - Returns: - The file descriptor proto matching the name. - - Raises: - KeyError if no file by the given name was added. - """ - - return self._file_desc_protos_by_file[name] - - def FindFileContainingSymbol(self, symbol): - """Finds the file descriptor proto containing the specified symbol. - - The symbol should be a fully qualified name including the file descriptor's - package and any containing messages. Some examples: - - 'some.package.name.Message' - 'some.package.name.Message.NestedEnum' - 'some.package.name.Message.some_field' - - The file descriptor proto containing the specified symbol must be added to - this database using the Add method or else an error will be raised. - - Args: - symbol: The fully qualified symbol name. - - Returns: - The file descriptor proto containing the symbol. - - Raises: - KeyError if no file contains the specified symbol. - """ - try: - return self._file_desc_protos_by_symbol[symbol] - except KeyError: - # Fields, enum values, and nested extensions are not in - # _file_desc_protos_by_symbol. Try to find the top level - # descriptor. Non-existent nested symbol under a valid top level - # descriptor can also be found. The behavior is the same with - # protobuf C++. 
- top_level, _, _ = symbol.rpartition('.') - try: - return self._file_desc_protos_by_symbol[top_level] - except KeyError: - # Raise the original symbol as a KeyError for better diagnostics. - raise KeyError(symbol) - - def FindFileContainingExtension(self, extendee_name, extension_number): - # TODO(jieluo): implement this API. - return None - - def FindAllExtensionNumbers(self, extendee_name): - # TODO(jieluo): implement this API. - return [] - - def _AddSymbol(self, name, file_desc_proto): - if name in self._file_desc_protos_by_symbol: - warn_msg = ('Conflict register for file "' + file_desc_proto.name + - '": ' + name + - ' is already defined in file "' + - self._file_desc_protos_by_symbol[name].name + '"') - warnings.warn(warn_msg, RuntimeWarning) - self._file_desc_protos_by_symbol[name] = file_desc_proto - - -def _ExtractSymbols(desc_proto, package): - """Pulls out all the symbols from a descriptor proto. - - Args: - desc_proto: The proto to extract symbols from. - package: The package containing the descriptor type. - - Yields: - The fully qualified name found in the descriptor. - """ - message_name = package + '.' + desc_proto.name if package else desc_proto.name - yield message_name - for nested_type in desc_proto.nested_type: - for symbol in _ExtractSymbols(nested_type, message_name): - yield symbol - for enum_type in desc_proto.enum_type: - yield '.'.join((message_name, enum_type.name)) diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/descriptor_pb2.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/descriptor_pb2.py deleted file mode 100644 index f570386432..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/descriptor_pb2.py +++ /dev/null @@ -1,1925 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
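
Note: the DescriptorDatabase deleted above exposes the same public surface as google.protobuf.descriptor_database in the upstream package. A short sketch of the Add / FindFileByName / FindFileContainingSymbol contract documented in its docstrings (the file and symbol names are made up):

    from google.protobuf import descriptor_pb2
    from google.protobuf import descriptor_database

    db = descriptor_database.DescriptorDatabase()

    # Register a FileDescriptorProto declaring message 'example.Thing'.
    file_proto = descriptor_pb2.FileDescriptorProto()
    file_proto.name = 'example/thing.proto'
    file_proto.package = 'example'
    file_proto.message_type.add().name = 'Thing'
    db.Add(file_proto)

    # Lookup by file name and by fully qualified symbol both return the proto
    # registered above. Re-adding the identical proto is a no-op, while a
    # different proto under the same file name raises
    # DescriptorDatabaseConflictingDefinitionError.
    assert db.FindFileByName('example/thing.proto') is file_proto
    assert db.FindFileContainingSymbol('example.Thing') is file_proto
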
-# source: google/protobuf/descriptor.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -if _descriptor._USE_C_DESCRIPTORS == False: - DESCRIPTOR = _descriptor.FileDescriptor( - name='google/protobuf/descriptor.proto', - package='google.protobuf', - syntax='proto2', - serialized_options=None, - create_key=_descriptor._internal_create_key, - serialized_pb=b'\n google/protobuf/descriptor.proto\x12\x0fgoogle.protobuf\"G\n\x11\x46ileDescriptorSet\x12\x32\n\x04\x66ile\x18\x01 \x03(\x0b\x32$.google.protobuf.FileDescriptorProto\"\xdb\x03\n\x13\x46ileDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07package\x18\x02 \x01(\t\x12\x12\n\ndependency\x18\x03 \x03(\t\x12\x19\n\x11public_dependency\x18\n \x03(\x05\x12\x17\n\x0fweak_dependency\x18\x0b \x03(\x05\x12\x36\n\x0cmessage_type\x18\x04 \x03(\x0b\x32 .google.protobuf.DescriptorProto\x12\x37\n\tenum_type\x18\x05 \x03(\x0b\x32$.google.protobuf.EnumDescriptorProto\x12\x38\n\x07service\x18\x06 \x03(\x0b\x32\'.google.protobuf.ServiceDescriptorProto\x12\x38\n\textension\x18\x07 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12-\n\x07options\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.FileOptions\x12\x39\n\x10source_code_info\x18\t \x01(\x0b\x32\x1f.google.protobuf.SourceCodeInfo\x12\x0e\n\x06syntax\x18\x0c \x01(\t\"\xa9\x05\n\x0f\x44\x65scriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x34\n\x05\x66ield\x18\x02 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12\x38\n\textension\x18\x06 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12\x35\n\x0bnested_type\x18\x03 \x03(\x0b\x32 .google.protobuf.DescriptorProto\x12\x37\n\tenum_type\x18\x04 \x03(\x0b\x32$.google.protobuf.EnumDescriptorProto\x12H\n\x0f\x65xtension_range\x18\x05 \x03(\x0b\x32/.google.protobuf.DescriptorProto.ExtensionRange\x12\x39\n\noneof_decl\x18\x08 \x03(\x0b\x32%.google.protobuf.OneofDescriptorProto\x12\x30\n\x07options\x18\x07 \x01(\x0b\x32\x1f.google.protobuf.MessageOptions\x12\x46\n\x0ereserved_range\x18\t \x03(\x0b\x32..google.protobuf.DescriptorProto.ReservedRange\x12\x15\n\rreserved_name\x18\n \x03(\t\x1a\x65\n\x0e\x45xtensionRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\x12\x37\n\x07options\x18\x03 \x01(\x0b\x32&.google.protobuf.ExtensionRangeOptions\x1a+\n\rReservedRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\"g\n\x15\x45xtensionRangeOptions\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xd5\x05\n\x14\x46ieldDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x03 \x01(\x05\x12:\n\x05label\x18\x04 \x01(\x0e\x32+.google.protobuf.FieldDescriptorProto.Label\x12\x38\n\x04type\x18\x05 \x01(\x0e\x32*.google.protobuf.FieldDescriptorProto.Type\x12\x11\n\ttype_name\x18\x06 \x01(\t\x12\x10\n\x08\x65xtendee\x18\x02 \x01(\t\x12\x15\n\rdefault_value\x18\x07 \x01(\t\x12\x13\n\x0boneof_index\x18\t \x01(\x05\x12\x11\n\tjson_name\x18\n \x01(\t\x12.\n\x07options\x18\x08 \x01(\x0b\x32\x1d.google.protobuf.FieldOptions\x12\x17\n\x0fproto3_optional\x18\x11 
\x01(\x08\"\xb6\x02\n\x04Type\x12\x0f\n\x0bTYPE_DOUBLE\x10\x01\x12\x0e\n\nTYPE_FLOAT\x10\x02\x12\x0e\n\nTYPE_INT64\x10\x03\x12\x0f\n\x0bTYPE_UINT64\x10\x04\x12\x0e\n\nTYPE_INT32\x10\x05\x12\x10\n\x0cTYPE_FIXED64\x10\x06\x12\x10\n\x0cTYPE_FIXED32\x10\x07\x12\r\n\tTYPE_BOOL\x10\x08\x12\x0f\n\x0bTYPE_STRING\x10\t\x12\x0e\n\nTYPE_GROUP\x10\n\x12\x10\n\x0cTYPE_MESSAGE\x10\x0b\x12\x0e\n\nTYPE_BYTES\x10\x0c\x12\x0f\n\x0bTYPE_UINT32\x10\r\x12\r\n\tTYPE_ENUM\x10\x0e\x12\x11\n\rTYPE_SFIXED32\x10\x0f\x12\x11\n\rTYPE_SFIXED64\x10\x10\x12\x0f\n\x0bTYPE_SINT32\x10\x11\x12\x0f\n\x0bTYPE_SINT64\x10\x12\"C\n\x05Label\x12\x12\n\x0eLABEL_OPTIONAL\x10\x01\x12\x12\n\x0eLABEL_REQUIRED\x10\x02\x12\x12\n\x0eLABEL_REPEATED\x10\x03\"T\n\x14OneofDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12.\n\x07options\x18\x02 \x01(\x0b\x32\x1d.google.protobuf.OneofOptions\"\xa4\x02\n\x13\x45numDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x38\n\x05value\x18\x02 \x03(\x0b\x32).google.protobuf.EnumValueDescriptorProto\x12-\n\x07options\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.EnumOptions\x12N\n\x0ereserved_range\x18\x04 \x03(\x0b\x32\x36.google.protobuf.EnumDescriptorProto.EnumReservedRange\x12\x15\n\rreserved_name\x18\x05 \x03(\t\x1a/\n\x11\x45numReservedRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\"l\n\x18\x45numValueDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x02 \x01(\x05\x12\x32\n\x07options\x18\x03 \x01(\x0b\x32!.google.protobuf.EnumValueOptions\"\x90\x01\n\x16ServiceDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x36\n\x06method\x18\x02 \x03(\x0b\x32&.google.protobuf.MethodDescriptorProto\x12\x30\n\x07options\x18\x03 \x01(\x0b\x32\x1f.google.protobuf.ServiceOptions\"\xc1\x01\n\x15MethodDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\ninput_type\x18\x02 \x01(\t\x12\x13\n\x0boutput_type\x18\x03 \x01(\t\x12/\n\x07options\x18\x04 \x01(\x0b\x32\x1e.google.protobuf.MethodOptions\x12\x1f\n\x10\x63lient_streaming\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x1f\n\x10server_streaming\x18\x06 \x01(\x08:\x05\x66\x61lse\"\xa5\x06\n\x0b\x46ileOptions\x12\x14\n\x0cjava_package\x18\x01 \x01(\t\x12\x1c\n\x14java_outer_classname\x18\x08 \x01(\t\x12\"\n\x13java_multiple_files\x18\n \x01(\x08:\x05\x66\x61lse\x12)\n\x1djava_generate_equals_and_hash\x18\x14 \x01(\x08\x42\x02\x18\x01\x12%\n\x16java_string_check_utf8\x18\x1b \x01(\x08:\x05\x66\x61lse\x12\x46\n\x0coptimize_for\x18\t \x01(\x0e\x32).google.protobuf.FileOptions.OptimizeMode:\x05SPEED\x12\x12\n\ngo_package\x18\x0b \x01(\t\x12\"\n\x13\x63\x63_generic_services\x18\x10 \x01(\x08:\x05\x66\x61lse\x12$\n\x15java_generic_services\x18\x11 \x01(\x08:\x05\x66\x61lse\x12\"\n\x13py_generic_services\x18\x12 \x01(\x08:\x05\x66\x61lse\x12#\n\x14php_generic_services\x18* \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x17 \x01(\x08:\x05\x66\x61lse\x12\x1e\n\x10\x63\x63_enable_arenas\x18\x1f \x01(\x08:\x04true\x12\x19\n\x11objc_class_prefix\x18$ \x01(\t\x12\x18\n\x10\x63sharp_namespace\x18% \x01(\t\x12\x14\n\x0cswift_prefix\x18\' \x01(\t\x12\x18\n\x10php_class_prefix\x18( \x01(\t\x12\x15\n\rphp_namespace\x18) \x01(\t\x12\x1e\n\x16php_metadata_namespace\x18, \x01(\t\x12\x14\n\x0cruby_package\x18- \x01(\t\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 
\x03(\x0b\x32$.google.protobuf.UninterpretedOption\":\n\x0cOptimizeMode\x12\t\n\x05SPEED\x10\x01\x12\r\n\tCODE_SIZE\x10\x02\x12\x10\n\x0cLITE_RUNTIME\x10\x03*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08&\x10\'\"\x84\x02\n\x0eMessageOptions\x12&\n\x17message_set_wire_format\x18\x01 \x01(\x08:\x05\x66\x61lse\x12.\n\x1fno_standard_descriptor_accessor\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x11\n\tmap_entry\x18\x07 \x01(\x08\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08\x04\x10\x05J\x04\x08\x05\x10\x06J\x04\x08\x06\x10\x07J\x04\x08\x08\x10\tJ\x04\x08\t\x10\n\"\xbe\x03\n\x0c\x46ieldOptions\x12:\n\x05\x63type\x18\x01 \x01(\x0e\x32#.google.protobuf.FieldOptions.CType:\x06STRING\x12\x0e\n\x06packed\x18\x02 \x01(\x08\x12?\n\x06jstype\x18\x06 \x01(\x0e\x32$.google.protobuf.FieldOptions.JSType:\tJS_NORMAL\x12\x13\n\x04lazy\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x1e\n\x0funverified_lazy\x18\x0f \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x13\n\x04weak\x18\n \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption\"/\n\x05\x43Type\x12\n\n\x06STRING\x10\x00\x12\x08\n\x04\x43ORD\x10\x01\x12\x10\n\x0cSTRING_PIECE\x10\x02\"5\n\x06JSType\x12\r\n\tJS_NORMAL\x10\x00\x12\r\n\tJS_STRING\x10\x01\x12\r\n\tJS_NUMBER\x10\x02*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08\x04\x10\x05\"^\n\x0cOneofOptions\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x93\x01\n\x0b\x45numOptions\x12\x13\n\x0b\x61llow_alias\x18\x02 \x01(\x08\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08\x05\x10\x06\"}\n\x10\x45numValueOptions\x12\x19\n\ndeprecated\x18\x01 \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"{\n\x0eServiceOptions\x12\x19\n\ndeprecated\x18! \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xad\x02\n\rMethodOptions\x12\x19\n\ndeprecated\x18! 
\x01(\x08:\x05\x66\x61lse\x12_\n\x11idempotency_level\x18\" \x01(\x0e\x32/.google.protobuf.MethodOptions.IdempotencyLevel:\x13IDEMPOTENCY_UNKNOWN\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption\"P\n\x10IdempotencyLevel\x12\x17\n\x13IDEMPOTENCY_UNKNOWN\x10\x00\x12\x13\n\x0fNO_SIDE_EFFECTS\x10\x01\x12\x0e\n\nIDEMPOTENT\x10\x02*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x9e\x02\n\x13UninterpretedOption\x12;\n\x04name\x18\x02 \x03(\x0b\x32-.google.protobuf.UninterpretedOption.NamePart\x12\x18\n\x10identifier_value\x18\x03 \x01(\t\x12\x1a\n\x12positive_int_value\x18\x04 \x01(\x04\x12\x1a\n\x12negative_int_value\x18\x05 \x01(\x03\x12\x14\n\x0c\x64ouble_value\x18\x06 \x01(\x01\x12\x14\n\x0cstring_value\x18\x07 \x01(\x0c\x12\x17\n\x0f\x61ggregate_value\x18\x08 \x01(\t\x1a\x33\n\x08NamePart\x12\x11\n\tname_part\x18\x01 \x02(\t\x12\x14\n\x0cis_extension\x18\x02 \x02(\x08\"\xd5\x01\n\x0eSourceCodeInfo\x12:\n\x08location\x18\x01 \x03(\x0b\x32(.google.protobuf.SourceCodeInfo.Location\x1a\x86\x01\n\x08Location\x12\x10\n\x04path\x18\x01 \x03(\x05\x42\x02\x10\x01\x12\x10\n\x04span\x18\x02 \x03(\x05\x42\x02\x10\x01\x12\x18\n\x10leading_comments\x18\x03 \x01(\t\x12\x19\n\x11trailing_comments\x18\x04 \x01(\t\x12!\n\x19leading_detached_comments\x18\x06 \x03(\t\"\xa7\x01\n\x11GeneratedCodeInfo\x12\x41\n\nannotation\x18\x01 \x03(\x0b\x32-.google.protobuf.GeneratedCodeInfo.Annotation\x1aO\n\nAnnotation\x12\x10\n\x04path\x18\x01 \x03(\x05\x42\x02\x10\x01\x12\x13\n\x0bsource_file\x18\x02 \x01(\t\x12\r\n\x05\x62\x65gin\x18\x03 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x04 \x01(\x05\x42~\n\x13\x63om.google.protobufB\x10\x44\x65scriptorProtosH\x01Z-google.golang.org/protobuf/types/descriptorpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1aGoogle.Protobuf.Reflection' - ) -else: - DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n google/protobuf/descriptor.proto\x12\x0fgoogle.protobuf\"G\n\x11\x46ileDescriptorSet\x12\x32\n\x04\x66ile\x18\x01 \x03(\x0b\x32$.google.protobuf.FileDescriptorProto\"\xdb\x03\n\x13\x46ileDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07package\x18\x02 \x01(\t\x12\x12\n\ndependency\x18\x03 \x03(\t\x12\x19\n\x11public_dependency\x18\n \x03(\x05\x12\x17\n\x0fweak_dependency\x18\x0b \x03(\x05\x12\x36\n\x0cmessage_type\x18\x04 \x03(\x0b\x32 .google.protobuf.DescriptorProto\x12\x37\n\tenum_type\x18\x05 \x03(\x0b\x32$.google.protobuf.EnumDescriptorProto\x12\x38\n\x07service\x18\x06 \x03(\x0b\x32\'.google.protobuf.ServiceDescriptorProto\x12\x38\n\textension\x18\x07 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12-\n\x07options\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.FileOptions\x12\x39\n\x10source_code_info\x18\t \x01(\x0b\x32\x1f.google.protobuf.SourceCodeInfo\x12\x0e\n\x06syntax\x18\x0c \x01(\t\"\xa9\x05\n\x0f\x44\x65scriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x34\n\x05\x66ield\x18\x02 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12\x38\n\textension\x18\x06 \x03(\x0b\x32%.google.protobuf.FieldDescriptorProto\x12\x35\n\x0bnested_type\x18\x03 \x03(\x0b\x32 .google.protobuf.DescriptorProto\x12\x37\n\tenum_type\x18\x04 \x03(\x0b\x32$.google.protobuf.EnumDescriptorProto\x12H\n\x0f\x65xtension_range\x18\x05 \x03(\x0b\x32/.google.protobuf.DescriptorProto.ExtensionRange\x12\x39\n\noneof_decl\x18\x08 \x03(\x0b\x32%.google.protobuf.OneofDescriptorProto\x12\x30\n\x07options\x18\x07 \x01(\x0b\x32\x1f.google.protobuf.MessageOptions\x12\x46\n\x0ereserved_range\x18\t 
\x03(\x0b\x32..google.protobuf.DescriptorProto.ReservedRange\x12\x15\n\rreserved_name\x18\n \x03(\t\x1a\x65\n\x0e\x45xtensionRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\x12\x37\n\x07options\x18\x03 \x01(\x0b\x32&.google.protobuf.ExtensionRangeOptions\x1a+\n\rReservedRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\"g\n\x15\x45xtensionRangeOptions\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xd5\x05\n\x14\x46ieldDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x03 \x01(\x05\x12:\n\x05label\x18\x04 \x01(\x0e\x32+.google.protobuf.FieldDescriptorProto.Label\x12\x38\n\x04type\x18\x05 \x01(\x0e\x32*.google.protobuf.FieldDescriptorProto.Type\x12\x11\n\ttype_name\x18\x06 \x01(\t\x12\x10\n\x08\x65xtendee\x18\x02 \x01(\t\x12\x15\n\rdefault_value\x18\x07 \x01(\t\x12\x13\n\x0boneof_index\x18\t \x01(\x05\x12\x11\n\tjson_name\x18\n \x01(\t\x12.\n\x07options\x18\x08 \x01(\x0b\x32\x1d.google.protobuf.FieldOptions\x12\x17\n\x0fproto3_optional\x18\x11 \x01(\x08\"\xb6\x02\n\x04Type\x12\x0f\n\x0bTYPE_DOUBLE\x10\x01\x12\x0e\n\nTYPE_FLOAT\x10\x02\x12\x0e\n\nTYPE_INT64\x10\x03\x12\x0f\n\x0bTYPE_UINT64\x10\x04\x12\x0e\n\nTYPE_INT32\x10\x05\x12\x10\n\x0cTYPE_FIXED64\x10\x06\x12\x10\n\x0cTYPE_FIXED32\x10\x07\x12\r\n\tTYPE_BOOL\x10\x08\x12\x0f\n\x0bTYPE_STRING\x10\t\x12\x0e\n\nTYPE_GROUP\x10\n\x12\x10\n\x0cTYPE_MESSAGE\x10\x0b\x12\x0e\n\nTYPE_BYTES\x10\x0c\x12\x0f\n\x0bTYPE_UINT32\x10\r\x12\r\n\tTYPE_ENUM\x10\x0e\x12\x11\n\rTYPE_SFIXED32\x10\x0f\x12\x11\n\rTYPE_SFIXED64\x10\x10\x12\x0f\n\x0bTYPE_SINT32\x10\x11\x12\x0f\n\x0bTYPE_SINT64\x10\x12\"C\n\x05Label\x12\x12\n\x0eLABEL_OPTIONAL\x10\x01\x12\x12\n\x0eLABEL_REQUIRED\x10\x02\x12\x12\n\x0eLABEL_REPEATED\x10\x03\"T\n\x14OneofDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12.\n\x07options\x18\x02 \x01(\x0b\x32\x1d.google.protobuf.OneofOptions\"\xa4\x02\n\x13\x45numDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x38\n\x05value\x18\x02 \x03(\x0b\x32).google.protobuf.EnumValueDescriptorProto\x12-\n\x07options\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.EnumOptions\x12N\n\x0ereserved_range\x18\x04 \x03(\x0b\x32\x36.google.protobuf.EnumDescriptorProto.EnumReservedRange\x12\x15\n\rreserved_name\x18\x05 \x03(\t\x1a/\n\x11\x45numReservedRange\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\"l\n\x18\x45numValueDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x02 \x01(\x05\x12\x32\n\x07options\x18\x03 \x01(\x0b\x32!.google.protobuf.EnumValueOptions\"\x90\x01\n\x16ServiceDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x36\n\x06method\x18\x02 \x03(\x0b\x32&.google.protobuf.MethodDescriptorProto\x12\x30\n\x07options\x18\x03 \x01(\x0b\x32\x1f.google.protobuf.ServiceOptions\"\xc1\x01\n\x15MethodDescriptorProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\ninput_type\x18\x02 \x01(\t\x12\x13\n\x0boutput_type\x18\x03 \x01(\t\x12/\n\x07options\x18\x04 \x01(\x0b\x32\x1e.google.protobuf.MethodOptions\x12\x1f\n\x10\x63lient_streaming\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x1f\n\x10server_streaming\x18\x06 \x01(\x08:\x05\x66\x61lse\"\xa5\x06\n\x0b\x46ileOptions\x12\x14\n\x0cjava_package\x18\x01 \x01(\t\x12\x1c\n\x14java_outer_classname\x18\x08 \x01(\t\x12\"\n\x13java_multiple_files\x18\n \x01(\x08:\x05\x66\x61lse\x12)\n\x1djava_generate_equals_and_hash\x18\x14 \x01(\x08\x42\x02\x18\x01\x12%\n\x16java_string_check_utf8\x18\x1b 
\x01(\x08:\x05\x66\x61lse\x12\x46\n\x0coptimize_for\x18\t \x01(\x0e\x32).google.protobuf.FileOptions.OptimizeMode:\x05SPEED\x12\x12\n\ngo_package\x18\x0b \x01(\t\x12\"\n\x13\x63\x63_generic_services\x18\x10 \x01(\x08:\x05\x66\x61lse\x12$\n\x15java_generic_services\x18\x11 \x01(\x08:\x05\x66\x61lse\x12\"\n\x13py_generic_services\x18\x12 \x01(\x08:\x05\x66\x61lse\x12#\n\x14php_generic_services\x18* \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x17 \x01(\x08:\x05\x66\x61lse\x12\x1e\n\x10\x63\x63_enable_arenas\x18\x1f \x01(\x08:\x04true\x12\x19\n\x11objc_class_prefix\x18$ \x01(\t\x12\x18\n\x10\x63sharp_namespace\x18% \x01(\t\x12\x14\n\x0cswift_prefix\x18\' \x01(\t\x12\x18\n\x10php_class_prefix\x18( \x01(\t\x12\x15\n\rphp_namespace\x18) \x01(\t\x12\x1e\n\x16php_metadata_namespace\x18, \x01(\t\x12\x14\n\x0cruby_package\x18- \x01(\t\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption\":\n\x0cOptimizeMode\x12\t\n\x05SPEED\x10\x01\x12\r\n\tCODE_SIZE\x10\x02\x12\x10\n\x0cLITE_RUNTIME\x10\x03*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08&\x10\'\"\x84\x02\n\x0eMessageOptions\x12&\n\x17message_set_wire_format\x18\x01 \x01(\x08:\x05\x66\x61lse\x12.\n\x1fno_standard_descriptor_accessor\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x11\n\tmap_entry\x18\x07 \x01(\x08\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08\x04\x10\x05J\x04\x08\x05\x10\x06J\x04\x08\x06\x10\x07J\x04\x08\x08\x10\tJ\x04\x08\t\x10\n\"\xbe\x03\n\x0c\x46ieldOptions\x12:\n\x05\x63type\x18\x01 \x01(\x0e\x32#.google.protobuf.FieldOptions.CType:\x06STRING\x12\x0e\n\x06packed\x18\x02 \x01(\x08\x12?\n\x06jstype\x18\x06 \x01(\x0e\x32$.google.protobuf.FieldOptions.JSType:\tJS_NORMAL\x12\x13\n\x04lazy\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x1e\n\x0funverified_lazy\x18\x0f \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x13\n\x04weak\x18\n \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption\"/\n\x05\x43Type\x12\n\n\x06STRING\x10\x00\x12\x08\n\x04\x43ORD\x10\x01\x12\x10\n\x0cSTRING_PIECE\x10\x02\"5\n\x06JSType\x12\r\n\tJS_NORMAL\x10\x00\x12\r\n\tJS_STRING\x10\x01\x12\r\n\tJS_NUMBER\x10\x02*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08\x04\x10\x05\"^\n\x0cOneofOptions\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x93\x01\n\x0b\x45numOptions\x12\x13\n\x0b\x61llow_alias\x18\x02 \x01(\x08\x12\x19\n\ndeprecated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02J\x04\x08\x05\x10\x06\"}\n\x10\x45numValueOptions\x12\x19\n\ndeprecated\x18\x01 \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"{\n\x0eServiceOptions\x12\x19\n\ndeprecated\x18! \x01(\x08:\x05\x66\x61lse\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xad\x02\n\rMethodOptions\x12\x19\n\ndeprecated\x18! 
\x01(\x08:\x05\x66\x61lse\x12_\n\x11idempotency_level\x18\" \x01(\x0e\x32/.google.protobuf.MethodOptions.IdempotencyLevel:\x13IDEMPOTENCY_UNKNOWN\x12\x43\n\x14uninterpreted_option\x18\xe7\x07 \x03(\x0b\x32$.google.protobuf.UninterpretedOption\"P\n\x10IdempotencyLevel\x12\x17\n\x13IDEMPOTENCY_UNKNOWN\x10\x00\x12\x13\n\x0fNO_SIDE_EFFECTS\x10\x01\x12\x0e\n\nIDEMPOTENT\x10\x02*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\x9e\x02\n\x13UninterpretedOption\x12;\n\x04name\x18\x02 \x03(\x0b\x32-.google.protobuf.UninterpretedOption.NamePart\x12\x18\n\x10identifier_value\x18\x03 \x01(\t\x12\x1a\n\x12positive_int_value\x18\x04 \x01(\x04\x12\x1a\n\x12negative_int_value\x18\x05 \x01(\x03\x12\x14\n\x0c\x64ouble_value\x18\x06 \x01(\x01\x12\x14\n\x0cstring_value\x18\x07 \x01(\x0c\x12\x17\n\x0f\x61ggregate_value\x18\x08 \x01(\t\x1a\x33\n\x08NamePart\x12\x11\n\tname_part\x18\x01 \x02(\t\x12\x14\n\x0cis_extension\x18\x02 \x02(\x08\"\xd5\x01\n\x0eSourceCodeInfo\x12:\n\x08location\x18\x01 \x03(\x0b\x32(.google.protobuf.SourceCodeInfo.Location\x1a\x86\x01\n\x08Location\x12\x10\n\x04path\x18\x01 \x03(\x05\x42\x02\x10\x01\x12\x10\n\x04span\x18\x02 \x03(\x05\x42\x02\x10\x01\x12\x18\n\x10leading_comments\x18\x03 \x01(\t\x12\x19\n\x11trailing_comments\x18\x04 \x01(\t\x12!\n\x19leading_detached_comments\x18\x06 \x03(\t\"\xa7\x01\n\x11GeneratedCodeInfo\x12\x41\n\nannotation\x18\x01 \x03(\x0b\x32-.google.protobuf.GeneratedCodeInfo.Annotation\x1aO\n\nAnnotation\x12\x10\n\x04path\x18\x01 \x03(\x05\x42\x02\x10\x01\x12\x13\n\x0bsource_file\x18\x02 \x01(\t\x12\r\n\x05\x62\x65gin\x18\x03 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x04 \x01(\x05\x42~\n\x13\x63om.google.protobufB\x10\x44\x65scriptorProtosH\x01Z-google.golang.org/protobuf/types/descriptorpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1aGoogle.Protobuf.Reflection') - -if _descriptor._USE_C_DESCRIPTORS == False: - _FIELDDESCRIPTORPROTO_TYPE = _descriptor.EnumDescriptor( - name='Type', - full_name='google.protobuf.FieldDescriptorProto.Type', - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name='TYPE_DOUBLE', index=0, number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='TYPE_FLOAT', index=1, number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='TYPE_INT64', index=2, number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='TYPE_UINT64', index=3, number=4, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='TYPE_INT32', index=4, number=5, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='TYPE_FIXED64', index=5, number=6, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='TYPE_FIXED32', index=6, number=7, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='TYPE_BOOL', index=7, number=8, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='TYPE_STRING', index=8, number=9, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - 
_descriptor.EnumValueDescriptor( - name='TYPE_GROUP', index=9, number=10, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='TYPE_MESSAGE', index=10, number=11, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='TYPE_BYTES', index=11, number=12, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='TYPE_UINT32', index=12, number=13, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='TYPE_ENUM', index=13, number=14, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='TYPE_SFIXED32', index=14, number=15, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='TYPE_SFIXED64', index=15, number=16, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='TYPE_SINT32', index=16, number=17, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='TYPE_SINT64', index=17, number=18, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - ], - containing_type=None, - serialized_options=None, - ) - _sym_db.RegisterEnumDescriptor(_FIELDDESCRIPTORPROTO_TYPE) - - _FIELDDESCRIPTORPROTO_LABEL = _descriptor.EnumDescriptor( - name='Label', - full_name='google.protobuf.FieldDescriptorProto.Label', - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name='LABEL_OPTIONAL', index=0, number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='LABEL_REQUIRED', index=1, number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='LABEL_REPEATED', index=2, number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - ], - containing_type=None, - serialized_options=None, - ) - _sym_db.RegisterEnumDescriptor(_FIELDDESCRIPTORPROTO_LABEL) - - _FILEOPTIONS_OPTIMIZEMODE = _descriptor.EnumDescriptor( - name='OptimizeMode', - full_name='google.protobuf.FileOptions.OptimizeMode', - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name='SPEED', index=0, number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='CODE_SIZE', index=1, number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='LITE_RUNTIME', index=2, number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - ], - containing_type=None, - serialized_options=None, - ) - _sym_db.RegisterEnumDescriptor(_FILEOPTIONS_OPTIMIZEMODE) - - _FIELDOPTIONS_CTYPE = _descriptor.EnumDescriptor( - name='CType', - full_name='google.protobuf.FieldOptions.CType', - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - 
name='STRING', index=0, number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='CORD', index=1, number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='STRING_PIECE', index=2, number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - ], - containing_type=None, - serialized_options=None, - ) - _sym_db.RegisterEnumDescriptor(_FIELDOPTIONS_CTYPE) - - _FIELDOPTIONS_JSTYPE = _descriptor.EnumDescriptor( - name='JSType', - full_name='google.protobuf.FieldOptions.JSType', - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name='JS_NORMAL', index=0, number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='JS_STRING', index=1, number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='JS_NUMBER', index=2, number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - ], - containing_type=None, - serialized_options=None, - ) - _sym_db.RegisterEnumDescriptor(_FIELDOPTIONS_JSTYPE) - - _METHODOPTIONS_IDEMPOTENCYLEVEL = _descriptor.EnumDescriptor( - name='IdempotencyLevel', - full_name='google.protobuf.MethodOptions.IdempotencyLevel', - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name='IDEMPOTENCY_UNKNOWN', index=0, number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='NO_SIDE_EFFECTS', index=1, number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='IDEMPOTENT', index=2, number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - ], - containing_type=None, - serialized_options=None, - ) - _sym_db.RegisterEnumDescriptor(_METHODOPTIONS_IDEMPOTENCYLEVEL) - - - _FILEDESCRIPTORSET = _descriptor.Descriptor( - name='FileDescriptorSet', - full_name='google.protobuf.FileDescriptorSet', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='file', full_name='google.protobuf.FileDescriptorSet.file', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - ) - - - _FILEDESCRIPTORPROTO = _descriptor.Descriptor( - name='FileDescriptorProto', - full_name='google.protobuf.FileDescriptorProto', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='name', full_name='google.protobuf.FileDescriptorProto.name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, 
enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='package', full_name='google.protobuf.FileDescriptorProto.package', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='dependency', full_name='google.protobuf.FileDescriptorProto.dependency', index=2, - number=3, type=9, cpp_type=9, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='public_dependency', full_name='google.protobuf.FileDescriptorProto.public_dependency', index=3, - number=10, type=5, cpp_type=1, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='weak_dependency', full_name='google.protobuf.FileDescriptorProto.weak_dependency', index=4, - number=11, type=5, cpp_type=1, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='message_type', full_name='google.protobuf.FileDescriptorProto.message_type', index=5, - number=4, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='enum_type', full_name='google.protobuf.FileDescriptorProto.enum_type', index=6, - number=5, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='service', full_name='google.protobuf.FileDescriptorProto.service', index=7, - number=6, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='extension', full_name='google.protobuf.FileDescriptorProto.extension', index=8, - number=7, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='options', full_name='google.protobuf.FileDescriptorProto.options', index=9, - number=8, type=11, 
cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='source_code_info', full_name='google.protobuf.FileDescriptorProto.source_code_info', index=10, - number=9, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='syntax', full_name='google.protobuf.FileDescriptorProto.syntax', index=11, - number=12, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - ) - - - _DESCRIPTORPROTO_EXTENSIONRANGE = _descriptor.Descriptor( - name='ExtensionRange', - full_name='google.protobuf.DescriptorProto.ExtensionRange', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='start', full_name='google.protobuf.DescriptorProto.ExtensionRange.start', index=0, - number=1, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='end', full_name='google.protobuf.DescriptorProto.ExtensionRange.end', index=1, - number=2, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='options', full_name='google.protobuf.DescriptorProto.ExtensionRange.options', index=2, - number=3, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - ) - - _DESCRIPTORPROTO_RESERVEDRANGE = _descriptor.Descriptor( - name='ReservedRange', - full_name='google.protobuf.DescriptorProto.ReservedRange', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='start', full_name='google.protobuf.DescriptorProto.ReservedRange.start', index=0, - number=1, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, 
create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='end', full_name='google.protobuf.DescriptorProto.ReservedRange.end', index=1, - number=2, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - ) - - _DESCRIPTORPROTO = _descriptor.Descriptor( - name='DescriptorProto', - full_name='google.protobuf.DescriptorProto', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='name', full_name='google.protobuf.DescriptorProto.name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='field', full_name='google.protobuf.DescriptorProto.field', index=1, - number=2, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='extension', full_name='google.protobuf.DescriptorProto.extension', index=2, - number=6, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='nested_type', full_name='google.protobuf.DescriptorProto.nested_type', index=3, - number=3, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='enum_type', full_name='google.protobuf.DescriptorProto.enum_type', index=4, - number=4, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='extension_range', full_name='google.protobuf.DescriptorProto.extension_range', index=5, - number=5, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='oneof_decl', full_name='google.protobuf.DescriptorProto.oneof_decl', index=6, - number=8, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - 
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='options', full_name='google.protobuf.DescriptorProto.options', index=7, - number=7, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='reserved_range', full_name='google.protobuf.DescriptorProto.reserved_range', index=8, - number=9, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='reserved_name', full_name='google.protobuf.DescriptorProto.reserved_name', index=9, - number=10, type=9, cpp_type=9, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[_DESCRIPTORPROTO_EXTENSIONRANGE, _DESCRIPTORPROTO_RESERVEDRANGE, ], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - ) - - - _EXTENSIONRANGEOPTIONS = _descriptor.Descriptor( - name='ExtensionRangeOptions', - full_name='google.protobuf.ExtensionRangeOptions', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='uninterpreted_option', full_name='google.protobuf.ExtensionRangeOptions.uninterpreted_option', index=0, - number=999, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=True, - syntax='proto2', - extension_ranges=[(1000, 536870912), ], - oneofs=[ - ], - ) - - - _FIELDDESCRIPTORPROTO = _descriptor.Descriptor( - name='FieldDescriptorProto', - full_name='google.protobuf.FieldDescriptorProto', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='name', full_name='google.protobuf.FieldDescriptorProto.name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='number', full_name='google.protobuf.FieldDescriptorProto.number', index=1, - number=3, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='label', 
full_name='google.protobuf.FieldDescriptorProto.label', index=2, - number=4, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=1, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='type', full_name='google.protobuf.FieldDescriptorProto.type', index=3, - number=5, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=1, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='type_name', full_name='google.protobuf.FieldDescriptorProto.type_name', index=4, - number=6, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='extendee', full_name='google.protobuf.FieldDescriptorProto.extendee', index=5, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='default_value', full_name='google.protobuf.FieldDescriptorProto.default_value', index=6, - number=7, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='oneof_index', full_name='google.protobuf.FieldDescriptorProto.oneof_index', index=7, - number=9, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='json_name', full_name='google.protobuf.FieldDescriptorProto.json_name', index=8, - number=10, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='options', full_name='google.protobuf.FieldDescriptorProto.options', index=9, - number=8, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='proto3_optional', full_name='google.protobuf.FieldDescriptorProto.proto3_optional', index=10, - number=17, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - 
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - _FIELDDESCRIPTORPROTO_TYPE, - _FIELDDESCRIPTORPROTO_LABEL, - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - ) - - - _ONEOFDESCRIPTORPROTO = _descriptor.Descriptor( - name='OneofDescriptorProto', - full_name='google.protobuf.OneofDescriptorProto', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='name', full_name='google.protobuf.OneofDescriptorProto.name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='options', full_name='google.protobuf.OneofDescriptorProto.options', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - ) - - - _ENUMDESCRIPTORPROTO_ENUMRESERVEDRANGE = _descriptor.Descriptor( - name='EnumReservedRange', - full_name='google.protobuf.EnumDescriptorProto.EnumReservedRange', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='start', full_name='google.protobuf.EnumDescriptorProto.EnumReservedRange.start', index=0, - number=1, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='end', full_name='google.protobuf.EnumDescriptorProto.EnumReservedRange.end', index=1, - number=2, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - ) - - _ENUMDESCRIPTORPROTO = _descriptor.Descriptor( - name='EnumDescriptorProto', - full_name='google.protobuf.EnumDescriptorProto', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='name', full_name='google.protobuf.EnumDescriptorProto.name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='value', 
full_name='google.protobuf.EnumDescriptorProto.value', index=1, - number=2, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='options', full_name='google.protobuf.EnumDescriptorProto.options', index=2, - number=3, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='reserved_range', full_name='google.protobuf.EnumDescriptorProto.reserved_range', index=3, - number=4, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='reserved_name', full_name='google.protobuf.EnumDescriptorProto.reserved_name', index=4, - number=5, type=9, cpp_type=9, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[_ENUMDESCRIPTORPROTO_ENUMRESERVEDRANGE, ], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - ) - - - _ENUMVALUEDESCRIPTORPROTO = _descriptor.Descriptor( - name='EnumValueDescriptorProto', - full_name='google.protobuf.EnumValueDescriptorProto', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='name', full_name='google.protobuf.EnumValueDescriptorProto.name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='number', full_name='google.protobuf.EnumValueDescriptorProto.number', index=1, - number=2, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='options', full_name='google.protobuf.EnumValueDescriptorProto.options', index=2, - number=3, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - ) - - - _SERVICEDESCRIPTORPROTO = _descriptor.Descriptor( - name='ServiceDescriptorProto', - 
full_name='google.protobuf.ServiceDescriptorProto', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='name', full_name='google.protobuf.ServiceDescriptorProto.name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='method', full_name='google.protobuf.ServiceDescriptorProto.method', index=1, - number=2, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='options', full_name='google.protobuf.ServiceDescriptorProto.options', index=2, - number=3, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - ) - - - _METHODDESCRIPTORPROTO = _descriptor.Descriptor( - name='MethodDescriptorProto', - full_name='google.protobuf.MethodDescriptorProto', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='name', full_name='google.protobuf.MethodDescriptorProto.name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='input_type', full_name='google.protobuf.MethodDescriptorProto.input_type', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='output_type', full_name='google.protobuf.MethodDescriptorProto.output_type', index=2, - number=3, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='options', full_name='google.protobuf.MethodDescriptorProto.options', index=3, - number=4, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='client_streaming', 
full_name='google.protobuf.MethodDescriptorProto.client_streaming', index=4, - number=5, type=8, cpp_type=7, label=1, - has_default_value=True, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='server_streaming', full_name='google.protobuf.MethodDescriptorProto.server_streaming', index=5, - number=6, type=8, cpp_type=7, label=1, - has_default_value=True, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - ) - - - _FILEOPTIONS = _descriptor.Descriptor( - name='FileOptions', - full_name='google.protobuf.FileOptions', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='java_package', full_name='google.protobuf.FileOptions.java_package', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='java_outer_classname', full_name='google.protobuf.FileOptions.java_outer_classname', index=1, - number=8, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='java_multiple_files', full_name='google.protobuf.FileOptions.java_multiple_files', index=2, - number=10, type=8, cpp_type=7, label=1, - has_default_value=True, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='java_generate_equals_and_hash', full_name='google.protobuf.FileOptions.java_generate_equals_and_hash', index=3, - number=20, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='java_string_check_utf8', full_name='google.protobuf.FileOptions.java_string_check_utf8', index=4, - number=27, type=8, cpp_type=7, label=1, - has_default_value=True, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='optimize_for', full_name='google.protobuf.FileOptions.optimize_for', index=5, - number=9, type=14, cpp_type=8, label=1, - has_default_value=True, default_value=1, - message_type=None, enum_type=None, containing_type=None, - 
is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='go_package', full_name='google.protobuf.FileOptions.go_package', index=6, - number=11, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='cc_generic_services', full_name='google.protobuf.FileOptions.cc_generic_services', index=7, - number=16, type=8, cpp_type=7, label=1, - has_default_value=True, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='java_generic_services', full_name='google.protobuf.FileOptions.java_generic_services', index=8, - number=17, type=8, cpp_type=7, label=1, - has_default_value=True, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='py_generic_services', full_name='google.protobuf.FileOptions.py_generic_services', index=9, - number=18, type=8, cpp_type=7, label=1, - has_default_value=True, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='php_generic_services', full_name='google.protobuf.FileOptions.php_generic_services', index=10, - number=42, type=8, cpp_type=7, label=1, - has_default_value=True, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='deprecated', full_name='google.protobuf.FileOptions.deprecated', index=11, - number=23, type=8, cpp_type=7, label=1, - has_default_value=True, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='cc_enable_arenas', full_name='google.protobuf.FileOptions.cc_enable_arenas', index=12, - number=31, type=8, cpp_type=7, label=1, - has_default_value=True, default_value=True, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='objc_class_prefix', full_name='google.protobuf.FileOptions.objc_class_prefix', index=13, - number=36, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='csharp_namespace', full_name='google.protobuf.FileOptions.csharp_namespace', index=14, 
- number=37, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='swift_prefix', full_name='google.protobuf.FileOptions.swift_prefix', index=15, - number=39, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='php_class_prefix', full_name='google.protobuf.FileOptions.php_class_prefix', index=16, - number=40, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='php_namespace', full_name='google.protobuf.FileOptions.php_namespace', index=17, - number=41, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='php_metadata_namespace', full_name='google.protobuf.FileOptions.php_metadata_namespace', index=18, - number=44, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='ruby_package', full_name='google.protobuf.FileOptions.ruby_package', index=19, - number=45, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='uninterpreted_option', full_name='google.protobuf.FileOptions.uninterpreted_option', index=20, - number=999, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - _FILEOPTIONS_OPTIMIZEMODE, - ], - serialized_options=None, - is_extendable=True, - syntax='proto2', - extension_ranges=[(1000, 536870912), ], - oneofs=[ - ], - ) - - - _MESSAGEOPTIONS = _descriptor.Descriptor( - name='MessageOptions', - full_name='google.protobuf.MessageOptions', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='message_set_wire_format', full_name='google.protobuf.MessageOptions.message_set_wire_format', index=0, - number=1, type=8, cpp_type=7, label=1, - has_default_value=True, default_value=False, - message_type=None, enum_type=None, 
containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='no_standard_descriptor_accessor', full_name='google.protobuf.MessageOptions.no_standard_descriptor_accessor', index=1, - number=2, type=8, cpp_type=7, label=1, - has_default_value=True, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='deprecated', full_name='google.protobuf.MessageOptions.deprecated', index=2, - number=3, type=8, cpp_type=7, label=1, - has_default_value=True, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='map_entry', full_name='google.protobuf.MessageOptions.map_entry', index=3, - number=7, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='uninterpreted_option', full_name='google.protobuf.MessageOptions.uninterpreted_option', index=4, - number=999, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=True, - syntax='proto2', - extension_ranges=[(1000, 536870912), ], - oneofs=[ - ], - ) - - - _FIELDOPTIONS = _descriptor.Descriptor( - name='FieldOptions', - full_name='google.protobuf.FieldOptions', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='ctype', full_name='google.protobuf.FieldOptions.ctype', index=0, - number=1, type=14, cpp_type=8, label=1, - has_default_value=True, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='packed', full_name='google.protobuf.FieldOptions.packed', index=1, - number=2, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='jstype', full_name='google.protobuf.FieldOptions.jstype', index=2, - number=6, type=14, cpp_type=8, label=1, - has_default_value=True, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='lazy', full_name='google.protobuf.FieldOptions.lazy', index=3, - number=5, type=8, cpp_type=7, label=1, - has_default_value=True, 
default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='unverified_lazy', full_name='google.protobuf.FieldOptions.unverified_lazy', index=4, - number=15, type=8, cpp_type=7, label=1, - has_default_value=True, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='deprecated', full_name='google.protobuf.FieldOptions.deprecated', index=5, - number=3, type=8, cpp_type=7, label=1, - has_default_value=True, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='weak', full_name='google.protobuf.FieldOptions.weak', index=6, - number=10, type=8, cpp_type=7, label=1, - has_default_value=True, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='uninterpreted_option', full_name='google.protobuf.FieldOptions.uninterpreted_option', index=7, - number=999, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - _FIELDOPTIONS_CTYPE, - _FIELDOPTIONS_JSTYPE, - ], - serialized_options=None, - is_extendable=True, - syntax='proto2', - extension_ranges=[(1000, 536870912), ], - oneofs=[ - ], - ) - - - _ONEOFOPTIONS = _descriptor.Descriptor( - name='OneofOptions', - full_name='google.protobuf.OneofOptions', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='uninterpreted_option', full_name='google.protobuf.OneofOptions.uninterpreted_option', index=0, - number=999, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=True, - syntax='proto2', - extension_ranges=[(1000, 536870912), ], - oneofs=[ - ], - ) - - - _ENUMOPTIONS = _descriptor.Descriptor( - name='EnumOptions', - full_name='google.protobuf.EnumOptions', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='allow_alias', full_name='google.protobuf.EnumOptions.allow_alias', index=0, - number=2, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( 
- name='deprecated', full_name='google.protobuf.EnumOptions.deprecated', index=1, - number=3, type=8, cpp_type=7, label=1, - has_default_value=True, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='uninterpreted_option', full_name='google.protobuf.EnumOptions.uninterpreted_option', index=2, - number=999, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=True, - syntax='proto2', - extension_ranges=[(1000, 536870912), ], - oneofs=[ - ], - ) - - - _ENUMVALUEOPTIONS = _descriptor.Descriptor( - name='EnumValueOptions', - full_name='google.protobuf.EnumValueOptions', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='deprecated', full_name='google.protobuf.EnumValueOptions.deprecated', index=0, - number=1, type=8, cpp_type=7, label=1, - has_default_value=True, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='uninterpreted_option', full_name='google.protobuf.EnumValueOptions.uninterpreted_option', index=1, - number=999, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=True, - syntax='proto2', - extension_ranges=[(1000, 536870912), ], - oneofs=[ - ], - ) - - - _SERVICEOPTIONS = _descriptor.Descriptor( - name='ServiceOptions', - full_name='google.protobuf.ServiceOptions', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='deprecated', full_name='google.protobuf.ServiceOptions.deprecated', index=0, - number=33, type=8, cpp_type=7, label=1, - has_default_value=True, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='uninterpreted_option', full_name='google.protobuf.ServiceOptions.uninterpreted_option', index=1, - number=999, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=True, - syntax='proto2', - extension_ranges=[(1000, 536870912), ], - oneofs=[ - ], - ) - - - _METHODOPTIONS = _descriptor.Descriptor( - name='MethodOptions', - 
full_name='google.protobuf.MethodOptions', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='deprecated', full_name='google.protobuf.MethodOptions.deprecated', index=0, - number=33, type=8, cpp_type=7, label=1, - has_default_value=True, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='idempotency_level', full_name='google.protobuf.MethodOptions.idempotency_level', index=1, - number=34, type=14, cpp_type=8, label=1, - has_default_value=True, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='uninterpreted_option', full_name='google.protobuf.MethodOptions.uninterpreted_option', index=2, - number=999, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - _METHODOPTIONS_IDEMPOTENCYLEVEL, - ], - serialized_options=None, - is_extendable=True, - syntax='proto2', - extension_ranges=[(1000, 536870912), ], - oneofs=[ - ], - ) - - - _UNINTERPRETEDOPTION_NAMEPART = _descriptor.Descriptor( - name='NamePart', - full_name='google.protobuf.UninterpretedOption.NamePart', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='name_part', full_name='google.protobuf.UninterpretedOption.NamePart.name_part', index=0, - number=1, type=9, cpp_type=9, label=2, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='is_extension', full_name='google.protobuf.UninterpretedOption.NamePart.is_extension', index=1, - number=2, type=8, cpp_type=7, label=2, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - ) - - _UNINTERPRETEDOPTION = _descriptor.Descriptor( - name='UninterpretedOption', - full_name='google.protobuf.UninterpretedOption', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='name', full_name='google.protobuf.UninterpretedOption.name', index=0, - number=2, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - 
_descriptor.FieldDescriptor( - name='identifier_value', full_name='google.protobuf.UninterpretedOption.identifier_value', index=1, - number=3, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='positive_int_value', full_name='google.protobuf.UninterpretedOption.positive_int_value', index=2, - number=4, type=4, cpp_type=4, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='negative_int_value', full_name='google.protobuf.UninterpretedOption.negative_int_value', index=3, - number=5, type=3, cpp_type=2, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='double_value', full_name='google.protobuf.UninterpretedOption.double_value', index=4, - number=6, type=1, cpp_type=5, label=1, - has_default_value=False, default_value=float(0), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='string_value', full_name='google.protobuf.UninterpretedOption.string_value', index=5, - number=7, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=b"", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='aggregate_value', full_name='google.protobuf.UninterpretedOption.aggregate_value', index=6, - number=8, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[_UNINTERPRETEDOPTION_NAMEPART, ], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - ) - - - _SOURCECODEINFO_LOCATION = _descriptor.Descriptor( - name='Location', - full_name='google.protobuf.SourceCodeInfo.Location', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='path', full_name='google.protobuf.SourceCodeInfo.Location.path', index=0, - number=1, type=5, cpp_type=1, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='span', full_name='google.protobuf.SourceCodeInfo.Location.span', index=1, - number=2, type=5, cpp_type=1, label=3, - has_default_value=False, default_value=[], - 
message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='leading_comments', full_name='google.protobuf.SourceCodeInfo.Location.leading_comments', index=2, - number=3, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='trailing_comments', full_name='google.protobuf.SourceCodeInfo.Location.trailing_comments', index=3, - number=4, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='leading_detached_comments', full_name='google.protobuf.SourceCodeInfo.Location.leading_detached_comments', index=4, - number=6, type=9, cpp_type=9, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - ) - - _SOURCECODEINFO = _descriptor.Descriptor( - name='SourceCodeInfo', - full_name='google.protobuf.SourceCodeInfo', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='location', full_name='google.protobuf.SourceCodeInfo.location', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[_SOURCECODEINFO_LOCATION, ], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - ) - - - _GENERATEDCODEINFO_ANNOTATION = _descriptor.Descriptor( - name='Annotation', - full_name='google.protobuf.GeneratedCodeInfo.Annotation', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='path', full_name='google.protobuf.GeneratedCodeInfo.Annotation.path', index=0, - number=1, type=5, cpp_type=1, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='source_file', full_name='google.protobuf.GeneratedCodeInfo.Annotation.source_file', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, 
create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='begin', full_name='google.protobuf.GeneratedCodeInfo.Annotation.begin', index=2, - number=3, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='end', full_name='google.protobuf.GeneratedCodeInfo.Annotation.end', index=3, - number=4, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - ) - - _GENERATEDCODEINFO = _descriptor.Descriptor( - name='GeneratedCodeInfo', - full_name='google.protobuf.GeneratedCodeInfo', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='annotation', full_name='google.protobuf.GeneratedCodeInfo.annotation', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[_GENERATEDCODEINFO_ANNOTATION, ], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto2', - extension_ranges=[], - oneofs=[ - ], - ) - - _FILEDESCRIPTORSET.fields_by_name['file'].message_type = _FILEDESCRIPTORPROTO - _FILEDESCRIPTORPROTO.fields_by_name['message_type'].message_type = _DESCRIPTORPROTO - _FILEDESCRIPTORPROTO.fields_by_name['enum_type'].message_type = _ENUMDESCRIPTORPROTO - _FILEDESCRIPTORPROTO.fields_by_name['service'].message_type = _SERVICEDESCRIPTORPROTO - _FILEDESCRIPTORPROTO.fields_by_name['extension'].message_type = _FIELDDESCRIPTORPROTO - _FILEDESCRIPTORPROTO.fields_by_name['options'].message_type = _FILEOPTIONS - _FILEDESCRIPTORPROTO.fields_by_name['source_code_info'].message_type = _SOURCECODEINFO - _DESCRIPTORPROTO_EXTENSIONRANGE.fields_by_name['options'].message_type = _EXTENSIONRANGEOPTIONS - _DESCRIPTORPROTO_EXTENSIONRANGE.containing_type = _DESCRIPTORPROTO - _DESCRIPTORPROTO_RESERVEDRANGE.containing_type = _DESCRIPTORPROTO - _DESCRIPTORPROTO.fields_by_name['field'].message_type = _FIELDDESCRIPTORPROTO - _DESCRIPTORPROTO.fields_by_name['extension'].message_type = _FIELDDESCRIPTORPROTO - _DESCRIPTORPROTO.fields_by_name['nested_type'].message_type = _DESCRIPTORPROTO - _DESCRIPTORPROTO.fields_by_name['enum_type'].message_type = _ENUMDESCRIPTORPROTO - _DESCRIPTORPROTO.fields_by_name['extension_range'].message_type = _DESCRIPTORPROTO_EXTENSIONRANGE - _DESCRIPTORPROTO.fields_by_name['oneof_decl'].message_type = _ONEOFDESCRIPTORPROTO - _DESCRIPTORPROTO.fields_by_name['options'].message_type = _MESSAGEOPTIONS - _DESCRIPTORPROTO.fields_by_name['reserved_range'].message_type = _DESCRIPTORPROTO_RESERVEDRANGE - _EXTENSIONRANGEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION - _FIELDDESCRIPTORPROTO.fields_by_name['label'].enum_type = 
_FIELDDESCRIPTORPROTO_LABEL - _FIELDDESCRIPTORPROTO.fields_by_name['type'].enum_type = _FIELDDESCRIPTORPROTO_TYPE - _FIELDDESCRIPTORPROTO.fields_by_name['options'].message_type = _FIELDOPTIONS - _FIELDDESCRIPTORPROTO_TYPE.containing_type = _FIELDDESCRIPTORPROTO - _FIELDDESCRIPTORPROTO_LABEL.containing_type = _FIELDDESCRIPTORPROTO - _ONEOFDESCRIPTORPROTO.fields_by_name['options'].message_type = _ONEOFOPTIONS - _ENUMDESCRIPTORPROTO_ENUMRESERVEDRANGE.containing_type = _ENUMDESCRIPTORPROTO - _ENUMDESCRIPTORPROTO.fields_by_name['value'].message_type = _ENUMVALUEDESCRIPTORPROTO - _ENUMDESCRIPTORPROTO.fields_by_name['options'].message_type = _ENUMOPTIONS - _ENUMDESCRIPTORPROTO.fields_by_name['reserved_range'].message_type = _ENUMDESCRIPTORPROTO_ENUMRESERVEDRANGE - _ENUMVALUEDESCRIPTORPROTO.fields_by_name['options'].message_type = _ENUMVALUEOPTIONS - _SERVICEDESCRIPTORPROTO.fields_by_name['method'].message_type = _METHODDESCRIPTORPROTO - _SERVICEDESCRIPTORPROTO.fields_by_name['options'].message_type = _SERVICEOPTIONS - _METHODDESCRIPTORPROTO.fields_by_name['options'].message_type = _METHODOPTIONS - _FILEOPTIONS.fields_by_name['optimize_for'].enum_type = _FILEOPTIONS_OPTIMIZEMODE - _FILEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION - _FILEOPTIONS_OPTIMIZEMODE.containing_type = _FILEOPTIONS - _MESSAGEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION - _FIELDOPTIONS.fields_by_name['ctype'].enum_type = _FIELDOPTIONS_CTYPE - _FIELDOPTIONS.fields_by_name['jstype'].enum_type = _FIELDOPTIONS_JSTYPE - _FIELDOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION - _FIELDOPTIONS_CTYPE.containing_type = _FIELDOPTIONS - _FIELDOPTIONS_JSTYPE.containing_type = _FIELDOPTIONS - _ONEOFOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION - _ENUMOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION - _ENUMVALUEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION - _SERVICEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION - _METHODOPTIONS.fields_by_name['idempotency_level'].enum_type = _METHODOPTIONS_IDEMPOTENCYLEVEL - _METHODOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION - _METHODOPTIONS_IDEMPOTENCYLEVEL.containing_type = _METHODOPTIONS - _UNINTERPRETEDOPTION_NAMEPART.containing_type = _UNINTERPRETEDOPTION - _UNINTERPRETEDOPTION.fields_by_name['name'].message_type = _UNINTERPRETEDOPTION_NAMEPART - _SOURCECODEINFO_LOCATION.containing_type = _SOURCECODEINFO - _SOURCECODEINFO.fields_by_name['location'].message_type = _SOURCECODEINFO_LOCATION - _GENERATEDCODEINFO_ANNOTATION.containing_type = _GENERATEDCODEINFO - _GENERATEDCODEINFO.fields_by_name['annotation'].message_type = _GENERATEDCODEINFO_ANNOTATION - DESCRIPTOR.message_types_by_name['FileDescriptorSet'] = _FILEDESCRIPTORSET - DESCRIPTOR.message_types_by_name['FileDescriptorProto'] = _FILEDESCRIPTORPROTO - DESCRIPTOR.message_types_by_name['DescriptorProto'] = _DESCRIPTORPROTO - DESCRIPTOR.message_types_by_name['ExtensionRangeOptions'] = _EXTENSIONRANGEOPTIONS - DESCRIPTOR.message_types_by_name['FieldDescriptorProto'] = _FIELDDESCRIPTORPROTO - DESCRIPTOR.message_types_by_name['OneofDescriptorProto'] = _ONEOFDESCRIPTORPROTO - DESCRIPTOR.message_types_by_name['EnumDescriptorProto'] = _ENUMDESCRIPTORPROTO - DESCRIPTOR.message_types_by_name['EnumValueDescriptorProto'] = _ENUMVALUEDESCRIPTORPROTO - 
DESCRIPTOR.message_types_by_name['ServiceDescriptorProto'] = _SERVICEDESCRIPTORPROTO - DESCRIPTOR.message_types_by_name['MethodDescriptorProto'] = _METHODDESCRIPTORPROTO - DESCRIPTOR.message_types_by_name['FileOptions'] = _FILEOPTIONS - DESCRIPTOR.message_types_by_name['MessageOptions'] = _MESSAGEOPTIONS - DESCRIPTOR.message_types_by_name['FieldOptions'] = _FIELDOPTIONS - DESCRIPTOR.message_types_by_name['OneofOptions'] = _ONEOFOPTIONS - DESCRIPTOR.message_types_by_name['EnumOptions'] = _ENUMOPTIONS - DESCRIPTOR.message_types_by_name['EnumValueOptions'] = _ENUMVALUEOPTIONS - DESCRIPTOR.message_types_by_name['ServiceOptions'] = _SERVICEOPTIONS - DESCRIPTOR.message_types_by_name['MethodOptions'] = _METHODOPTIONS - DESCRIPTOR.message_types_by_name['UninterpretedOption'] = _UNINTERPRETEDOPTION - DESCRIPTOR.message_types_by_name['SourceCodeInfo'] = _SOURCECODEINFO - DESCRIPTOR.message_types_by_name['GeneratedCodeInfo'] = _GENERATEDCODEINFO - _sym_db.RegisterFileDescriptor(DESCRIPTOR) - -else: - _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.descriptor_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - - DESCRIPTOR._options = None - _FILEDESCRIPTORSET._serialized_start=53 - _FILEDESCRIPTORSET._serialized_end=124 - _FILEDESCRIPTORPROTO._serialized_start=127 - _FILEDESCRIPTORPROTO._serialized_end=602 - _DESCRIPTORPROTO._serialized_start=605 - _DESCRIPTORPROTO._serialized_end=1286 - _DESCRIPTORPROTO_EXTENSIONRANGE._serialized_start=1140 - _DESCRIPTORPROTO_EXTENSIONRANGE._serialized_end=1241 - _DESCRIPTORPROTO_RESERVEDRANGE._serialized_start=1243 - _DESCRIPTORPROTO_RESERVEDRANGE._serialized_end=1286 - _EXTENSIONRANGEOPTIONS._serialized_start=1288 - _EXTENSIONRANGEOPTIONS._serialized_end=1391 - _FIELDDESCRIPTORPROTO._serialized_start=1394 - _FIELDDESCRIPTORPROTO._serialized_end=2119 - _FIELDDESCRIPTORPROTO_TYPE._serialized_start=1740 - _FIELDDESCRIPTORPROTO_TYPE._serialized_end=2050 - _FIELDDESCRIPTORPROTO_LABEL._serialized_start=2052 - _FIELDDESCRIPTORPROTO_LABEL._serialized_end=2119 - _ONEOFDESCRIPTORPROTO._serialized_start=2121 - _ONEOFDESCRIPTORPROTO._serialized_end=2205 - _ENUMDESCRIPTORPROTO._serialized_start=2208 - _ENUMDESCRIPTORPROTO._serialized_end=2500 - _ENUMDESCRIPTORPROTO_ENUMRESERVEDRANGE._serialized_start=2453 - _ENUMDESCRIPTORPROTO_ENUMRESERVEDRANGE._serialized_end=2500 - _ENUMVALUEDESCRIPTORPROTO._serialized_start=2502 - _ENUMVALUEDESCRIPTORPROTO._serialized_end=2610 - _SERVICEDESCRIPTORPROTO._serialized_start=2613 - _SERVICEDESCRIPTORPROTO._serialized_end=2757 - _METHODDESCRIPTORPROTO._serialized_start=2760 - _METHODDESCRIPTORPROTO._serialized_end=2953 - _FILEOPTIONS._serialized_start=2956 - _FILEOPTIONS._serialized_end=3761 - _FILEOPTIONS_OPTIMIZEMODE._serialized_start=3686 - _FILEOPTIONS_OPTIMIZEMODE._serialized_end=3744 - _MESSAGEOPTIONS._serialized_start=3764 - _MESSAGEOPTIONS._serialized_end=4024 - _FIELDOPTIONS._serialized_start=4027 - _FIELDOPTIONS._serialized_end=4473 - _FIELDOPTIONS_CTYPE._serialized_start=4354 - _FIELDOPTIONS_CTYPE._serialized_end=4401 - _FIELDOPTIONS_JSTYPE._serialized_start=4403 - _FIELDOPTIONS_JSTYPE._serialized_end=4456 - _ONEOFOPTIONS._serialized_start=4475 - _ONEOFOPTIONS._serialized_end=4569 - _ENUMOPTIONS._serialized_start=4572 - _ENUMOPTIONS._serialized_end=4719 - _ENUMVALUEOPTIONS._serialized_start=4721 - _ENUMVALUEOPTIONS._serialized_end=4846 - _SERVICEOPTIONS._serialized_start=4848 - _SERVICEOPTIONS._serialized_end=4971 - 
_METHODOPTIONS._serialized_start=4974 - _METHODOPTIONS._serialized_end=5275 - _METHODOPTIONS_IDEMPOTENCYLEVEL._serialized_start=5184 - _METHODOPTIONS_IDEMPOTENCYLEVEL._serialized_end=5264 - _UNINTERPRETEDOPTION._serialized_start=5278 - _UNINTERPRETEDOPTION._serialized_end=5564 - _UNINTERPRETEDOPTION_NAMEPART._serialized_start=5513 - _UNINTERPRETEDOPTION_NAMEPART._serialized_end=5564 - _SOURCECODEINFO._serialized_start=5567 - _SOURCECODEINFO._serialized_end=5780 - _SOURCECODEINFO_LOCATION._serialized_start=5646 - _SOURCECODEINFO_LOCATION._serialized_end=5780 - _GENERATEDCODEINFO._serialized_start=5783 - _GENERATEDCODEINFO._serialized_end=5950 - _GENERATEDCODEINFO_ANNOTATION._serialized_start=5871 - _GENERATEDCODEINFO_ANNOTATION._serialized_end=5950 -# @@protoc_insertion_point(module_scope) diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/descriptor_pool.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/descriptor_pool.py deleted file mode 100644 index 911372a8b0..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/descriptor_pool.py +++ /dev/null @@ -1,1295 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. -# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -"""Provides DescriptorPool to use as a container for proto2 descriptors. - -The DescriptorPool is used in conjection with a DescriptorDatabase to maintain -a collection of protocol buffer descriptors for use when dynamically creating -message types at runtime. - -For most applications protocol buffers should be used via modules generated by -the protocol buffer compiler tool. This should only be used when the type of -protocol buffers used in an application or library cannot be predetermined. - -Below is a straightforward example on how to use this class:: - - pool = DescriptorPool() - file_descriptor_protos = [ ... 
] - for file_descriptor_proto in file_descriptor_protos: - pool.Add(file_descriptor_proto) - my_message_descriptor = pool.FindMessageTypeByName('some.package.MessageType') - -The message descriptor can be used in conjunction with the message_factory -module in order to create a protocol buffer class that can be encoded and -decoded. - -If you want to get a Python class for the specified proto, use the -helper functions inside google.protobuf.message_factory -directly instead of this class. -""" - -__author__ = 'matthewtoia@google.com (Matt Toia)' - -import collections -import warnings - -from google.protobuf import descriptor -from google.protobuf import descriptor_database -from google.protobuf import text_encoding - - -_USE_C_DESCRIPTORS = descriptor._USE_C_DESCRIPTORS # pylint: disable=protected-access - - -def _Deprecated(func): - """Mark functions as deprecated.""" - - def NewFunc(*args, **kwargs): - warnings.warn( - 'Call to deprecated function %s(). Note: Do add unlinked descriptors ' - 'to descriptor_pool is wrong. Use Add() or AddSerializedFile() ' - 'instead.' % func.__name__, - category=DeprecationWarning) - return func(*args, **kwargs) - NewFunc.__name__ = func.__name__ - NewFunc.__doc__ = func.__doc__ - NewFunc.__dict__.update(func.__dict__) - return NewFunc - - -def _NormalizeFullyQualifiedName(name): - """Remove leading period from fully-qualified type name. - - Due to b/13860351 in descriptor_database.py, types in the root namespace are - generated with a leading period. This function removes that prefix. - - Args: - name (str): The fully-qualified symbol name. - - Returns: - str: The normalized fully-qualified symbol name. - """ - return name.lstrip('.') - - -def _OptionsOrNone(descriptor_proto): - """Returns the value of the field `options`, or None if it is not set.""" - if descriptor_proto.HasField('options'): - return descriptor_proto.options - else: - return None - - -def _IsMessageSetExtension(field): - return (field.is_extension and - field.containing_type.has_options and - field.containing_type.GetOptions().message_set_wire_format and - field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and - field.label == descriptor.FieldDescriptor.LABEL_OPTIONAL) - - -class DescriptorPool(object): - """A collection of protobufs dynamically constructed by descriptor protos.""" - - if _USE_C_DESCRIPTORS: - - def __new__(cls, descriptor_db=None): - # pylint: disable=protected-access - return descriptor._message.DescriptorPool(descriptor_db) - - def __init__(self, descriptor_db=None): - """Initializes a Pool of proto buffs. - - The descriptor_db argument to the constructor is provided to allow - specialized file descriptor proto lookup code to be triggered on demand. An - example would be an implementation which will read and compile a file - specified in a call to FindFileByName() and not require the call to Add() - at all. Results from this database will be cached internally here as well. - - Args: - descriptor_db: A secondary source of file descriptors. - """ - - self._internal_db = descriptor_database.DescriptorDatabase() - self._descriptor_db = descriptor_db - self._descriptors = {} - self._enum_descriptors = {} - self._service_descriptors = {} - self._file_descriptors = {} - self._toplevel_extensions = {} - # TODO(jieluo): Remove _file_desc_by_toplevel_extension after - # maybe year 2020 for compatibility issue (with 3.4.1 only). 
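
The module docstring above sketches the dynamic workflow end to end: describe a file with a FileDescriptorProto at runtime, register it with Add(), resolve the message descriptor, and hand it to message_factory. A minimal runnable version of that sketch follows; the 'example.Ping' file, package, and field names are invented for illustration, and MessageFactory().GetPrototype() is the entry point in this generation of protobuf (newer releases expose message_factory.GetMessageClass() instead).

    from google.protobuf import descriptor_pb2, descriptor_pool, message_factory

    pool = descriptor_pool.DescriptorPool()

    # Describe a proto2 file with a single message at runtime.
    file_proto = descriptor_pb2.FileDescriptorProto()
    file_proto.name = 'example/dynamic.proto'
    file_proto.package = 'example'
    msg_proto = file_proto.message_type.add()
    msg_proto.name = 'Ping'
    field = msg_proto.field.add()
    field.name = 'payload'
    field.number = 1
    field.type = descriptor_pb2.FieldDescriptorProto.TYPE_STRING
    field.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL

    # Register the proto, then resolve the built message descriptor.
    pool.Add(file_proto)
    desc = pool.FindMessageTypeByName('example.Ping')

    # Turn the descriptor into a message class and round-trip a value.
    Ping = message_factory.MessageFactory(pool).GetPrototype(desc)
    msg = Ping(payload='hello')
    assert Ping.FromString(msg.SerializeToString()).payload == 'hello'
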
-    self._file_desc_by_toplevel_extension = {}
-    self._top_enum_values = {}
-    # We store extensions in two two-level mappings: The first key is the
-    # descriptor of the message being extended, the second key is the extension
-    # full name or its tag number.
-    self._extensions_by_name = collections.defaultdict(dict)
-    self._extensions_by_number = collections.defaultdict(dict)
-
-  def _CheckConflictRegister(self, desc, desc_name, file_name):
-    """Check if the descriptor name conflicts with another of the same name.
-
-    Args:
-      desc: Descriptor of a message, enum, service, extension or enum value.
-      desc_name (str): the full name of desc.
-      file_name (str): The file name of descriptor.
-    """
-    for register, descriptor_type in [
-        (self._descriptors, descriptor.Descriptor),
-        (self._enum_descriptors, descriptor.EnumDescriptor),
-        (self._service_descriptors, descriptor.ServiceDescriptor),
-        (self._toplevel_extensions, descriptor.FieldDescriptor),
-        (self._top_enum_values, descriptor.EnumValueDescriptor)]:
-      if desc_name in register:
-        old_desc = register[desc_name]
-        if isinstance(old_desc, descriptor.EnumValueDescriptor):
-          old_file = old_desc.type.file.name
-        else:
-          old_file = old_desc.file.name
-
-        if not isinstance(desc, descriptor_type) or (
-            old_file != file_name):
-          error_msg = ('Conflict register for file "' + file_name +
-                       '": ' + desc_name +
-                       ' is already defined in file "' +
-                       old_file + '". Please fix the conflict by adding a '
-                       'package name to the proto file, or use a different '
-                       'name for the duplicate.')
-          if isinstance(desc, descriptor.EnumValueDescriptor):
-            error_msg += ('\nNote: enum values appear as '
-                          'siblings of the enum type instead of '
-                          'children of it.')
-
-          raise TypeError(error_msg)
-
-    return
-
-  def Add(self, file_desc_proto):
-    """Adds the FileDescriptorProto and its types to this pool.
-
-    Args:
-      file_desc_proto (FileDescriptorProto): The file descriptor to add.
-    """
-
-    self._internal_db.Add(file_desc_proto)
-
-  def AddSerializedFile(self, serialized_file_desc_proto):
-    """Adds the FileDescriptorProto and its types to this pool.
-
-    Args:
-      serialized_file_desc_proto (bytes): A bytes string, serialization of the
-        :class:`FileDescriptorProto` to add.
-
-    Returns:
-      FileDescriptor: Descriptor for the added file.
-    """
-
-    # pylint: disable=g-import-not-at-top
-    from google.protobuf import descriptor_pb2
-    file_desc_proto = descriptor_pb2.FileDescriptorProto.FromString(
-        serialized_file_desc_proto)
-    file_desc = self._ConvertFileProtoToFileDescriptor(file_desc_proto)
-    file_desc.serialized_pb = serialized_file_desc_proto
-    return file_desc
-
-  # Adding a Descriptor to the descriptor pool is deprecated. Please use Add()
-  # or AddSerializedFile() to add a FileDescriptorProto instead.
-  @_Deprecated
-  def AddDescriptor(self, desc):
-    self._AddDescriptor(desc)
-
-  # Never call this method. It is for internal usage only.
-  def _AddDescriptor(self, desc):
-    """Adds a Descriptor to the pool, non-recursively.
-
-    If the Descriptor contains nested messages or enums, the caller must
-    explicitly register them. This method also registers the FileDescriptor
-    associated with the message.
-
-    Args:
-      desc: A Descriptor.
-    """
-    if not isinstance(desc, descriptor.Descriptor):
-      raise TypeError('Expected instance of descriptor.Descriptor.')
-
-    self._CheckConflictRegister(desc, desc.full_name, desc.file.name)
-
-    self._descriptors[desc.full_name] = desc
-    self._AddFileDescriptor(desc.file)
-
-  # Adding an EnumDescriptor to the descriptor pool is deprecated. Please use
-  # Add() or AddSerializedFile() to add a FileDescriptorProto instead.
-  @_Deprecated
-  def AddEnumDescriptor(self, enum_desc):
-    self._AddEnumDescriptor(enum_desc)
-
-  # Never call this method. It is for internal usage only.
-  def _AddEnumDescriptor(self, enum_desc):
-    """Adds an EnumDescriptor to the pool.
-
-    This method also registers the FileDescriptor associated with the enum.
-
-    Args:
-      enum_desc: An EnumDescriptor.
-    """
-
-    if not isinstance(enum_desc, descriptor.EnumDescriptor):
-      raise TypeError('Expected instance of descriptor.EnumDescriptor.')
-
-    file_name = enum_desc.file.name
-    self._CheckConflictRegister(enum_desc, enum_desc.full_name, file_name)
-    self._enum_descriptors[enum_desc.full_name] = enum_desc
-
-    # Top enum values need to be indexed.
-    # Count the number of dots to see whether the enum is toplevel or nested
-    # in a message. We cannot use enum_desc.containing_type at this stage.
-    if enum_desc.file.package:
-      top_level = (enum_desc.full_name.count('.')
-                   - enum_desc.file.package.count('.') == 1)
-    else:
-      top_level = enum_desc.full_name.count('.') == 0
-    if top_level:
-      file_name = enum_desc.file.name
-      package = enum_desc.file.package
-      for enum_value in enum_desc.values:
-        full_name = _NormalizeFullyQualifiedName(
-            '.'.join((package, enum_value.name)))
-        self._CheckConflictRegister(enum_value, full_name, file_name)
-        self._top_enum_values[full_name] = enum_value
-    self._AddFileDescriptor(enum_desc.file)
-
-  # Adding a ServiceDescriptor to the descriptor pool is deprecated. Please use
-  # Add() or AddSerializedFile() to add a FileDescriptorProto instead.
-  @_Deprecated
-  def AddServiceDescriptor(self, service_desc):
-    self._AddServiceDescriptor(service_desc)
-
-  # Never call this method. It is for internal usage only.
-  def _AddServiceDescriptor(self, service_desc):
-    """Adds a ServiceDescriptor to the pool.
-
-    Args:
-      service_desc: A ServiceDescriptor.
-    """
-
-    if not isinstance(service_desc, descriptor.ServiceDescriptor):
-      raise TypeError('Expected instance of descriptor.ServiceDescriptor.')
-
-    self._CheckConflictRegister(service_desc, service_desc.full_name,
-                                service_desc.file.name)
-    self._service_descriptors[service_desc.full_name] = service_desc
-
-  # Adding an ExtensionDescriptor to the descriptor pool is deprecated. Please
-  # use Add() or AddSerializedFile() to add a FileDescriptorProto instead.
-  @_Deprecated
-  def AddExtensionDescriptor(self, extension):
-    self._AddExtensionDescriptor(extension)
-
-  # Never call this method. It is for internal usage only.
-  def _AddExtensionDescriptor(self, extension):
-    """Adds a FieldDescriptor describing an extension to the pool.
-
-    Args:
-      extension: A FieldDescriptor.
-
-    Raises:
-      AssertionError: when another extension with the same number extends the
-        same message.
-      TypeError: when the specified extension is not a
-        descriptor.FieldDescriptor.
-    """
-    if not (isinstance(extension, descriptor.FieldDescriptor) and
-            extension.is_extension):
-      raise TypeError('Expected an extension descriptor.')
-
-    if extension.extension_scope is None:
-      self._toplevel_extensions[extension.full_name] = extension
-
-    try:
-      existing_desc = self._extensions_by_number[
-          extension.containing_type][extension.number]
-    except KeyError:
-      pass
-    else:
-      if extension is not existing_desc:
-        raise AssertionError(
-            'Extensions "%s" and "%s" both try to extend message type "%s" '
-            'with field number %d.'
% - (extension.full_name, existing_desc.full_name, - extension.containing_type.full_name, extension.number)) - - self._extensions_by_number[extension.containing_type][ - extension.number] = extension - self._extensions_by_name[extension.containing_type][ - extension.full_name] = extension - - # Also register MessageSet extensions with the type name. - if _IsMessageSetExtension(extension): - self._extensions_by_name[extension.containing_type][ - extension.message_type.full_name] = extension - - @_Deprecated - def AddFileDescriptor(self, file_desc): - self._InternalAddFileDescriptor(file_desc) - - # Never call this method. It is for internal usage only. - def _InternalAddFileDescriptor(self, file_desc): - """Adds a FileDescriptor to the pool, non-recursively. - - If the FileDescriptor contains messages or enums, the caller must explicitly - register them. - - Args: - file_desc: A FileDescriptor. - """ - - self._AddFileDescriptor(file_desc) - # TODO(jieluo): This is a temporary solution for FieldDescriptor.file. - # FieldDescriptor.file is added in code gen. Remove this solution after - # maybe 2020 for compatibility reason (with 3.4.1 only). - for extension in file_desc.extensions_by_name.values(): - self._file_desc_by_toplevel_extension[ - extension.full_name] = file_desc - - def _AddFileDescriptor(self, file_desc): - """Adds a FileDescriptor to the pool, non-recursively. - - If the FileDescriptor contains messages or enums, the caller must explicitly - register them. - - Args: - file_desc: A FileDescriptor. - """ - - if not isinstance(file_desc, descriptor.FileDescriptor): - raise TypeError('Expected instance of descriptor.FileDescriptor.') - self._file_descriptors[file_desc.name] = file_desc - - def FindFileByName(self, file_name): - """Gets a FileDescriptor by file name. - - Args: - file_name (str): The path to the file to get a descriptor for. - - Returns: - FileDescriptor: The descriptor for the named file. - - Raises: - KeyError: if the file cannot be found in the pool. - """ - - try: - return self._file_descriptors[file_name] - except KeyError: - pass - - try: - file_proto = self._internal_db.FindFileByName(file_name) - except KeyError as error: - if self._descriptor_db: - file_proto = self._descriptor_db.FindFileByName(file_name) - else: - raise error - if not file_proto: - raise KeyError('Cannot find a file named %s' % file_name) - return self._ConvertFileProtoToFileDescriptor(file_proto) - - def FindFileContainingSymbol(self, symbol): - """Gets the FileDescriptor for the file containing the specified symbol. - - Args: - symbol (str): The name of the symbol to search for. - - Returns: - FileDescriptor: Descriptor for the file that contains the specified - symbol. - - Raises: - KeyError: if the file cannot be found in the pool. - """ - - symbol = _NormalizeFullyQualifiedName(symbol) - try: - return self._InternalFindFileContainingSymbol(symbol) - except KeyError: - pass - - try: - # Try fallback database. Build and find again if possible. - self._FindFileContainingSymbolInDb(symbol) - return self._InternalFindFileContainingSymbol(symbol) - except KeyError: - raise KeyError('Cannot find a file containing %s' % symbol) - - def _InternalFindFileContainingSymbol(self, symbol): - """Gets the already built FileDescriptor containing the specified symbol. - - Args: - symbol (str): The name of the symbol to search for. - - Returns: - FileDescriptor: Descriptor for the file that contains the specified - symbol. - - Raises: - KeyError: if the file cannot be found in the pool. 
- """ - try: - return self._descriptors[symbol].file - except KeyError: - pass - - try: - return self._enum_descriptors[symbol].file - except KeyError: - pass - - try: - return self._service_descriptors[symbol].file - except KeyError: - pass - - try: - return self._top_enum_values[symbol].type.file - except KeyError: - pass - - try: - return self._file_desc_by_toplevel_extension[symbol] - except KeyError: - pass - - # Try fields, enum values and nested extensions inside a message. - top_name, _, sub_name = symbol.rpartition('.') - try: - message = self.FindMessageTypeByName(top_name) - assert (sub_name in message.extensions_by_name or - sub_name in message.fields_by_name or - sub_name in message.enum_values_by_name) - return message.file - except (KeyError, AssertionError): - raise KeyError('Cannot find a file containing %s' % symbol) - - def FindMessageTypeByName(self, full_name): - """Loads the named descriptor from the pool. - - Args: - full_name (str): The full name of the descriptor to load. - - Returns: - Descriptor: The descriptor for the named type. - - Raises: - KeyError: if the message cannot be found in the pool. - """ - - full_name = _NormalizeFullyQualifiedName(full_name) - if full_name not in self._descriptors: - self._FindFileContainingSymbolInDb(full_name) - return self._descriptors[full_name] - - def FindEnumTypeByName(self, full_name): - """Loads the named enum descriptor from the pool. - - Args: - full_name (str): The full name of the enum descriptor to load. - - Returns: - EnumDescriptor: The enum descriptor for the named type. - - Raises: - KeyError: if the enum cannot be found in the pool. - """ - - full_name = _NormalizeFullyQualifiedName(full_name) - if full_name not in self._enum_descriptors: - self._FindFileContainingSymbolInDb(full_name) - return self._enum_descriptors[full_name] - - def FindFieldByName(self, full_name): - """Loads the named field descriptor from the pool. - - Args: - full_name (str): The full name of the field descriptor to load. - - Returns: - FieldDescriptor: The field descriptor for the named field. - - Raises: - KeyError: if the field cannot be found in the pool. - """ - full_name = _NormalizeFullyQualifiedName(full_name) - message_name, _, field_name = full_name.rpartition('.') - message_descriptor = self.FindMessageTypeByName(message_name) - return message_descriptor.fields_by_name[field_name] - - def FindOneofByName(self, full_name): - """Loads the named oneof descriptor from the pool. - - Args: - full_name (str): The full name of the oneof descriptor to load. - - Returns: - OneofDescriptor: The oneof descriptor for the named oneof. - - Raises: - KeyError: if the oneof cannot be found in the pool. - """ - full_name = _NormalizeFullyQualifiedName(full_name) - message_name, _, oneof_name = full_name.rpartition('.') - message_descriptor = self.FindMessageTypeByName(message_name) - return message_descriptor.oneofs_by_name[oneof_name] - - def FindExtensionByName(self, full_name): - """Loads the named extension descriptor from the pool. - - Args: - full_name (str): The full name of the extension descriptor to load. - - Returns: - FieldDescriptor: The field descriptor for the named extension. - - Raises: - KeyError: if the extension cannot be found in the pool. - """ - full_name = _NormalizeFullyQualifiedName(full_name) - try: - # The proto compiler does not give any link between the FileDescriptor - # and top-level extensions unless the FileDescriptorProto is added to - # the DescriptorDatabase, but this can impact memory usage. 
- # So we registered these extensions by name explicitly. - return self._toplevel_extensions[full_name] - except KeyError: - pass - message_name, _, extension_name = full_name.rpartition('.') - try: - # Most extensions are nested inside a message. - scope = self.FindMessageTypeByName(message_name) - except KeyError: - # Some extensions are defined at file scope. - scope = self._FindFileContainingSymbolInDb(full_name) - return scope.extensions_by_name[extension_name] - - def FindExtensionByNumber(self, message_descriptor, number): - """Gets the extension of the specified message with the specified number. - - Extensions have to be registered to this pool by calling :func:`Add` or - :func:`AddExtensionDescriptor`. - - Args: - message_descriptor (Descriptor): descriptor of the extended message. - number (int): Number of the extension field. - - Returns: - FieldDescriptor: The descriptor for the extension. - - Raises: - KeyError: when no extension with the given number is known for the - specified message. - """ - try: - return self._extensions_by_number[message_descriptor][number] - except KeyError: - self._TryLoadExtensionFromDB(message_descriptor, number) - return self._extensions_by_number[message_descriptor][number] - - def FindAllExtensions(self, message_descriptor): - """Gets all the known extensions of a given message. - - Extensions have to be registered to this pool by build related - :func:`Add` or :func:`AddExtensionDescriptor`. - - Args: - message_descriptor (Descriptor): Descriptor of the extended message. - - Returns: - list[FieldDescriptor]: Field descriptors describing the extensions. - """ - # Fallback to descriptor db if FindAllExtensionNumbers is provided. - if self._descriptor_db and hasattr( - self._descriptor_db, 'FindAllExtensionNumbers'): - full_name = message_descriptor.full_name - all_numbers = self._descriptor_db.FindAllExtensionNumbers(full_name) - for number in all_numbers: - if number in self._extensions_by_number[message_descriptor]: - continue - self._TryLoadExtensionFromDB(message_descriptor, number) - - return list(self._extensions_by_number[message_descriptor].values()) - - def _TryLoadExtensionFromDB(self, message_descriptor, number): - """Try to Load extensions from descriptor db. - - Args: - message_descriptor: descriptor of the extended message. - number: the extension number that needs to be loaded. - """ - if not self._descriptor_db: - return - # Only supported when FindFileContainingExtension is provided. - if not hasattr( - self._descriptor_db, 'FindFileContainingExtension'): - return - - full_name = message_descriptor.full_name - file_proto = self._descriptor_db.FindFileContainingExtension( - full_name, number) - - if file_proto is None: - return - - try: - self._ConvertFileProtoToFileDescriptor(file_proto) - except: - warn_msg = ('Unable to load proto file %s for extension number %d.' % - (file_proto.name, number)) - warnings.warn(warn_msg, RuntimeWarning) - - def FindServiceByName(self, full_name): - """Loads the named service descriptor from the pool. - - Args: - full_name (str): The full name of the service descriptor to load. - - Returns: - ServiceDescriptor: The service descriptor for the named service. - - Raises: - KeyError: if the service cannot be found in the pool. 
- """ - full_name = _NormalizeFullyQualifiedName(full_name) - if full_name not in self._service_descriptors: - self._FindFileContainingSymbolInDb(full_name) - return self._service_descriptors[full_name] - - def FindMethodByName(self, full_name): - """Loads the named service method descriptor from the pool. - - Args: - full_name (str): The full name of the method descriptor to load. - - Returns: - MethodDescriptor: The method descriptor for the service method. - - Raises: - KeyError: if the method cannot be found in the pool. - """ - full_name = _NormalizeFullyQualifiedName(full_name) - service_name, _, method_name = full_name.rpartition('.') - service_descriptor = self.FindServiceByName(service_name) - return service_descriptor.methods_by_name[method_name] - - def _FindFileContainingSymbolInDb(self, symbol): - """Finds the file in descriptor DB containing the specified symbol. - - Args: - symbol (str): The name of the symbol to search for. - - Returns: - FileDescriptor: The file that contains the specified symbol. - - Raises: - KeyError: if the file cannot be found in the descriptor database. - """ - try: - file_proto = self._internal_db.FindFileContainingSymbol(symbol) - except KeyError as error: - if self._descriptor_db: - file_proto = self._descriptor_db.FindFileContainingSymbol(symbol) - else: - raise error - if not file_proto: - raise KeyError('Cannot find a file containing %s' % symbol) - return self._ConvertFileProtoToFileDescriptor(file_proto) - - def _ConvertFileProtoToFileDescriptor(self, file_proto): - """Creates a FileDescriptor from a proto or returns a cached copy. - - This method also has the side effect of loading all the symbols found in - the file into the appropriate dictionaries in the pool. - - Args: - file_proto: The proto to convert. - - Returns: - A FileDescriptor matching the passed in proto. - """ - if file_proto.name not in self._file_descriptors: - built_deps = list(self._GetDeps(file_proto.dependency)) - direct_deps = [self.FindFileByName(n) for n in file_proto.dependency] - public_deps = [direct_deps[i] for i in file_proto.public_dependency] - - file_descriptor = descriptor.FileDescriptor( - pool=self, - name=file_proto.name, - package=file_proto.package, - syntax=file_proto.syntax, - options=_OptionsOrNone(file_proto), - serialized_pb=file_proto.SerializeToString(), - dependencies=direct_deps, - public_dependencies=public_deps, - # pylint: disable=protected-access - create_key=descriptor._internal_create_key) - scope = {} - - # This loop extracts all the message and enum types from all the - # dependencies of the file_proto. This is necessary to create the - # scope of available message types when defining the passed in - # file proto. 
- for dependency in built_deps: - scope.update(self._ExtractSymbols( - dependency.message_types_by_name.values())) - scope.update((_PrefixWithDot(enum.full_name), enum) - for enum in dependency.enum_types_by_name.values()) - - for message_type in file_proto.message_type: - message_desc = self._ConvertMessageDescriptor( - message_type, file_proto.package, file_descriptor, scope, - file_proto.syntax) - file_descriptor.message_types_by_name[message_desc.name] = ( - message_desc) - - for enum_type in file_proto.enum_type: - file_descriptor.enum_types_by_name[enum_type.name] = ( - self._ConvertEnumDescriptor(enum_type, file_proto.package, - file_descriptor, None, scope, True)) - - for index, extension_proto in enumerate(file_proto.extension): - extension_desc = self._MakeFieldDescriptor( - extension_proto, file_proto.package, index, file_descriptor, - is_extension=True) - extension_desc.containing_type = self._GetTypeFromScope( - file_descriptor.package, extension_proto.extendee, scope) - self._SetFieldType(extension_proto, extension_desc, - file_descriptor.package, scope) - file_descriptor.extensions_by_name[extension_desc.name] = ( - extension_desc) - self._file_desc_by_toplevel_extension[extension_desc.full_name] = ( - file_descriptor) - - for desc_proto in file_proto.message_type: - self._SetAllFieldTypes(file_proto.package, desc_proto, scope) - - if file_proto.package: - desc_proto_prefix = _PrefixWithDot(file_proto.package) - else: - desc_proto_prefix = '' - - for desc_proto in file_proto.message_type: - desc = self._GetTypeFromScope( - desc_proto_prefix, desc_proto.name, scope) - file_descriptor.message_types_by_name[desc_proto.name] = desc - - for index, service_proto in enumerate(file_proto.service): - file_descriptor.services_by_name[service_proto.name] = ( - self._MakeServiceDescriptor(service_proto, index, scope, - file_proto.package, file_descriptor)) - - self._file_descriptors[file_proto.name] = file_descriptor - - # Add extensions to the pool - file_desc = self._file_descriptors[file_proto.name] - for extension in file_desc.extensions_by_name.values(): - self._AddExtensionDescriptor(extension) - for message_type in file_desc.message_types_by_name.values(): - for extension in message_type.extensions: - self._AddExtensionDescriptor(extension) - - return file_desc - - def _ConvertMessageDescriptor(self, desc_proto, package=None, file_desc=None, - scope=None, syntax=None): - """Adds the proto to the pool in the specified package. - - Args: - desc_proto: The descriptor_pb2.DescriptorProto protobuf message. - package: The package the proto should be located in. - file_desc: The file containing this message. - scope: Dict mapping short and full symbols to message and enum types. - syntax: string indicating syntax of the file ("proto2" or "proto3") - - Returns: - The added descriptor. 
- """ - - if package: - desc_name = '.'.join((package, desc_proto.name)) - else: - desc_name = desc_proto.name - - if file_desc is None: - file_name = None - else: - file_name = file_desc.name - - if scope is None: - scope = {} - - nested = [ - self._ConvertMessageDescriptor( - nested, desc_name, file_desc, scope, syntax) - for nested in desc_proto.nested_type] - enums = [ - self._ConvertEnumDescriptor(enum, desc_name, file_desc, None, - scope, False) - for enum in desc_proto.enum_type] - fields = [self._MakeFieldDescriptor(field, desc_name, index, file_desc) - for index, field in enumerate(desc_proto.field)] - extensions = [ - self._MakeFieldDescriptor(extension, desc_name, index, file_desc, - is_extension=True) - for index, extension in enumerate(desc_proto.extension)] - oneofs = [ - # pylint: disable=g-complex-comprehension - descriptor.OneofDescriptor( - desc.name, - '.'.join((desc_name, desc.name)), - index, - None, - [], - _OptionsOrNone(desc), - # pylint: disable=protected-access - create_key=descriptor._internal_create_key) - for index, desc in enumerate(desc_proto.oneof_decl) - ] - extension_ranges = [(r.start, r.end) for r in desc_proto.extension_range] - if extension_ranges: - is_extendable = True - else: - is_extendable = False - desc = descriptor.Descriptor( - name=desc_proto.name, - full_name=desc_name, - filename=file_name, - containing_type=None, - fields=fields, - oneofs=oneofs, - nested_types=nested, - enum_types=enums, - extensions=extensions, - options=_OptionsOrNone(desc_proto), - is_extendable=is_extendable, - extension_ranges=extension_ranges, - file=file_desc, - serialized_start=None, - serialized_end=None, - syntax=syntax, - # pylint: disable=protected-access - create_key=descriptor._internal_create_key) - for nested in desc.nested_types: - nested.containing_type = desc - for enum in desc.enum_types: - enum.containing_type = desc - for field_index, field_desc in enumerate(desc_proto.field): - if field_desc.HasField('oneof_index'): - oneof_index = field_desc.oneof_index - oneofs[oneof_index].fields.append(fields[field_index]) - fields[field_index].containing_oneof = oneofs[oneof_index] - - scope[_PrefixWithDot(desc_name)] = desc - self._CheckConflictRegister(desc, desc.full_name, desc.file.name) - self._descriptors[desc_name] = desc - return desc - - def _ConvertEnumDescriptor(self, enum_proto, package=None, file_desc=None, - containing_type=None, scope=None, top_level=False): - """Make a protobuf EnumDescriptor given an EnumDescriptorProto protobuf. - - Args: - enum_proto: The descriptor_pb2.EnumDescriptorProto protobuf message. - package: Optional package name for the new message EnumDescriptor. - file_desc: The file containing the enum descriptor. - containing_type: The type containing this enum. - scope: Scope containing available types. - top_level: If True, the enum is a top level symbol. If False, the enum - is defined inside a message. 
- - Returns: - The added descriptor - """ - - if package: - enum_name = '.'.join((package, enum_proto.name)) - else: - enum_name = enum_proto.name - - if file_desc is None: - file_name = None - else: - file_name = file_desc.name - - values = [self._MakeEnumValueDescriptor(value, index) - for index, value in enumerate(enum_proto.value)] - desc = descriptor.EnumDescriptor(name=enum_proto.name, - full_name=enum_name, - filename=file_name, - file=file_desc, - values=values, - containing_type=containing_type, - options=_OptionsOrNone(enum_proto), - # pylint: disable=protected-access - create_key=descriptor._internal_create_key) - scope['.%s' % enum_name] = desc - self._CheckConflictRegister(desc, desc.full_name, desc.file.name) - self._enum_descriptors[enum_name] = desc - - # Add top level enum values. - if top_level: - for value in values: - full_name = _NormalizeFullyQualifiedName( - '.'.join((package, value.name))) - self._CheckConflictRegister(value, full_name, file_name) - self._top_enum_values[full_name] = value - - return desc - - def _MakeFieldDescriptor(self, field_proto, message_name, index, - file_desc, is_extension=False): - """Creates a field descriptor from a FieldDescriptorProto. - - For message and enum type fields, this method will do a look up - in the pool for the appropriate descriptor for that type. If it - is unavailable, it will fall back to the _source function to - create it. If this type is still unavailable, construction will - fail. - - Args: - field_proto: The proto describing the field. - message_name: The name of the containing message. - index: Index of the field - file_desc: The file containing the field descriptor. - is_extension: Indication that this field is for an extension. - - Returns: - An initialized FieldDescriptor object - """ - - if message_name: - full_name = '.'.join((message_name, field_proto.name)) - else: - full_name = field_proto.name - - if field_proto.json_name: - json_name = field_proto.json_name - else: - json_name = None - - return descriptor.FieldDescriptor( - name=field_proto.name, - full_name=full_name, - index=index, - number=field_proto.number, - type=field_proto.type, - cpp_type=None, - message_type=None, - enum_type=None, - containing_type=None, - label=field_proto.label, - has_default_value=False, - default_value=None, - is_extension=is_extension, - extension_scope=None, - options=_OptionsOrNone(field_proto), - json_name=json_name, - file=file_desc, - # pylint: disable=protected-access - create_key=descriptor._internal_create_key) - - def _SetAllFieldTypes(self, package, desc_proto, scope): - """Sets all the descriptor's fields's types. - - This method also sets the containing types on any extensions. - - Args: - package: The current package of desc_proto. - desc_proto: The message descriptor to update. - scope: Enclosing scope of available types. 
- """ - - package = _PrefixWithDot(package) - - main_desc = self._GetTypeFromScope(package, desc_proto.name, scope) - - if package == '.': - nested_package = _PrefixWithDot(desc_proto.name) - else: - nested_package = '.'.join([package, desc_proto.name]) - - for field_proto, field_desc in zip(desc_proto.field, main_desc.fields): - self._SetFieldType(field_proto, field_desc, nested_package, scope) - - for extension_proto, extension_desc in ( - zip(desc_proto.extension, main_desc.extensions)): - extension_desc.containing_type = self._GetTypeFromScope( - nested_package, extension_proto.extendee, scope) - self._SetFieldType(extension_proto, extension_desc, nested_package, scope) - - for nested_type in desc_proto.nested_type: - self._SetAllFieldTypes(nested_package, nested_type, scope) - - def _SetFieldType(self, field_proto, field_desc, package, scope): - """Sets the field's type, cpp_type, message_type and enum_type. - - Args: - field_proto: Data about the field in proto format. - field_desc: The descriptor to modify. - package: The package the field's container is in. - scope: Enclosing scope of available types. - """ - if field_proto.type_name: - desc = self._GetTypeFromScope(package, field_proto.type_name, scope) - else: - desc = None - - if not field_proto.HasField('type'): - if isinstance(desc, descriptor.Descriptor): - field_proto.type = descriptor.FieldDescriptor.TYPE_MESSAGE - else: - field_proto.type = descriptor.FieldDescriptor.TYPE_ENUM - - field_desc.cpp_type = descriptor.FieldDescriptor.ProtoTypeToCppProtoType( - field_proto.type) - - if (field_proto.type == descriptor.FieldDescriptor.TYPE_MESSAGE - or field_proto.type == descriptor.FieldDescriptor.TYPE_GROUP): - field_desc.message_type = desc - - if field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM: - field_desc.enum_type = desc - - if field_proto.label == descriptor.FieldDescriptor.LABEL_REPEATED: - field_desc.has_default_value = False - field_desc.default_value = [] - elif field_proto.HasField('default_value'): - field_desc.has_default_value = True - if (field_proto.type == descriptor.FieldDescriptor.TYPE_DOUBLE or - field_proto.type == descriptor.FieldDescriptor.TYPE_FLOAT): - field_desc.default_value = float(field_proto.default_value) - elif field_proto.type == descriptor.FieldDescriptor.TYPE_STRING: - field_desc.default_value = field_proto.default_value - elif field_proto.type == descriptor.FieldDescriptor.TYPE_BOOL: - field_desc.default_value = field_proto.default_value.lower() == 'true' - elif field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM: - field_desc.default_value = field_desc.enum_type.values_by_name[ - field_proto.default_value].number - elif field_proto.type == descriptor.FieldDescriptor.TYPE_BYTES: - field_desc.default_value = text_encoding.CUnescape( - field_proto.default_value) - elif field_proto.type == descriptor.FieldDescriptor.TYPE_MESSAGE: - field_desc.default_value = None - else: - # All other types are of the "int" type. 
- field_desc.default_value = int(field_proto.default_value) - else: - field_desc.has_default_value = False - if (field_proto.type == descriptor.FieldDescriptor.TYPE_DOUBLE or - field_proto.type == descriptor.FieldDescriptor.TYPE_FLOAT): - field_desc.default_value = 0.0 - elif field_proto.type == descriptor.FieldDescriptor.TYPE_STRING: - field_desc.default_value = u'' - elif field_proto.type == descriptor.FieldDescriptor.TYPE_BOOL: - field_desc.default_value = False - elif field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM: - field_desc.default_value = field_desc.enum_type.values[0].number - elif field_proto.type == descriptor.FieldDescriptor.TYPE_BYTES: - field_desc.default_value = b'' - elif field_proto.type == descriptor.FieldDescriptor.TYPE_MESSAGE: - field_desc.default_value = None - elif field_proto.type == descriptor.FieldDescriptor.TYPE_GROUP: - field_desc.default_value = None - else: - # All other types are of the "int" type. - field_desc.default_value = 0 - - field_desc.type = field_proto.type - - def _MakeEnumValueDescriptor(self, value_proto, index): - """Creates a enum value descriptor object from a enum value proto. - - Args: - value_proto: The proto describing the enum value. - index: The index of the enum value. - - Returns: - An initialized EnumValueDescriptor object. - """ - - return descriptor.EnumValueDescriptor( - name=value_proto.name, - index=index, - number=value_proto.number, - options=_OptionsOrNone(value_proto), - type=None, - # pylint: disable=protected-access - create_key=descriptor._internal_create_key) - - def _MakeServiceDescriptor(self, service_proto, service_index, scope, - package, file_desc): - """Make a protobuf ServiceDescriptor given a ServiceDescriptorProto. - - Args: - service_proto: The descriptor_pb2.ServiceDescriptorProto protobuf message. - service_index: The index of the service in the File. - scope: Dict mapping short and full symbols to message and enum types. - package: Optional package name for the new message EnumDescriptor. - file_desc: The file containing the service descriptor. - - Returns: - The added descriptor. - """ - - if package: - service_name = '.'.join((package, service_proto.name)) - else: - service_name = service_proto.name - - methods = [self._MakeMethodDescriptor(method_proto, service_name, package, - scope, index) - for index, method_proto in enumerate(service_proto.method)] - desc = descriptor.ServiceDescriptor( - name=service_proto.name, - full_name=service_name, - index=service_index, - methods=methods, - options=_OptionsOrNone(service_proto), - file=file_desc, - # pylint: disable=protected-access - create_key=descriptor._internal_create_key) - self._CheckConflictRegister(desc, desc.full_name, desc.file.name) - self._service_descriptors[service_name] = desc - return desc - - def _MakeMethodDescriptor(self, method_proto, service_name, package, scope, - index): - """Creates a method descriptor from a MethodDescriptorProto. - - Args: - method_proto: The proto describing the method. - service_name: The name of the containing service. - package: Optional package name to look up for types. - scope: Scope containing available types. - index: Index of the method in the service. - - Returns: - An initialized MethodDescriptor object. 
- """ - full_name = '.'.join((service_name, method_proto.name)) - input_type = self._GetTypeFromScope( - package, method_proto.input_type, scope) - output_type = self._GetTypeFromScope( - package, method_proto.output_type, scope) - return descriptor.MethodDescriptor( - name=method_proto.name, - full_name=full_name, - index=index, - containing_service=None, - input_type=input_type, - output_type=output_type, - client_streaming=method_proto.client_streaming, - server_streaming=method_proto.server_streaming, - options=_OptionsOrNone(method_proto), - # pylint: disable=protected-access - create_key=descriptor._internal_create_key) - - def _ExtractSymbols(self, descriptors): - """Pulls out all the symbols from descriptor protos. - - Args: - descriptors: The messages to extract descriptors from. - Yields: - A two element tuple of the type name and descriptor object. - """ - - for desc in descriptors: - yield (_PrefixWithDot(desc.full_name), desc) - for symbol in self._ExtractSymbols(desc.nested_types): - yield symbol - for enum in desc.enum_types: - yield (_PrefixWithDot(enum.full_name), enum) - - def _GetDeps(self, dependencies, visited=None): - """Recursively finds dependencies for file protos. - - Args: - dependencies: The names of the files being depended on. - visited: The names of files already found. - - Yields: - Each direct and indirect dependency. - """ - - visited = visited or set() - for dependency in dependencies: - if dependency not in visited: - visited.add(dependency) - dep_desc = self.FindFileByName(dependency) - yield dep_desc - public_files = [d.name for d in dep_desc.public_dependencies] - yield from self._GetDeps(public_files, visited) - - def _GetTypeFromScope(self, package, type_name, scope): - """Finds a given type name in the current scope. - - Args: - package: The package the proto should be located in. - type_name: The name of the type to be found in the scope. - scope: Dict mapping short and full symbols to message and enum types. - - Returns: - The descriptor for the requested type. - """ - if type_name not in scope: - components = _PrefixWithDot(package).split('.') - while components: - possible_match = '.'.join(components + [type_name]) - if possible_match in scope: - type_name = possible_match - break - else: - components.pop(-1) - return scope[type_name] - - -def _PrefixWithDot(name): - return name if name.startswith('.') else '.%s' % name - - -if _USE_C_DESCRIPTORS: - # TODO(amauryfa): This pool could be constructed from Python code, when we - # support a flag like 'use_cpp_generated_pool=True'. - # pylint: disable=protected-access - _DEFAULT = descriptor._message.default_pool -else: - _DEFAULT = DescriptorPool() - - -def Default(): - return _DEFAULT diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/duration_pb2.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/duration_pb2.py deleted file mode 100644 index a8ecc07bdf..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/duration_pb2.py +++ /dev/null @@ -1,26 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/protobuf/duration.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1egoogle/protobuf/duration.proto\x12\x0fgoogle.protobuf\"*\n\x08\x44uration\x12\x0f\n\x07seconds\x18\x01 \x01(\x03\x12\r\n\x05nanos\x18\x02 \x01(\x05\x42\x83\x01\n\x13\x63om.google.protobufB\rDurationProtoP\x01Z1google.golang.org/protobuf/types/known/durationpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') - -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.duration_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\rDurationProtoP\001Z1google.golang.org/protobuf/types/known/durationpb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' - _DURATION._serialized_start=51 - _DURATION._serialized_end=93 -# @@protoc_insertion_point(module_scope) diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/empty_pb2.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/empty_pb2.py deleted file mode 100644 index 0b4d554db3..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/empty_pb2.py +++ /dev/null @@ -1,26 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/protobuf/empty.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1bgoogle/protobuf/empty.proto\x12\x0fgoogle.protobuf\"\x07\n\x05\x45mptyB}\n\x13\x63om.google.protobufB\nEmptyProtoP\x01Z.google.golang.org/protobuf/types/known/emptypb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') - -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.empty_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\nEmptyProtoP\001Z.google.golang.org/protobuf/types/known/emptypb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' - _EMPTY._serialized_start=48 - _EMPTY._serialized_end=55 -# @@protoc_insertion_point(module_scope) diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/field_mask_pb2.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/field_mask_pb2.py deleted file mode 100644 index 80a4e96e59..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/field_mask_pb2.py +++ /dev/null @@ -1,26 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
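These generated well-known-type modules (duration_pb2 and empty_pb2 above, field_mask_pb2 below) expose ordinary message classes, with convenience helpers mixed in by the protobuf runtime. A short sketch of typical consumption:

    import datetime

    from google.protobuf import duration_pb2
    from google.protobuf import field_mask_pb2

    # Duration gains helpers such as FromTimedelta from the runtime's
    # well-known-types support.
    duration = duration_pb2.Duration()
    duration.FromTimedelta(datetime.timedelta(seconds=90, milliseconds=250))
    assert (duration.seconds, duration.nanos) == (90, 250000000)

    # FieldMask is a plain message with a repeated 'paths' field.
    mask = field_mask_pb2.FieldMask(paths=['seconds', 'nanos'])
    assert list(mask.paths) == ['seconds', 'nanos']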
-# source: google/protobuf/field_mask.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n google/protobuf/field_mask.proto\x12\x0fgoogle.protobuf\"\x1a\n\tFieldMask\x12\r\n\x05paths\x18\x01 \x03(\tB\x85\x01\n\x13\x63om.google.protobufB\x0e\x46ieldMaskProtoP\x01Z2google.golang.org/protobuf/types/known/fieldmaskpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') - -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.field_mask_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\016FieldMaskProtoP\001Z2google.golang.org/protobuf/types/known/fieldmaskpb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' - _FIELDMASK._serialized_start=53 - _FIELDMASK._serialized_end=79 -# @@protoc_insertion_point(module_scope) diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/__init__.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/_parameterized.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/_parameterized.py deleted file mode 100644 index afdbb78c36..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/_parameterized.py +++ /dev/null @@ -1,443 +0,0 @@ -#! /usr/bin/env python -# -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. -# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-"""Adds support for parameterized tests to Python's unittest TestCase class.
-
-A parameterized test is a method in a test case that is invoked with different
-argument tuples.
-
-A simple example:
-
-  class AdditionExample(parameterized.TestCase):
-    @parameterized.parameters(
-        (1, 2, 3),
-        (4, 5, 9),
-        (1, 1, 3))
-    def testAddition(self, op1, op2, result):
-      self.assertEqual(result, op1 + op2)
-
-
-Each invocation is a separate test case and properly isolated just
-like a normal test method, with its own setUp/tearDown cycle. In the
-example above, there are three separate testcases, one of which will
-fail due to an assertion error (1 + 1 != 3).
-
-Parameters for individual test cases can be tuples (with positional parameters)
-or dictionaries (with named parameters):
-
-  class AdditionExample(parameterized.TestCase):
-    @parameterized.parameters(
-        {'op1': 1, 'op2': 2, 'result': 3},
-        {'op1': 4, 'op2': 5, 'result': 9},
-    )
-    def testAddition(self, op1, op2, result):
-      self.assertEqual(result, op1 + op2)
-
-If a parameterized test fails, the error message will show the
-original test name (which is modified internally) and the arguments
-for the specific invocation, which are part of the string returned by
-the shortDescription() method on test cases.
-
-The id method of the test, used internally by the unittest framework,
-is also modified to show the arguments. To make sure that test names
-stay the same across several invocations, object representations like
-
-  >>> class Foo(object):
-  ...   pass
-  >>> repr(Foo())
-  '<__main__.Foo object at 0x23d8610>'
-
-are turned into '<__main__.Foo>'. For even more descriptive names,
-especially in test logs, you can use the named_parameters decorator. In
-this case, only tuples are supported, and the first parameter has to
-be a string (or an object that returns an apt name when converted via
-str()):
-
-  class NamedExample(parameterized.TestCase):
-    @parameterized.named_parameters(
-        ('Normal', 'aa', 'aaa', True),
-        ('EmptyPrefix', '', 'abc', True),
-        ('BothEmpty', '', '', True))
-    def testStartsWith(self, prefix, string, result):
-      self.assertEqual(result, string.startswith(prefix))
-
-Named tests also have the benefit that they can be run individually
-from the command line:
-
-  $ testmodule.py NamedExample.testStartsWithNormal
-  .
-  --------------------------------------------------------------------
-  Ran 1 test in 0.000s
-
-  OK
-
-Parameterized Classes
-=====================
-If invocation arguments are shared across test methods in a single
-TestCase class, instead of decorating all test methods
-individually, the class itself can be decorated:
-
-  @parameterized.parameters(
-      (1, 2, 3),
-      (4, 5, 9))
-  class ArithmeticTest(parameterized.TestCase):
-    def testAdd(self, arg1, arg2, result):
-      self.assertEqual(arg1 + arg2, result)
-
-    def testSubtract(self, arg1, arg2, result):
-      self.assertEqual(result - arg1, arg2)
-
-Inputs from Iterables
-=====================
-If parameters should be shared across several test cases, or are dynamically
-created from other sources, a single non-tuple iterable can be passed into
-the decorator. This iterable will be used to obtain the test cases:
-
-  class AdditionExample(parameterized.TestCase):
-    @parameterized.parameters(
-        (c.op1, c.op2, c.result) for c in testcases
-    )
-    def testAddition(self, op1, op2, result):
-      self.assertEqual(result, op1 + op2)
-
-
-Single-Argument Test Methods
-============================
-If a test method takes only one argument, the single argument does not need to
-be wrapped into a tuple:
-
-  class NegativeNumberExample(parameterized.TestCase):
-    @parameterized.parameters(
-        -1, -3, -4, -5
-    )
-    def testIsNegative(self, arg):
-      self.assertTrue(IsNegative(arg))
-"""
-
-__author__ = 'tmarek@google.com (Torsten Marek)'
-
-import functools
-import re
-import types
-import unittest
-import uuid
-
-try:
-  # Since python 3
-  import collections.abc as collections_abc
-except ImportError:
-  # Won't work after python 3.8
-  import collections as collections_abc
-
-ADDR_RE = re.compile(r'\<([a-zA-Z0-9_\-\.]+) object at 0x[a-fA-F0-9]+\>')
-_SEPARATOR = uuid.uuid1().hex
-_FIRST_ARG = object()
-_ARGUMENT_REPR = object()
-
-
-def _CleanRepr(obj):
-  return ADDR_RE.sub(r'<\1>', repr(obj))
-
-
-# Helper function formerly from the unittest module, removed from it in
-# Python 2.7.
-def _StrClass(cls):
-  return '%s.%s' % (cls.__module__, cls.__name__)
-
-
-def _NonStringIterable(obj):
-  return (isinstance(obj, collections_abc.Iterable) and
-          not isinstance(obj, str))
-
-
-def _FormatParameterList(testcase_params):
-  if isinstance(testcase_params, collections_abc.Mapping):
-    return ', '.join('%s=%s' % (argname, _CleanRepr(value))
-                     for argname, value in testcase_params.items())
-  elif _NonStringIterable(testcase_params):
-    return ', '.join(map(_CleanRepr, testcase_params))
-  else:
-    return _FormatParameterList((testcase_params,))
-
-
-class _ParameterizedTestIter(object):
-  """Callable and iterable class for producing new test cases."""
-
-  def __init__(self, test_method, testcases, naming_type):
-    """Returns concrete test functions for a test and a list of parameters.
-
-    The naming_type is used to determine the name of the concrete
-    functions as reported by the unittest framework. If naming_type is
-    _FIRST_ARG, the testcases must be tuples, and the first element must
-    have a string representation that is a valid Python identifier.
-
-    Args:
-      test_method: The decorated test method.
-      testcases: (list of tuple/dict) A list of parameter
-        tuples/dicts for individual test invocations.
-      naming_type: The test naming type, either _FIRST_ARG or _ARGUMENT_REPR.
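Putting the docstring's pieces together: a complete, runnable test module against this vendored copy (the import path assumes the file is still importable as google.protobuf.internal._parameterized; adjust to taste) might read:

    import unittest

    from google.protobuf.internal import _parameterized as parameterized


    class SquareTest(parameterized.TestCase):

      # Tuple-parameterized: one generated test per argument tuple.
      @parameterized.parameters(
          (1, 1),
          (2, 4),
          (3, 9))
      def testSquare(self, value, expected):
        self.assertEqual(expected, value * value)

      # Named parameters: the first tuple element is appended to the
      # test name, so each case can be run individually by name.
      @parameterized.named_parameters(
          ('Positive', 2, True),
          ('Negative', -2, False))
      def testIsPositive(self, value, expected):
        self.assertEqual(expected, value > 0)


    if __name__ == '__main__':
      unittest.main()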
- """ - self._test_method = test_method - self.testcases = testcases - self._naming_type = naming_type - - def __call__(self, *args, **kwargs): - raise RuntimeError('You appear to be running a parameterized test case ' - 'without having inherited from parameterized.' - 'TestCase. This is bad because none of ' - 'your test cases are actually being run.') - - def __iter__(self): - test_method = self._test_method - naming_type = self._naming_type - - def MakeBoundParamTest(testcase_params): - @functools.wraps(test_method) - def BoundParamTest(self): - if isinstance(testcase_params, collections_abc.Mapping): - test_method(self, **testcase_params) - elif _NonStringIterable(testcase_params): - test_method(self, *testcase_params) - else: - test_method(self, testcase_params) - - if naming_type is _FIRST_ARG: - # Signal the metaclass that the name of the test function is unique - # and descriptive. - BoundParamTest.__x_use_name__ = True - BoundParamTest.__name__ += str(testcase_params[0]) - testcase_params = testcase_params[1:] - elif naming_type is _ARGUMENT_REPR: - # __x_extra_id__ is used to pass naming information to the __new__ - # method of TestGeneratorMetaclass. - # The metaclass will make sure to create a unique, but nondescriptive - # name for this test. - BoundParamTest.__x_extra_id__ = '(%s)' % ( - _FormatParameterList(testcase_params),) - else: - raise RuntimeError('%s is not a valid naming type.' % (naming_type,)) - - BoundParamTest.__doc__ = '%s(%s)' % ( - BoundParamTest.__name__, _FormatParameterList(testcase_params)) - if test_method.__doc__: - BoundParamTest.__doc__ += '\n%s' % (test_method.__doc__,) - return BoundParamTest - return (MakeBoundParamTest(c) for c in self.testcases) - - -def _IsSingletonList(testcases): - """True iff testcases contains only a single non-tuple element.""" - return len(testcases) == 1 and not isinstance(testcases[0], tuple) - - -def _ModifyClass(class_object, testcases, naming_type): - assert not getattr(class_object, '_id_suffix', None), ( - 'Cannot add parameters to %s,' - ' which already has parameterized methods.' % (class_object,)) - class_object._id_suffix = id_suffix = {} - # We change the size of __dict__ while we iterate over it, - # which Python 3.x will complain about, so use copy(). - for name, obj in class_object.__dict__.copy().items(): - if (name.startswith(unittest.TestLoader.testMethodPrefix) - and isinstance(obj, types.FunctionType)): - delattr(class_object, name) - methods = {} - _UpdateClassDictForParamTestCase( - methods, id_suffix, name, - _ParameterizedTestIter(obj, testcases, naming_type)) - for name, meth in methods.items(): - setattr(class_object, name, meth) - - -def _ParameterDecorator(naming_type, testcases): - """Implementation of the parameterization decorators. - - Args: - naming_type: The naming type. - testcases: Testcase parameters. - - Returns: - A function for modifying the decorated object. - """ - def _Apply(obj): - if isinstance(obj, type): - _ModifyClass( - obj, - list(testcases) if not isinstance(testcases, collections_abc.Sequence) - else testcases, - naming_type) - return obj - else: - return _ParameterizedTestIter(obj, testcases, naming_type) - - if _IsSingletonList(testcases): - assert _NonStringIterable(testcases[0]), ( - 'Single parameter argument must be a non-string iterable') - testcases = testcases[0] - - return _Apply - - -def parameters(*testcases): # pylint: disable=invalid-name - """A decorator for creating parameterized tests. - - See the module docstring for a usage example. 
- Args: - *testcases: Parameters for the decorated method, either a single - iterable, or a list of tuples/dicts/objects (for tests - with only one argument). - - Returns: - A test generator to be handled by TestGeneratorMetaclass. - """ - return _ParameterDecorator(_ARGUMENT_REPR, testcases) - - -def named_parameters(*testcases): # pylint: disable=invalid-name - """A decorator for creating parameterized tests. - - See the module docstring for a usage example. The first element of - each parameter tuple should be a string and will be appended to the - name of the test method. - - Args: - *testcases: Parameters for the decorated method, either a single - iterable, or a list of tuples. - - Returns: - A test generator to be handled by TestGeneratorMetaclass. - """ - return _ParameterDecorator(_FIRST_ARG, testcases) - - -class TestGeneratorMetaclass(type): - """Metaclass for test cases with test generators. - - A test generator is an iterable in a testcase that produces callables. These - callables must be single-argument methods. These methods are injected into - the class namespace and the original iterable is removed. If the name of the - iterable conforms to the test pattern, the injected methods will be picked - up as tests by the unittest framework. - - In general, it is supposed to be used in conjunction with the - parameters decorator. - """ - - def __new__(mcs, class_name, bases, dct): - dct['_id_suffix'] = id_suffix = {} - for name, obj in dct.copy().items(): - if (name.startswith(unittest.TestLoader.testMethodPrefix) and - _NonStringIterable(obj)): - iterator = iter(obj) - dct.pop(name) - _UpdateClassDictForParamTestCase(dct, id_suffix, name, iterator) - - return type.__new__(mcs, class_name, bases, dct) - - -def _UpdateClassDictForParamTestCase(dct, id_suffix, name, iterator): - """Adds individual test cases to a dictionary. - - Args: - dct: The target dictionary. - id_suffix: The dictionary for mapping names to test IDs. - name: The original name of the test case. - iterator: The iterator generating the individual test cases. - """ - for idx, func in enumerate(iterator): - assert callable(func), 'Test generators must yield callables, got %r' % ( - func,) - if getattr(func, '__x_use_name__', False): - new_name = func.__name__ - else: - new_name = '%s%s%d' % (name, _SEPARATOR, idx) - assert new_name not in dct, ( - 'Name of parameterized test case "%s" not unique' % (new_name,)) - dct[new_name] = func - id_suffix[new_name] = getattr(func, '__x_extra_id__', '') - - -class TestCase(unittest.TestCase, metaclass=TestGeneratorMetaclass): - """Base class for test cases using the parameters decorator.""" - - def _OriginalName(self): - return self._testMethodName.split(_SEPARATOR)[0] - - def __str__(self): - return '%s (%s)' % (self._OriginalName(), _StrClass(self.__class__)) - - def id(self): # pylint: disable=invalid-name - """Returns the descriptive ID of the test. - - This is used internally by the unittesting framework to get a name - for the test to be used in reports. - - Returns: - The test id. - """ - return '%s.%s%s' % (_StrClass(self.__class__), - self._OriginalName(), - self._id_suffix.get(self._testMethodName, '')) - - -def CoopTestCase(other_base_class): - """Returns a new base class with a cooperative metaclass base. - - This enables the TestCase to be used in combination - with other base classes that have custom metaclasses, such as - mox.MoxTestBase. - - Only works with metaclasses that do not override type.__new__. 
- - Example: - - import google3 - import mox - - from google3.testing.pybase import parameterized - - class ExampleTest(parameterized.CoopTestCase(mox.MoxTestBase)): - ... - - Args: - other_base_class: (class) A test case base class. - - Returns: - A new class object. - """ - metaclass = type( - 'CoopMetaclass', - (other_base_class.__metaclass__, - TestGeneratorMetaclass), {}) - return metaclass( - 'CoopTestCase', - (other_base_class, TestCase), {}) diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/api_implementation.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/api_implementation.py deleted file mode 100644 index 7fef237670..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/api_implementation.py +++ /dev/null @@ -1,112 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. -# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -"""Determine which implementation of the protobuf API is used in this process. -""" - -import os -import sys -import warnings - -try: - # pylint: disable=g-import-not-at-top - from google.protobuf.internal import _api_implementation - # The compile-time constants in the _api_implementation module can be used to - # switch to a certain implementation of the Python API at build time. - _api_version = _api_implementation.api_version -except ImportError: - _api_version = -1 # Unspecified by compiler flags. - -if _api_version == 1: - raise ValueError('api_version=1 is no longer supported.') - - -_default_implementation_type = ('cpp' if _api_version > 0 else 'python') - - -# This environment variable can be used to switch to a certain implementation -# of the Python API, overriding the compile-time constants in the -# _api_implementation module. Right now only 'python' and 'cpp' are valid -# values. Any other value will be ignored. 
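The comment block above describes runtime backend selection; a hedged sketch of how a caller exercises it follows. The assignment just below this comment reads the variable at import time, so it must be set before the first protobuf import; `Type()` is defined further down in this same module.

```python
# Hedged sketch: force the pure-Python backend before protobuf is imported.
import os
os.environ['PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION'] = 'python'

from google.protobuf.internal import api_implementation

# Type() (defined below in this module) reports the backend in use.
assert api_implementation.Type() == 'python'
```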
-_implementation_type = os.getenv('PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION', - _default_implementation_type) - -if _implementation_type != 'python': - _implementation_type = 'cpp' - -if 'PyPy' in sys.version and _implementation_type == 'cpp': - warnings.warn('PyPy does not work yet with cpp protocol buffers. ' - 'Falling back to the python implementation.') - _implementation_type = 'python' - - -# Detect if serialization should be deterministic by default -try: - # The presence of this module in a build allows the proto implementation to - # be upgraded merely via build deps. - # - # NOTE: Merely importing this automatically enables deterministic proto - # serialization for C++ code, but we still need to export it as a boolean so - # that we can do the same for `_implementation_type == 'python'`. - # - # NOTE2: It is possible for C++ code to enable deterministic serialization by - # default _without_ affecting Python code, if the C++ implementation is not in - # use by this module. That is intended behavior, so we don't actually expose - # this boolean outside of this module. - # - # pylint: disable=g-import-not-at-top,unused-import - from google.protobuf import enable_deterministic_proto_serialization - _python_deterministic_proto_serialization = True -except ImportError: - _python_deterministic_proto_serialization = False - - -# Usage of this function is discouraged. Clients shouldn't care which -# implementation of the API is in use. Note that there is no guarantee -# that differences between APIs will be maintained. -# Please don't use this function if possible. -def Type(): - return _implementation_type - - -def _SetType(implementation_type): - """Never use! Only for protobuf benchmark.""" - global _implementation_type - _implementation_type = implementation_type - - -# See comment on 'Type' above. -def Version(): - return 2 - - -# For internal use only -def IsPythonDefaultSerializationDeterministic(): - return _python_deterministic_proto_serialization diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/builder.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/builder.py deleted file mode 100644 index 64353ee4af..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/builder.py +++ /dev/null @@ -1,130 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. -# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -"""Builds descriptors, message classes and services for generated _pb2.py. - -This file is only called in python generated _pb2.py files. It builds -descriptors, message classes and services that users can directly use -in generated code. -""" - -__author__ = 'jieluo@google.com (Jie Luo)' - -from google.protobuf.internal import enum_type_wrapper -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -_sym_db = _symbol_database.Default() - - -def BuildMessageAndEnumDescriptors(file_des, module): - """Builds message and enum descriptors. - - Args: - file_des: FileDescriptor of the .proto file - module: Generated _pb2 module - """ - - def BuildNestedDescriptors(msg_des, prefix): - for (name, nested_msg) in msg_des.nested_types_by_name.items(): - module_name = prefix + name.upper() - module[module_name] = nested_msg - BuildNestedDescriptors(nested_msg, module_name + '_') - for enum_des in msg_des.enum_types: - module[prefix + enum_des.name.upper()] = enum_des - - for (name, msg_des) in file_des.message_types_by_name.items(): - module_name = '_' + name.upper() - module[module_name] = msg_des - BuildNestedDescriptors(msg_des, module_name + '_') - - -def BuildTopDescriptorsAndMessages(file_des, module_name, module): - """Builds top level descriptors and message classes. - - Args: - file_des: FileDescriptor of the .proto file - module_name: str, the name of generated _pb2 module - module: Generated _pb2 module - """ - - def BuildMessage(msg_des): - create_dict = {} - for (name, nested_msg) in msg_des.nested_types_by_name.items(): - create_dict[name] = BuildMessage(nested_msg) - create_dict['DESCRIPTOR'] = msg_des - create_dict['__module__'] = module_name - message_class = _reflection.GeneratedProtocolMessageType( - msg_des.name, (_message.Message,), create_dict) - _sym_db.RegisterMessage(message_class) - return message_class - - # top level enums - for (name, enum_des) in file_des.enum_types_by_name.items(): - module['_' + name.upper()] = enum_des - module[name] = enum_type_wrapper.EnumTypeWrapper(enum_des) - for enum_value in enum_des.values: - module[enum_value.name] = enum_value.number - - # top level extensions - for (name, extension_des) in file_des.extensions_by_name.items(): - module[name.upper() + '_FIELD_NUMBER'] = extension_des.number - module[name] = extension_des - - # services - for (name, service) in file_des.services_by_name.items(): - module['_' + name.upper()] = service - - # Build messages. - for (name, msg_des) in file_des.message_types_by_name.items(): - module[name] = BuildMessage(msg_des) - - -def BuildServices(file_des, module_name, module): - """Builds services classes and services stub class. 
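Together with `BuildServices`, whose body continues just below, these helpers are driven by protoc-generated `_pb2.py` modules. A hedged sketch of that calling pattern; the serialized descriptor bytes are elided and the module name is illustrative.

```python
# Hedged sketch of a generated my_proto_pb2.py (descriptor bytes elided).
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf.internal import builder as _builder

DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'...')  # elided

# `module` is a plain dict, so generated code passes its own globals().
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'my_proto_pb2', globals())
```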
- - Args: - file_des: FileDescriptor of the .proto file - module_name: str, the name of generated _pb2 module - module: Generated _pb2 module - """ - # pylint: disable=g-import-not-at-top - from google.protobuf import service as _service - from google.protobuf import service_reflection - # pylint: enable=g-import-not-at-top - for (name, service) in file_des.services_by_name.items(): - module[name] = service_reflection.GeneratedServiceType( - name, (_service.Service,), - dict(DESCRIPTOR=service, __module__=module_name)) - stub_name = name + '_Stub' - module[stub_name] = service_reflection.GeneratedServiceStubType( - stub_name, (module[name],), - dict(DESCRIPTOR=service, __module__=module_name)) diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/containers.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/containers.py deleted file mode 100644 index 29fbb53d2f..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/containers.py +++ /dev/null @@ -1,710 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. -# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -"""Contains container classes to represent different protocol buffer types. - -This file defines container classes which represent categories of protocol -buffer field types which need extra maintenance. Currently these categories -are: - -- Repeated scalar fields - These are all repeated fields which aren't - composite (e.g. they are of simple types like int32, string, etc). -- Repeated composite fields - Repeated fields which are composite. This - includes groups and nested messages. 
-""" - -import collections.abc -import copy -import pickle -from typing import ( - Any, - Iterable, - Iterator, - List, - MutableMapping, - MutableSequence, - NoReturn, - Optional, - Sequence, - TypeVar, - Union, - overload, -) - - -_T = TypeVar('_T') -_K = TypeVar('_K') -_V = TypeVar('_V') - - -class BaseContainer(Sequence[_T]): - """Base container class.""" - - # Minimizes memory usage and disallows assignment to other attributes. - __slots__ = ['_message_listener', '_values'] - - def __init__(self, message_listener: Any) -> None: - """ - Args: - message_listener: A MessageListener implementation. - The RepeatedScalarFieldContainer will call this object's - Modified() method when it is modified. - """ - self._message_listener = message_listener - self._values = [] - - @overload - def __getitem__(self, key: int) -> _T: - ... - - @overload - def __getitem__(self, key: slice) -> List[_T]: - ... - - def __getitem__(self, key): - """Retrieves item by the specified key.""" - return self._values[key] - - def __len__(self) -> int: - """Returns the number of elements in the container.""" - return len(self._values) - - def __ne__(self, other: Any) -> bool: - """Checks if another instance isn't equal to this one.""" - # The concrete classes should define __eq__. - return not self == other - - __hash__ = None - - def __repr__(self) -> str: - return repr(self._values) - - def sort(self, *args, **kwargs) -> None: - # Continue to support the old sort_function keyword argument. - # This is expected to be a rare occurrence, so use LBYL to avoid - # the overhead of actually catching KeyError. - if 'sort_function' in kwargs: - kwargs['cmp'] = kwargs.pop('sort_function') - self._values.sort(*args, **kwargs) - - def reverse(self) -> None: - self._values.reverse() - - -# TODO(slebedev): Remove this. BaseContainer does *not* conform to -# MutableSequence, only its subclasses do. -collections.abc.MutableSequence.register(BaseContainer) - - -class RepeatedScalarFieldContainer(BaseContainer[_T], MutableSequence[_T]): - """Simple, type-checked, list-like container for holding repeated scalars.""" - - # Disallows assignment to other attributes. - __slots__ = ['_type_checker'] - - def __init__( - self, - message_listener: Any, - type_checker: Any, - ) -> None: - """Args: - - message_listener: A MessageListener implementation. The - RepeatedScalarFieldContainer will call this object's Modified() method - when it is modified. - type_checker: A type_checkers.ValueChecker instance to run on elements - inserted into this container. - """ - super().__init__(message_listener) - self._type_checker = type_checker - - def append(self, value: _T) -> None: - """Appends an item to the list. Similar to list.append().""" - self._values.append(self._type_checker.CheckValue(value)) - if not self._message_listener.dirty: - self._message_listener.Modified() - - def insert(self, key: int, value: _T) -> None: - """Inserts the item at the specified position. Similar to list.insert().""" - self._values.insert(key, self._type_checker.CheckValue(value)) - if not self._message_listener.dirty: - self._message_listener.Modified() - - def extend(self, elem_seq: Iterable[_T]) -> None: - """Extends by appending the given iterable. Similar to list.extend().""" - if elem_seq is None: - return - try: - elem_seq_iter = iter(elem_seq) - except TypeError: - if not elem_seq: - # silently ignore falsy inputs :-/. - # TODO(ptucker): Deprecate this behavior. 
b/18413862 - return - raise - - new_values = [self._type_checker.CheckValue(elem) for elem in elem_seq_iter] - if new_values: - self._values.extend(new_values) - self._message_listener.Modified() - - def MergeFrom( - self, - other: Union['RepeatedScalarFieldContainer[_T]', Iterable[_T]], - ) -> None: - """Appends the contents of another repeated field of the same type to this - one. We do not check the types of the individual fields. - """ - self._values.extend(other) - self._message_listener.Modified() - - def remove(self, elem: _T): - """Removes an item from the list. Similar to list.remove().""" - self._values.remove(elem) - self._message_listener.Modified() - - def pop(self, key: Optional[int] = -1) -> _T: - """Removes and returns an item at a given index. Similar to list.pop().""" - value = self._values[key] - self.__delitem__(key) - return value - - @overload - def __setitem__(self, key: int, value: _T) -> None: - ... - - @overload - def __setitem__(self, key: slice, value: Iterable[_T]) -> None: - ... - - def __setitem__(self, key, value) -> None: - """Sets the item on the specified position.""" - if isinstance(key, slice): - if key.step is not None: - raise ValueError('Extended slices not supported') - self._values[key] = map(self._type_checker.CheckValue, value) - self._message_listener.Modified() - else: - self._values[key] = self._type_checker.CheckValue(value) - self._message_listener.Modified() - - def __delitem__(self, key: Union[int, slice]) -> None: - """Deletes the item at the specified position.""" - del self._values[key] - self._message_listener.Modified() - - def __eq__(self, other: Any) -> bool: - """Compares the current instance with another one.""" - if self is other: - return True - # Special case for the same type which should be common and fast. - if isinstance(other, self.__class__): - return other._values == self._values - # We are presumably comparing against some other sequence type. - return other == self._values - - def __deepcopy__( - self, - unused_memo: Any = None, - ) -> 'RepeatedScalarFieldContainer[_T]': - clone = RepeatedScalarFieldContainer( - copy.deepcopy(self._message_listener), self._type_checker) - clone.MergeFrom(self) - return clone - - def __reduce__(self, **kwargs) -> NoReturn: - raise pickle.PickleError( - "Can't pickle repeated scalar fields, convert to list first") - - -# TODO(slebedev): Constrain T to be a subtype of Message. -class RepeatedCompositeFieldContainer(BaseContainer[_T], MutableSequence[_T]): - """Simple, list-like container for holding repeated composite fields.""" - - # Disallows assignment to other attributes. - __slots__ = ['_message_descriptor'] - - def __init__(self, message_listener: Any, message_descriptor: Any) -> None: - """ - Note that we pass in a descriptor instead of the generated directly, - since at the time we construct a _RepeatedCompositeFieldContainer we - haven't yet necessarily initialized the type that will be contained in the - container. - - Args: - message_listener: A MessageListener implementation. - The RepeatedCompositeFieldContainer will call this object's - Modified() method when it is modified. - message_descriptor: A Descriptor instance describing the protocol type - that should be present in this container. We'll use the - _concrete_class field of this descriptor when the client calls add(). - """ - super().__init__(message_listener) - self._message_descriptor = message_descriptor - - def add(self, **kwargs: Any) -> _T: - """Adds a new element at the end of the list and returns it. 
Keyword - arguments may be used to initialize the element. - """ - new_element = self._message_descriptor._concrete_class(**kwargs) - new_element._SetListener(self._message_listener) - self._values.append(new_element) - if not self._message_listener.dirty: - self._message_listener.Modified() - return new_element - - def append(self, value: _T) -> None: - """Appends one element by copying the message.""" - new_element = self._message_descriptor._concrete_class() - new_element._SetListener(self._message_listener) - new_element.CopyFrom(value) - self._values.append(new_element) - if not self._message_listener.dirty: - self._message_listener.Modified() - - def insert(self, key: int, value: _T) -> None: - """Inserts the item at the specified position by copying.""" - new_element = self._message_descriptor._concrete_class() - new_element._SetListener(self._message_listener) - new_element.CopyFrom(value) - self._values.insert(key, new_element) - if not self._message_listener.dirty: - self._message_listener.Modified() - - def extend(self, elem_seq: Iterable[_T]) -> None: - """Extends by appending the given sequence of elements of the same type - - as this one, copying each individual message. - """ - message_class = self._message_descriptor._concrete_class - listener = self._message_listener - values = self._values - for message in elem_seq: - new_element = message_class() - new_element._SetListener(listener) - new_element.MergeFrom(message) - values.append(new_element) - listener.Modified() - - def MergeFrom( - self, - other: Union['RepeatedCompositeFieldContainer[_T]', Iterable[_T]], - ) -> None: - """Appends the contents of another repeated field of the same type to this - one, copying each individual message. - """ - self.extend(other) - - def remove(self, elem: _T) -> None: - """Removes an item from the list. Similar to list.remove().""" - self._values.remove(elem) - self._message_listener.Modified() - - def pop(self, key: Optional[int] = -1) -> _T: - """Removes and returns an item at a given index. Similar to list.pop().""" - value = self._values[key] - self.__delitem__(key) - return value - - @overload - def __setitem__(self, key: int, value: _T) -> None: - ... - - @overload - def __setitem__(self, key: slice, value: Iterable[_T]) -> None: - ... - - def __setitem__(self, key, value): - # This method is implemented to make RepeatedCompositeFieldContainer - # structurally compatible with typing.MutableSequence. It is - # otherwise unsupported and will always raise an error. - raise TypeError( - f'{self.__class__.__name__} object does not support item assignment') - - def __delitem__(self, key: Union[int, slice]) -> None: - """Deletes the item at the specified position.""" - del self._values[key] - self._message_listener.Modified() - - def __eq__(self, other: Any) -> bool: - """Compares the current instance with another one.""" - if self is other: - return True - if not isinstance(other, self.__class__): - raise TypeError('Can only compare repeated composite fields against ' - 'other repeated composite fields.') - return self._values == other._values - - -class ScalarMap(MutableMapping[_K, _V]): - """Simple, type-checked, dict-like container for holding repeated scalars.""" - - # Disallows assignment to other attributes. 
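Both repeated containers surface through generated message fields. A hedged sketch of the observable behavior, using well-known types that ship with protobuf so no new `.proto` is needed:

```python
# Hedged sketch: FieldMask.paths is a repeated string (scalar container);
# ListValue.values is a repeated Value (composite container).
from google.protobuf import field_mask_pb2, struct_pb2

mask = field_mask_pb2.FieldMask()
mask.paths.append('a.b')        # RepeatedScalarFieldContainer: type-checked
mask.paths.extend(['c', 'd'])
mask.paths[0] = 'a'             # __setitem__ runs CheckValue as well

lst = struct_pb2.ListValue()
val = lst.values.add()          # RepeatedCompositeFieldContainer.add()
val.number_value = 1.0          # the returned element is live in the list
lst.values.append(struct_pb2.Value(string_value='x'))  # append() copies
# lst.values[0] = struct_pb2.Value()  # would raise TypeError: no assignment
```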
- __slots__ = ['_key_checker', '_value_checker', '_values', '_message_listener', - '_entry_descriptor'] - - def __init__( - self, - message_listener: Any, - key_checker: Any, - value_checker: Any, - entry_descriptor: Any, - ) -> None: - """ - Args: - message_listener: A MessageListener implementation. - The ScalarMap will call this object's Modified() method when it - is modified. - key_checker: A type_checkers.ValueChecker instance to run on keys - inserted into this container. - value_checker: A type_checkers.ValueChecker instance to run on values - inserted into this container. - entry_descriptor: The MessageDescriptor of a map entry: key and value. - """ - self._message_listener = message_listener - self._key_checker = key_checker - self._value_checker = value_checker - self._entry_descriptor = entry_descriptor - self._values = {} - - def __getitem__(self, key: _K) -> _V: - try: - return self._values[key] - except KeyError: - key = self._key_checker.CheckValue(key) - val = self._value_checker.DefaultValue() - self._values[key] = val - return val - - def __contains__(self, item: _K) -> bool: - # We check the key's type to match the strong-typing flavor of the API. - # Also this makes it easier to match the behavior of the C++ implementation. - self._key_checker.CheckValue(item) - return item in self._values - - @overload - def get(self, key: _K) -> Optional[_V]: - ... - - @overload - def get(self, key: _K, default: _T) -> Union[_V, _T]: - ... - - # We need to override this explicitly, because our defaultdict-like behavior - # will make the default implementation (from our base class) always insert - # the key. - def get(self, key, default=None): - if key in self: - return self[key] - else: - return default - - def __setitem__(self, key: _K, value: _V) -> _T: - checked_key = self._key_checker.CheckValue(key) - checked_value = self._value_checker.CheckValue(value) - self._values[checked_key] = checked_value - self._message_listener.Modified() - - def __delitem__(self, key: _K) -> None: - del self._values[key] - self._message_listener.Modified() - - def __len__(self) -> int: - return len(self._values) - - def __iter__(self) -> Iterator[_K]: - return iter(self._values) - - def __repr__(self) -> str: - return repr(self._values) - - def MergeFrom(self, other: 'ScalarMap[_K, _V]') -> None: - self._values.update(other._values) - self._message_listener.Modified() - - def InvalidateIterators(self) -> None: - # It appears that the only way to reliably invalidate iterators to - # self._values is to ensure that its size changes. - original = self._values - self._values = original.copy() - original[None] = None - - # This is defined in the abstract base, but we can do it much more cheaply. - def clear(self) -> None: - self._values.clear() - self._message_listener.Modified() - - def GetEntryClass(self) -> Any: - return self._entry_descriptor._concrete_class - - -class MessageMap(MutableMapping[_K, _V]): - """Simple, type-checked, dict-like container for with submessage values.""" - - # Disallows assignment to other attributes. - __slots__ = ['_key_checker', '_values', '_message_listener', - '_message_descriptor', '_entry_descriptor'] - - def __init__( - self, - message_listener: Any, - message_descriptor: Any, - key_checker: Any, - entry_descriptor: Any, - ) -> None: - """ - Args: - message_listener: A MessageListener implementation. - The ScalarMap will call this object's Modified() method when it - is modified. 
- key_checker: A type_checkers.ValueChecker instance to run on keys - inserted into this container. - value_checker: A type_checkers.ValueChecker instance to run on values - inserted into this container. - entry_descriptor: The MessageDescriptor of a map entry: key and value. - """ - self._message_listener = message_listener - self._message_descriptor = message_descriptor - self._key_checker = key_checker - self._entry_descriptor = entry_descriptor - self._values = {} - - def __getitem__(self, key: _K) -> _V: - key = self._key_checker.CheckValue(key) - try: - return self._values[key] - except KeyError: - new_element = self._message_descriptor._concrete_class() - new_element._SetListener(self._message_listener) - self._values[key] = new_element - self._message_listener.Modified() - return new_element - - def get_or_create(self, key: _K) -> _V: - """get_or_create() is an alias for getitem (ie. map[key]). - - Args: - key: The key to get or create in the map. - - This is useful in cases where you want to be explicit that the call is - mutating the map. This can avoid lint errors for statements like this - that otherwise would appear to be pointless statements: - - msg.my_map[key] - """ - return self[key] - - @overload - def get(self, key: _K) -> Optional[_V]: - ... - - @overload - def get(self, key: _K, default: _T) -> Union[_V, _T]: - ... - - # We need to override this explicitly, because our defaultdict-like behavior - # will make the default implementation (from our base class) always insert - # the key. - def get(self, key, default=None): - if key in self: - return self[key] - else: - return default - - def __contains__(self, item: _K) -> bool: - item = self._key_checker.CheckValue(item) - return item in self._values - - def __setitem__(self, key: _K, value: _V) -> NoReturn: - raise ValueError('May not set values directly, call my_map[key].foo = 5') - - def __delitem__(self, key: _K) -> None: - key = self._key_checker.CheckValue(key) - del self._values[key] - self._message_listener.Modified() - - def __len__(self) -> int: - return len(self._values) - - def __iter__(self) -> Iterator[_K]: - return iter(self._values) - - def __repr__(self) -> str: - return repr(self._values) - - def MergeFrom(self, other: 'MessageMap[_K, _V]') -> None: - # pylint: disable=protected-access - for key in other._values: - # According to documentation: "When parsing from the wire or when merging, - # if there are duplicate map keys the last key seen is used". - if key in self: - del self[key] - self[key].CopyFrom(other[key]) - # self._message_listener.Modified() not required here, because - # mutations to submessages already propagate. - - def InvalidateIterators(self) -> None: - # It appears that the only way to reliably invalidate iterators to - # self._values is to ensure that its size changes. - original = self._values - self._values = original.copy() - original[None] = None - - # This is defined in the abstract base, but we can do it much more cheaply. - def clear(self) -> None: - self._values.clear() - self._message_listener.Modified() - - def GetEntryClass(self) -> Any: - return self._entry_descriptor._concrete_class - - -class _UnknownField: - """A parsed unknown field.""" - - # Disallows assignment to other attributes. 
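To make the map containers concrete, a hedged sketch using `Struct`, whose `fields` is a `map<string, Value>` and therefore a `MessageMap`; the `ScalarMap` remark in the comments is an assumption about an analogous scalar-valued map field, not shown here.

```python
# Hedged sketch: Struct.fields is a map<string, Value>, i.e. a MessageMap.
from google.protobuf import struct_pb2

s = struct_pb2.Struct()
s.fields['a'].number_value = 1.0   # __getitem__ creates the submessage
s.fields['b'].CopyFrom(s.fields['a'])
# s.fields['c'] = struct_pb2.Value()  # would raise ValueError (__setitem__)

# A ScalarMap (e.g. map<string, int32>) behaves like a defaultdict instead:
# reading a missing key inserts and returns the field's default value.
assert 'a' in s.fields and len(s.fields) == 2
```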
-  __slots__ = ['_field_number', '_wire_type', '_data']
-
-  def __init__(self, field_number, wire_type, data):
-    self._field_number = field_number
-    self._wire_type = wire_type
-    self._data = data
-    return
-
-  def __lt__(self, other):
-    # pylint: disable=protected-access
-    return self._field_number < other._field_number
-
-  def __eq__(self, other):
-    if self is other:
-      return True
-    # pylint: disable=protected-access
-    return (self._field_number == other._field_number and
-            self._wire_type == other._wire_type and
-            self._data == other._data)
-
-
-class UnknownFieldRef:  # pylint: disable=missing-class-docstring
-
-  def __init__(self, parent, index):
-    self._parent = parent
-    self._index = index
-
-  def _check_valid(self):
-    if not self._parent:
-      raise ValueError('UnknownField does not exist. '
-                       'The parent message might be cleared.')
-    if self._index >= len(self._parent):
-      raise ValueError('UnknownField does not exist. '
-                       'The parent message might be cleared.')
-
-  @property
-  def field_number(self):
-    self._check_valid()
-    # pylint: disable=protected-access
-    return self._parent._internal_get(self._index)._field_number
-
-  @property
-  def wire_type(self):
-    self._check_valid()
-    # pylint: disable=protected-access
-    return self._parent._internal_get(self._index)._wire_type
-
-  @property
-  def data(self):
-    self._check_valid()
-    # pylint: disable=protected-access
-    return self._parent._internal_get(self._index)._data
-
-
-class UnknownFieldSet:
-  """UnknownField container"""
-
-  # Disallows assignment to other attributes.
-  __slots__ = ['_values']
-
-  def __init__(self):
-    self._values = []
-
-  def __getitem__(self, index):
-    if self._values is None:
-      raise ValueError('UnknownFields does not exist. '
-                       'The parent message might be cleared.')
-    size = len(self._values)
-    if index < 0:
-      index += size
-    if index < 0 or index >= size:
-      raise IndexError('index %d out of range' % index)
-
-    return UnknownFieldRef(self, index)
-
-  def _internal_get(self, index):
-    return self._values[index]
-
-  def __len__(self):
-    if self._values is None:
-      raise ValueError('UnknownFields does not exist. '
-                       'The parent message might be cleared.')
-    return len(self._values)
-
-  def _add(self, field_number, wire_type, data):
-    unknown_field = _UnknownField(field_number, wire_type, data)
-    self._values.append(unknown_field)
-    return unknown_field
-
-  def __iter__(self):
-    for i in range(len(self)):
-      yield UnknownFieldRef(self, i)
-
-  def _extend(self, other):
-    if other is None:
-      return
-    # pylint: disable=protected-access
-    self._values.extend(other._values)
-
-  def __eq__(self, other):
-    if self is other:
-      return True
-    # Sort unknown fields because their order shouldn't
-    # affect equality test.
- values = list(self._values) - if other is None: - return not values - values.sort() - # pylint: disable=protected-access - other_values = sorted(other._values) - return values == other_values - - def _clear(self): - for value in self._values: - # pylint: disable=protected-access - if isinstance(value._data, UnknownFieldSet): - value._data._clear() # pylint: disable=protected-access - self._values = None diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/decoder.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/decoder.py deleted file mode 100644 index bc1b7b785c..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/decoder.py +++ /dev/null @@ -1,1029 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. -# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -"""Code for decoding protocol buffer primitives. - -This code is very similar to encoder.py -- read the docs for that module first. - -A "decoder" is a function with the signature: - Decode(buffer, pos, end, message, field_dict) -The arguments are: - buffer: The string containing the encoded message. - pos: The current position in the string. - end: The position in the string where the current message ends. May be - less than len(buffer) if we're reading a sub-message. - message: The message object into which we're parsing. - field_dict: message._fields (avoids a hashtable lookup). -The decoder reads the field and stores it into field_dict, returning the new -buffer position. A decoder for a repeated field may proactively decode all of -the elements of that field, if they appear consecutively. - -Note that decoders may throw any of the following: - IndexError: Indicates a truncated message. - struct.error: Unpacking of a fixed-width field failed. - message.DecodeError: Other errors. - -Decoders are expected to raise an exception if they are called with pos > end. 
-This allows callers to be lax about bounds checking: it's fine to read past
-"end" as long as you are sure that someone else will notice and throw an
-exception later on.
-
-Something up the call stack is expected to catch IndexError and struct.error
-and convert them to message.DecodeError.
-
-Decoders are constructed using decoder constructors with the signature:
-  MakeDecoder(field_number, is_repeated, is_packed, key, new_default)
-The arguments are:
-  field_number: The field number of the field we want to decode.
-  is_repeated: Is the field a repeated field? (bool)
-  is_packed: Is the field a packed field? (bool)
-  key: The key to use when looking up the field within field_dict.
-       (This is actually the FieldDescriptor but nothing in this
-       file should depend on that.)
-  new_default: A function which takes a message object as a parameter and
-               returns a new instance of the default value for this field.
-               (This is called for repeated fields and sub-messages, when an
-               instance does not already exist.)
-
-As with encoders, we define a decoder constructor for every type of field.
-Then, for every field of every message class we construct an actual decoder.
-That decoder goes into a dict indexed by tag, so when we decode a message
-we repeatedly read a tag, look up the corresponding decoder, and invoke it.
-"""
-
-__author__ = 'kenton@google.com (Kenton Varda)'
-
-import math
-import struct
-
-from google.protobuf.internal import containers
-from google.protobuf.internal import encoder
-from google.protobuf.internal import wire_format
-from google.protobuf import message
-
-
-# This is not for optimization, but rather to avoid conflicts with local
-# variables named "message".
-_DecodeError = message.DecodeError
-
-
-def _VarintDecoder(mask, result_type):
-  """Return a decoder for a basic varint value (does not include tag).
-
-  Decoded values will be bitwise-anded with the given mask before being
-  returned, e.g. to limit them to 32 bits.  The returned decoder does not
-  take the usual "end" parameter -- the caller is expected to do bounds checking
-  after the fact (often the caller can defer such checking until later).  The
-  decoder returns a (value, new_pos) pair.
-  """
-
-  def DecodeVarint(buffer, pos):
-    result = 0
-    shift = 0
-    while 1:
-      b = buffer[pos]
-      result |= ((b & 0x7f) << shift)
-      pos += 1
-      if not (b & 0x80):
-        result &= mask
-        result = result_type(result)
-        return (result, pos)
-      shift += 7
-      if shift >= 64:
-        raise _DecodeError('Too many bytes when decoding varint.')
-  return DecodeVarint
-
-
-def _SignedVarintDecoder(bits, result_type):
-  """Like _VarintDecoder() but decodes signed values."""
-
-  signbit = 1 << (bits - 1)
-  mask = (1 << bits) - 1
-
-  def DecodeVarint(buffer, pos):
-    result = 0
-    shift = 0
-    while 1:
-      b = buffer[pos]
-      result |= ((b & 0x7f) << shift)
-      pos += 1
-      if not (b & 0x80):
-        result &= mask
-        result = (result ^ signbit) - signbit
-        result = result_type(result)
-        return (result, pos)
-      shift += 7
-      if shift >= 64:
-        raise _DecodeError('Too many bytes when decoding varint.')
-  return DecodeVarint
-
-# All 32-bit and 64-bit values are represented as int.
-_DecodeVarint = _VarintDecoder((1 << 64) - 1, int)
-_DecodeSignedVarint = _SignedVarintDecoder(64, int)
-
-# Use these versions for values which must be limited to 32 bits.
-_DecodeVarint32 = _VarintDecoder((1 << 32) - 1, int)
-_DecodeSignedVarint32 = _SignedVarintDecoder(32, int)
-
-
-def ReadTag(buffer, pos):
-  """Read a tag from the memoryview, and return a (tag_bytes, new_pos) tuple.
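A hedged, concrete check of the varint decoders just defined. These are internal helpers rather than public API; the byte strings follow the base-128 wire format described above.

```python
# Hedged sketch: exercising the internal varint decoders defined above.
from google.protobuf.internal import decoder

# 300 -> 0xAC 0x02: low 7 bits first, MSB set on all but the last byte.
value, new_pos = decoder._DecodeVarint(memoryview(b'\xac\x02'), 0)
assert (value, new_pos) == (300, 2)

# The signed variant sign-extends: -1 is nine 0xFF bytes and a 0x01.
buf = memoryview(b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\x01')
value, _ = decoder._DecodeSignedVarint(buf, 0)
assert value == -1
```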
- - We return the raw bytes of the tag rather than decoding them. The raw - bytes can then be used to look up the proper decoder. This effectively allows - us to trade some work that would be done in pure-python (decoding a varint) - for work that is done in C (searching for a byte string in a hash table). - In a low-level language it would be much cheaper to decode the varint and - use that, but not in Python. - - Args: - buffer: memoryview object of the encoded bytes - pos: int of the current position to start from - - Returns: - Tuple[bytes, int] of the tag data and new position. - """ - start = pos - while buffer[pos] & 0x80: - pos += 1 - pos += 1 - - tag_bytes = buffer[start:pos].tobytes() - return tag_bytes, pos - - -# -------------------------------------------------------------------- - - -def _SimpleDecoder(wire_type, decode_value): - """Return a constructor for a decoder for fields of a particular type. - - Args: - wire_type: The field's wire type. - decode_value: A function which decodes an individual value, e.g. - _DecodeVarint() - """ - - def SpecificDecoder(field_number, is_repeated, is_packed, key, new_default, - clear_if_default=False): - if is_packed: - local_DecodeVarint = _DecodeVarint - def DecodePackedField(buffer, pos, end, message, field_dict): - value = field_dict.get(key) - if value is None: - value = field_dict.setdefault(key, new_default(message)) - (endpoint, pos) = local_DecodeVarint(buffer, pos) - endpoint += pos - if endpoint > end: - raise _DecodeError('Truncated message.') - while pos < endpoint: - (element, pos) = decode_value(buffer, pos) - value.append(element) - if pos > endpoint: - del value[-1] # Discard corrupt value. - raise _DecodeError('Packed element was truncated.') - return pos - return DecodePackedField - elif is_repeated: - tag_bytes = encoder.TagBytes(field_number, wire_type) - tag_len = len(tag_bytes) - def DecodeRepeatedField(buffer, pos, end, message, field_dict): - value = field_dict.get(key) - if value is None: - value = field_dict.setdefault(key, new_default(message)) - while 1: - (element, new_pos) = decode_value(buffer, pos) - value.append(element) - # Predict that the next tag is another copy of the same repeated - # field. - pos = new_pos + tag_len - if buffer[new_pos:pos] != tag_bytes or new_pos >= end: - # Prediction failed. Return. - if new_pos > end: - raise _DecodeError('Truncated message.') - return new_pos - return DecodeRepeatedField - else: - def DecodeField(buffer, pos, end, message, field_dict): - (new_value, pos) = decode_value(buffer, pos) - if pos > end: - raise _DecodeError('Truncated message.') - if clear_if_default and not new_value: - field_dict.pop(key, None) - else: - field_dict[key] = new_value - return pos - return DecodeField - - return SpecificDecoder - - -def _ModifiedDecoder(wire_type, decode_value, modify_value): - """Like SimpleDecoder but additionally invokes modify_value on every value - before storing it. Usually modify_value is ZigZagDecode. - """ - - # Reusing _SimpleDecoder is slightly slower than copying a bunch of code, but - # not enough to make a significant difference. - - def InnerDecode(buffer, pos): - (result, new_pos) = decode_value(buffer, pos) - return (modify_value(result), new_pos) - return _SimpleDecoder(wire_type, InnerDecode) - - -def _StructPackDecoder(wire_type, format): - """Return a constructor for a decoder for a fixed-width field. - - Args: - wire_type: The field's wire type. - format: The format string to pass to struct.unpack(). 
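To make `ReadTag` and the tag-prediction trick concrete, a hedged sketch; the field number and payload are invented for illustration.

```python
# Hedged sketch: a tag is (field_number << 3) | wire_type, itself a varint.
from google.protobuf.internal import decoder

buf = memoryview(b'\x08\x96\x01')          # field 1, wire type 0, value 150
tag_bytes, pos = decoder.ReadTag(buf, 0)
assert tag_bytes == b'\x08' and pos == 1   # raw tag bytes, for dict lookup
value, pos = decoder._DecodeVarint(buf, pos)
assert value == 150 and pos == 3
```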
- """ - - value_size = struct.calcsize(format) - local_unpack = struct.unpack - - # Reusing _SimpleDecoder is slightly slower than copying a bunch of code, but - # not enough to make a significant difference. - - # Note that we expect someone up-stack to catch struct.error and convert - # it to _DecodeError -- this way we don't have to set up exception- - # handling blocks every time we parse one value. - - def InnerDecode(buffer, pos): - new_pos = pos + value_size - result = local_unpack(format, buffer[pos:new_pos])[0] - return (result, new_pos) - return _SimpleDecoder(wire_type, InnerDecode) - - -def _FloatDecoder(): - """Returns a decoder for a float field. - - This code works around a bug in struct.unpack for non-finite 32-bit - floating-point values. - """ - - local_unpack = struct.unpack - - def InnerDecode(buffer, pos): - """Decode serialized float to a float and new position. - - Args: - buffer: memoryview of the serialized bytes - pos: int, position in the memory view to start at. - - Returns: - Tuple[float, int] of the deserialized float value and new position - in the serialized data. - """ - # We expect a 32-bit value in little-endian byte order. Bit 1 is the sign - # bit, bits 2-9 represent the exponent, and bits 10-32 are the significand. - new_pos = pos + 4 - float_bytes = buffer[pos:new_pos].tobytes() - - # If this value has all its exponent bits set, then it's non-finite. - # In Python 2.4, struct.unpack will convert it to a finite 64-bit value. - # To avoid that, we parse it specially. - if (float_bytes[3:4] in b'\x7F\xFF' and float_bytes[2:3] >= b'\x80'): - # If at least one significand bit is set... - if float_bytes[0:3] != b'\x00\x00\x80': - return (math.nan, new_pos) - # If sign bit is set... - if float_bytes[3:4] == b'\xFF': - return (-math.inf, new_pos) - return (math.inf, new_pos) - - # Note that we expect someone up-stack to catch struct.error and convert - # it to _DecodeError -- this way we don't have to set up exception- - # handling blocks every time we parse one value. - result = local_unpack('= b'\xF0') - and (double_bytes[0:7] != b'\x00\x00\x00\x00\x00\x00\xF0')): - return (math.nan, new_pos) - - # Note that we expect someone up-stack to catch struct.error and convert - # it to _DecodeError -- this way we don't have to set up exception- - # handling blocks every time we parse one value. - result = local_unpack(' end: - raise _DecodeError('Truncated message.') - while pos < endpoint: - value_start_pos = pos - (element, pos) = _DecodeSignedVarint32(buffer, pos) - # pylint: disable=protected-access - if element in enum_type.values_by_number: - value.append(element) - else: - if not message._unknown_fields: - message._unknown_fields = [] - tag_bytes = encoder.TagBytes(field_number, - wire_format.WIRETYPE_VARINT) - - message._unknown_fields.append( - (tag_bytes, buffer[value_start_pos:pos].tobytes())) - if message._unknown_field_set is None: - message._unknown_field_set = containers.UnknownFieldSet() - message._unknown_field_set._add( - field_number, wire_format.WIRETYPE_VARINT, element) - # pylint: enable=protected-access - if pos > endpoint: - if element in enum_type.values_by_number: - del value[-1] # Discard corrupt value. 
- else: - del message._unknown_fields[-1] - # pylint: disable=protected-access - del message._unknown_field_set._values[-1] - # pylint: enable=protected-access - raise _DecodeError('Packed element was truncated.') - return pos - return DecodePackedField - elif is_repeated: - tag_bytes = encoder.TagBytes(field_number, wire_format.WIRETYPE_VARINT) - tag_len = len(tag_bytes) - def DecodeRepeatedField(buffer, pos, end, message, field_dict): - """Decode serialized repeated enum to its value and a new position. - - Args: - buffer: memoryview of the serialized bytes. - pos: int, position in the memory view to start at. - end: int, end position of serialized data - message: Message object to store unknown fields in - field_dict: Map[Descriptor, Any] to store decoded values in. - - Returns: - int, new position in serialized data. - """ - value = field_dict.get(key) - if value is None: - value = field_dict.setdefault(key, new_default(message)) - while 1: - (element, new_pos) = _DecodeSignedVarint32(buffer, pos) - # pylint: disable=protected-access - if element in enum_type.values_by_number: - value.append(element) - else: - if not message._unknown_fields: - message._unknown_fields = [] - message._unknown_fields.append( - (tag_bytes, buffer[pos:new_pos].tobytes())) - if message._unknown_field_set is None: - message._unknown_field_set = containers.UnknownFieldSet() - message._unknown_field_set._add( - field_number, wire_format.WIRETYPE_VARINT, element) - # pylint: enable=protected-access - # Predict that the next tag is another copy of the same repeated - # field. - pos = new_pos + tag_len - if buffer[new_pos:pos] != tag_bytes or new_pos >= end: - # Prediction failed. Return. - if new_pos > end: - raise _DecodeError('Truncated message.') - return new_pos - return DecodeRepeatedField - else: - def DecodeField(buffer, pos, end, message, field_dict): - """Decode serialized repeated enum to its value and a new position. - - Args: - buffer: memoryview of the serialized bytes. - pos: int, position in the memory view to start at. - end: int, end position of serialized data - message: Message object to store unknown fields in - field_dict: Map[Descriptor, Any] to store decoded values in. - - Returns: - int, new position in serialized data. 
- """ - value_start_pos = pos - (enum_value, pos) = _DecodeSignedVarint32(buffer, pos) - if pos > end: - raise _DecodeError('Truncated message.') - if clear_if_default and not enum_value: - field_dict.pop(key, None) - return pos - # pylint: disable=protected-access - if enum_value in enum_type.values_by_number: - field_dict[key] = enum_value - else: - if not message._unknown_fields: - message._unknown_fields = [] - tag_bytes = encoder.TagBytes(field_number, - wire_format.WIRETYPE_VARINT) - message._unknown_fields.append( - (tag_bytes, buffer[value_start_pos:pos].tobytes())) - if message._unknown_field_set is None: - message._unknown_field_set = containers.UnknownFieldSet() - message._unknown_field_set._add( - field_number, wire_format.WIRETYPE_VARINT, enum_value) - # pylint: enable=protected-access - return pos - return DecodeField - - -# -------------------------------------------------------------------- - - -Int32Decoder = _SimpleDecoder( - wire_format.WIRETYPE_VARINT, _DecodeSignedVarint32) - -Int64Decoder = _SimpleDecoder( - wire_format.WIRETYPE_VARINT, _DecodeSignedVarint) - -UInt32Decoder = _SimpleDecoder(wire_format.WIRETYPE_VARINT, _DecodeVarint32) -UInt64Decoder = _SimpleDecoder(wire_format.WIRETYPE_VARINT, _DecodeVarint) - -SInt32Decoder = _ModifiedDecoder( - wire_format.WIRETYPE_VARINT, _DecodeVarint32, wire_format.ZigZagDecode) -SInt64Decoder = _ModifiedDecoder( - wire_format.WIRETYPE_VARINT, _DecodeVarint, wire_format.ZigZagDecode) - -# Note that Python conveniently guarantees that when using the '<' prefix on -# formats, they will also have the same size across all platforms (as opposed -# to without the prefix, where their sizes depend on the C compiler's basic -# type sizes). -Fixed32Decoder = _StructPackDecoder(wire_format.WIRETYPE_FIXED32, ' end: - raise _DecodeError('Truncated string.') - value.append(_ConvertToUnicode(buffer[pos:new_pos])) - # Predict that the next tag is another copy of the same repeated field. - pos = new_pos + tag_len - if buffer[new_pos:pos] != tag_bytes or new_pos == end: - # Prediction failed. Return. - return new_pos - return DecodeRepeatedField - else: - def DecodeField(buffer, pos, end, message, field_dict): - (size, pos) = local_DecodeVarint(buffer, pos) - new_pos = pos + size - if new_pos > end: - raise _DecodeError('Truncated string.') - if clear_if_default and not size: - field_dict.pop(key, None) - else: - field_dict[key] = _ConvertToUnicode(buffer[pos:new_pos]) - return new_pos - return DecodeField - - -def BytesDecoder(field_number, is_repeated, is_packed, key, new_default, - clear_if_default=False): - """Returns a decoder for a bytes field.""" - - local_DecodeVarint = _DecodeVarint - - assert not is_packed - if is_repeated: - tag_bytes = encoder.TagBytes(field_number, - wire_format.WIRETYPE_LENGTH_DELIMITED) - tag_len = len(tag_bytes) - def DecodeRepeatedField(buffer, pos, end, message, field_dict): - value = field_dict.get(key) - if value is None: - value = field_dict.setdefault(key, new_default(message)) - while 1: - (size, pos) = local_DecodeVarint(buffer, pos) - new_pos = pos + size - if new_pos > end: - raise _DecodeError('Truncated string.') - value.append(buffer[pos:new_pos].tobytes()) - # Predict that the next tag is another copy of the same repeated field. - pos = new_pos + tag_len - if buffer[new_pos:pos] != tag_bytes or new_pos == end: - # Prediction failed. Return. 
- return new_pos - return DecodeRepeatedField - else: - def DecodeField(buffer, pos, end, message, field_dict): - (size, pos) = local_DecodeVarint(buffer, pos) - new_pos = pos + size - if new_pos > end: - raise _DecodeError('Truncated string.') - if clear_if_default and not size: - field_dict.pop(key, None) - else: - field_dict[key] = buffer[pos:new_pos].tobytes() - return new_pos - return DecodeField - - -def GroupDecoder(field_number, is_repeated, is_packed, key, new_default): - """Returns a decoder for a group field.""" - - end_tag_bytes = encoder.TagBytes(field_number, - wire_format.WIRETYPE_END_GROUP) - end_tag_len = len(end_tag_bytes) - - assert not is_packed - if is_repeated: - tag_bytes = encoder.TagBytes(field_number, - wire_format.WIRETYPE_START_GROUP) - tag_len = len(tag_bytes) - def DecodeRepeatedField(buffer, pos, end, message, field_dict): - value = field_dict.get(key) - if value is None: - value = field_dict.setdefault(key, new_default(message)) - while 1: - value = field_dict.get(key) - if value is None: - value = field_dict.setdefault(key, new_default(message)) - # Read sub-message. - pos = value.add()._InternalParse(buffer, pos, end) - # Read end tag. - new_pos = pos+end_tag_len - if buffer[pos:new_pos] != end_tag_bytes or new_pos > end: - raise _DecodeError('Missing group end tag.') - # Predict that the next tag is another copy of the same repeated field. - pos = new_pos + tag_len - if buffer[new_pos:pos] != tag_bytes or new_pos == end: - # Prediction failed. Return. - return new_pos - return DecodeRepeatedField - else: - def DecodeField(buffer, pos, end, message, field_dict): - value = field_dict.get(key) - if value is None: - value = field_dict.setdefault(key, new_default(message)) - # Read sub-message. - pos = value._InternalParse(buffer, pos, end) - # Read end tag. - new_pos = pos+end_tag_len - if buffer[pos:new_pos] != end_tag_bytes or new_pos > end: - raise _DecodeError('Missing group end tag.') - return new_pos - return DecodeField - - -def MessageDecoder(field_number, is_repeated, is_packed, key, new_default): - """Returns a decoder for a message field.""" - - local_DecodeVarint = _DecodeVarint - - assert not is_packed - if is_repeated: - tag_bytes = encoder.TagBytes(field_number, - wire_format.WIRETYPE_LENGTH_DELIMITED) - tag_len = len(tag_bytes) - def DecodeRepeatedField(buffer, pos, end, message, field_dict): - value = field_dict.get(key) - if value is None: - value = field_dict.setdefault(key, new_default(message)) - while 1: - # Read length. - (size, pos) = local_DecodeVarint(buffer, pos) - new_pos = pos + size - if new_pos > end: - raise _DecodeError('Truncated message.') - # Read sub-message. - if value.add()._InternalParse(buffer, pos, new_pos) != new_pos: - # The only reason _InternalParse would return early is if it - # encountered an end-group tag. - raise _DecodeError('Unexpected end-group tag.') - # Predict that the next tag is another copy of the same repeated field. - pos = new_pos + tag_len - if buffer[new_pos:pos] != tag_bytes or new_pos == end: - # Prediction failed. Return. - return new_pos - return DecodeRepeatedField - else: - def DecodeField(buffer, pos, end, message, field_dict): - value = field_dict.get(key) - if value is None: - value = field_dict.setdefault(key, new_default(message)) - # Read length. - (size, pos) = local_DecodeVarint(buffer, pos) - new_pos = pos + size - if new_pos > end: - raise _DecodeError('Truncated message.') - # Read sub-message. 
- if value._InternalParse(buffer, pos, new_pos) != new_pos: - # The only reason _InternalParse would return early is if it encountered - # an end-group tag. - raise _DecodeError('Unexpected end-group tag.') - return new_pos - return DecodeField - - -# -------------------------------------------------------------------- - -MESSAGE_SET_ITEM_TAG = encoder.TagBytes(1, wire_format.WIRETYPE_START_GROUP) - -def MessageSetItemDecoder(descriptor): - """Returns a decoder for a MessageSet item. - - The parameter is the message Descriptor. - - The message set message looks like this: - message MessageSet { - repeated group Item = 1 { - required int32 type_id = 2; - required string message = 3; - } - } - """ - - type_id_tag_bytes = encoder.TagBytes(2, wire_format.WIRETYPE_VARINT) - message_tag_bytes = encoder.TagBytes(3, wire_format.WIRETYPE_LENGTH_DELIMITED) - item_end_tag_bytes = encoder.TagBytes(1, wire_format.WIRETYPE_END_GROUP) - - local_ReadTag = ReadTag - local_DecodeVarint = _DecodeVarint - local_SkipField = SkipField - - def DecodeItem(buffer, pos, end, message, field_dict): - """Decode serialized message set to its value and new position. - - Args: - buffer: memoryview of the serialized bytes. - pos: int, position in the memory view to start at. - end: int, end position of serialized data - message: Message object to store unknown fields in - field_dict: Map[Descriptor, Any] to store decoded values in. - - Returns: - int, new position in serialized data. - """ - message_set_item_start = pos - type_id = -1 - message_start = -1 - message_end = -1 - - # Technically, type_id and message can appear in any order, so we need - # a little loop here. - while 1: - (tag_bytes, pos) = local_ReadTag(buffer, pos) - if tag_bytes == type_id_tag_bytes: - (type_id, pos) = local_DecodeVarint(buffer, pos) - elif tag_bytes == message_tag_bytes: - (size, message_start) = local_DecodeVarint(buffer, pos) - pos = message_end = message_start + size - elif tag_bytes == item_end_tag_bytes: - break - else: - pos = SkipField(buffer, pos, end, tag_bytes) - if pos == -1: - raise _DecodeError('Missing group end tag.') - - if pos > end: - raise _DecodeError('Truncated message.') - - if type_id == -1: - raise _DecodeError('MessageSet item missing type_id.') - if message_start == -1: - raise _DecodeError('MessageSet item missing message.') - - extension = message.Extensions._FindExtensionByNumber(type_id) - # pylint: disable=protected-access - if extension is not None: - value = field_dict.get(extension) - if value is None: - message_type = extension.message_type - if not hasattr(message_type, '_concrete_class'): - # pylint: disable=protected-access - message._FACTORY.GetPrototype(message_type) - value = field_dict.setdefault( - extension, message_type._concrete_class()) - if value._InternalParse(buffer, message_start,message_end) != message_end: - # The only reason _InternalParse would return early is if it encountered - # an end-group tag. 
-        raise _DecodeError('Unexpected end-group tag.')
-    else:
-      if not message._unknown_fields:
-        message._unknown_fields = []
-      message._unknown_fields.append(
-          (MESSAGE_SET_ITEM_TAG, buffer[message_set_item_start:pos].tobytes()))
-      if message._unknown_field_set is None:
-        message._unknown_field_set = containers.UnknownFieldSet()
-      message._unknown_field_set._add(
-          type_id,
-          wire_format.WIRETYPE_LENGTH_DELIMITED,
-          buffer[message_start:message_end].tobytes())
-    # pylint: enable=protected-access
-
-    return pos
-
-  return DecodeItem
-
-# --------------------------------------------------------------------
-
-def MapDecoder(field_descriptor, new_default, is_message_map):
-  """Returns a decoder for a map field."""
-
-  key = field_descriptor
-  tag_bytes = encoder.TagBytes(field_descriptor.number,
-                               wire_format.WIRETYPE_LENGTH_DELIMITED)
-  tag_len = len(tag_bytes)
-  local_DecodeVarint = _DecodeVarint
-  # Can't read _concrete_class yet; might not be initialized.
-  message_type = field_descriptor.message_type
-
-  def DecodeMap(buffer, pos, end, message, field_dict):
-    submsg = message_type._concrete_class()
-    value = field_dict.get(key)
-    if value is None:
-      value = field_dict.setdefault(key, new_default(message))
-    while 1:
-      # Read length.
-      (size, pos) = local_DecodeVarint(buffer, pos)
-      new_pos = pos + size
-      if new_pos > end:
-        raise _DecodeError('Truncated message.')
-      # Read sub-message.
-      submsg.Clear()
-      if submsg._InternalParse(buffer, pos, new_pos) != new_pos:
-        # The only reason _InternalParse would return early is if it
-        # encountered an end-group tag.
-        raise _DecodeError('Unexpected end-group tag.')
-
-      if is_message_map:
-        value[submsg.key].CopyFrom(submsg.value)
-      else:
-        value[submsg.key] = submsg.value
-
-      # Predict that the next tag is another copy of the same repeated field.
-      pos = new_pos + tag_len
-      if buffer[new_pos:pos] != tag_bytes or new_pos == end:
-        # Prediction failed.  Return.
-        return new_pos
-
-  return DecodeMap
-
-# --------------------------------------------------------------------
-# Optimization is not as heavy here because calls to SkipField() are rare,
-# except for handling end-group tags.
-
-def _SkipVarint(buffer, pos, end):
-  """Skip a varint value.  Returns the new position."""
-  # Previously ord(buffer[pos]) raised IndexError when pos is out of range.
-  # With this code, ord(b'') raises TypeError.  Both are handled in
-  # python_message.py to generate a 'Truncated message' error.
-  while ord(buffer[pos:pos+1].tobytes()) & 0x80:
-    pos += 1
-  pos += 1
-  if pos > end:
-    raise _DecodeError('Truncated message.')
-  return pos
-
-
-def _SkipFixed64(buffer, pos, end):
-  """Skip a fixed64 value.  Returns the new position."""
-
-  pos += 8
-  if pos > end:
-    raise _DecodeError('Truncated message.')
-  return pos
-
-
-def _DecodeFixed64(buffer, pos):
-  """Decode a fixed64."""
-  new_pos = pos + 8
-  return (struct.unpack('<Q', buffer[pos:new_pos])[0], new_pos)
-
-
-def _SkipLengthDelimited(buffer, pos, end):
-  """Skip a length-delimited value.  Returns the new position."""
-
-  (size, pos) = _DecodeVarint(buffer, pos)
-  pos += size
-  if pos > end:
-    raise _DecodeError('Truncated message.')
-  return pos
-
-
-def _SkipGroup(buffer, pos, end):
-  """Skip sub-group.  Returns the new position."""
-
-  while 1:
-    (tag_bytes, pos) = ReadTag(buffer, pos)
-    new_pos = SkipField(buffer, pos, end, tag_bytes)
-    if new_pos == -1:
-      return pos
-    pos = new_pos
-
-
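The skip helpers above share one convention: take a memoryview plus a position, return the position just past the value, and raise `_DecodeError` once the position runs past `end`. As a quick illustration of the varint case, here is a minimal sketch; `read_varint` is a hypothetical stand-in for this module's `_DecodeVarint`, not code from the file:

    def read_varint(buf, pos):
        # Accumulate 7 payload bits per byte, least-significant group first.
        result, shift = 0, 0
        while True:
            byte = buf[pos]  # memoryview indexing yields an int in Python 3
            pos += 1
            result |= (byte & 0x7F) << shift
            if not byte & 0x80:  # high bit clear: last byte of this varint
                return result, pos
            shift += 7

    data = memoryview(bytes([0x96, 0x01]))  # varint encoding of 150
    assert read_varint(data, 0) == (150, 2)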
-def _DecodeUnknownFieldSet(buffer, pos, end_pos=None):
-  """Decode UnknownFieldSet.  Returns the UnknownFieldSet and new position."""
-
-  unknown_field_set = containers.UnknownFieldSet()
-  while end_pos is None or pos < end_pos:
-    (tag_bytes, pos) = ReadTag(buffer, pos)
-    (tag, _) = _DecodeVarint(tag_bytes, 0)
-    field_number, wire_type = wire_format.UnpackTag(tag)
-    if wire_type == wire_format.WIRETYPE_END_GROUP:
-      break
-    (data, pos) = _DecodeUnknownField(buffer, pos, wire_type)
-    # pylint: disable=protected-access
-    unknown_field_set._add(field_number, wire_type, data)
-
-  return (unknown_field_set, pos)
-
-
-def _DecodeUnknownField(buffer, pos, wire_type):
-  """Decode an unknown field.  Returns the UnknownField and new position."""
-
-  if wire_type == wire_format.WIRETYPE_VARINT:
-    (data, pos) = _DecodeVarint(buffer, pos)
-  elif wire_type == wire_format.WIRETYPE_FIXED64:
-    (data, pos) = _DecodeFixed64(buffer, pos)
-  elif wire_type == wire_format.WIRETYPE_FIXED32:
-    (data, pos) = _DecodeFixed32(buffer, pos)
-  elif wire_type == wire_format.WIRETYPE_LENGTH_DELIMITED:
-    (size, pos) = _DecodeVarint(buffer, pos)
-    data = buffer[pos:pos+size].tobytes()
-    pos += size
-  elif wire_type == wire_format.WIRETYPE_START_GROUP:
-    (data, pos) = _DecodeUnknownFieldSet(buffer, pos)
-  elif wire_type == wire_format.WIRETYPE_END_GROUP:
-    return (0, -1)
-  else:
-    raise _DecodeError('Wrong wire type in tag.')
-
-  return (data, pos)
-
-
-def _EndGroup(buffer, pos, end):
-  """Skipping an END_GROUP tag returns -1 to tell the parent loop to break."""
-
-  return -1
-
-
-def _SkipFixed32(buffer, pos, end):
-  """Skip a fixed32 value.  Returns the new position."""
-
-  pos += 4
-  if pos > end:
-    raise _DecodeError('Truncated message.')
-  return pos
-
-
-def _DecodeFixed32(buffer, pos):
-  """Decode a fixed32."""
-
-  new_pos = pos + 4
-  return (struct.unpack('<I', buffer[pos:new_pos])[0], new_pos)
diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/encoder.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/encoder.py
deleted file mode 100644
--- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/encoder.py
+++ /dev/null
-def _VarintEncoder():
-  """Return an encoder for a basic varint value (does not include tag)."""
-
-  local_int2byte = struct.Struct('>B').pack
-
-  def EncodeVarint(write, value, unused_deterministic=None):
-    bits = value & 0x7f
-    value >>= 7
-    while value:
-      write(local_int2byte(0x80|bits))
-      bits = value & 0x7f
-      value >>= 7
-    return write(local_int2byte(bits))
-
-  return EncodeVarint
-
-
-def _SignedVarintEncoder():
-  """Return an encoder for a basic signed varint value (does not include
-  tag)."""
-
-  local_int2byte = struct.Struct('>B').pack
-
-  def EncodeSignedVarint(write, value, unused_deterministic=None):
-    if value < 0:
-      value += (1 << 64)
-    bits = value & 0x7f
-    value >>= 7
-    while value:
-      write(local_int2byte(0x80|bits))
-      bits = value & 0x7f
-      value >>= 7
-    return write(local_int2byte(bits))
-
-  return EncodeSignedVarint
-
-
-_EncodeVarint = _VarintEncoder()
-_EncodeSignedVarint = _SignedVarintEncoder()
-
-
-def _VarintBytes(value):
-  """Encode the given integer as a varint and return the bytes.  This is only
-  called at startup time so it doesn't need to be fast."""
-
-  pieces = []
-  _EncodeVarint(pieces.append, value, True)
-  return b"".join(pieces)
-
-
-def TagBytes(field_number, wire_type):
-  """Encode the given tag and return the bytes.  Only called at startup."""
-
-  return bytes(_VarintBytes(wire_format.PackTag(field_number, wire_type)))
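Seen from the encoder side, `_EncodeVarint` and `TagBytes` above produce exactly the inverse of the decoder's varint loop. A small hedged round-trip sketch, standalone and using only builtins; the expected bytes follow from the wire format, not from running this module:

    def encode_varint(value):
        # Emit 7 bits per byte, setting the high bit on every byte but the last.
        out = bytearray()
        while True:
            bits = value & 0x7F
            value >>= 7
            if value:
                out.append(0x80 | bits)
            else:
                out.append(bits)
                return bytes(out)

    assert encode_varint(150) == b'\x96\x01'
    # A tag is just the varint of (field_number << 3 | wire_type):
    assert encode_varint((1 << 3) | 0) == b'\x08'  # field 1, varint wire type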
-
-# --------------------------------------------------------------------
-# As with sizers (see above), we have a number of common encoder
-# implementations.
-
-
-def _SimpleEncoder(wire_type, encode_value, compute_value_size):
-  """Return a constructor for an encoder for fields of a particular type.
-
-  Args:
-      wire_type:  The field's wire type, for encoding tags.
-      encode_value:  A function which encodes an individual value, e.g.
-        _EncodeVarint().
-      compute_value_size:  A function which computes the size of an individual
-        value, e.g. _VarintSize().
-  """
-
-  def SpecificEncoder(field_number, is_repeated, is_packed):
-    if is_packed:
-      tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
-      local_EncodeVarint = _EncodeVarint
-      def EncodePackedField(write, value, deterministic):
-        write(tag_bytes)
-        size = 0
-        for element in value:
-          size += compute_value_size(element)
-        local_EncodeVarint(write, size, deterministic)
-        for element in value:
-          encode_value(write, element, deterministic)
-      return EncodePackedField
-    elif is_repeated:
-      tag_bytes = TagBytes(field_number, wire_type)
-      def EncodeRepeatedField(write, value, deterministic):
-        for element in value:
-          write(tag_bytes)
-          encode_value(write, element, deterministic)
-      return EncodeRepeatedField
-    else:
-      tag_bytes = TagBytes(field_number, wire_type)
-      def EncodeField(write, value, deterministic):
-        write(tag_bytes)
-        return encode_value(write, value, deterministic)
-      return EncodeField
-
-  return SpecificEncoder
-
-
-def _ModifiedEncoder(wire_type, encode_value, compute_value_size, modify_value):
-  """Like SimpleEncoder but additionally invokes modify_value on every value
-  before passing it to encode_value.  Usually modify_value is ZigZagEncode."""
-
-  def SpecificEncoder(field_number, is_repeated, is_packed):
-    if is_packed:
-      tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
-      local_EncodeVarint = _EncodeVarint
-      def EncodePackedField(write, value, deterministic):
-        write(tag_bytes)
-        size = 0
-        for element in value:
-          size += compute_value_size(modify_value(element))
-        local_EncodeVarint(write, size, deterministic)
-        for element in value:
-          encode_value(write, modify_value(element), deterministic)
-      return EncodePackedField
-    elif is_repeated:
-      tag_bytes = TagBytes(field_number, wire_type)
-      def EncodeRepeatedField(write, value, deterministic):
-        for element in value:
-          write(tag_bytes)
-          encode_value(write, modify_value(element), deterministic)
-      return EncodeRepeatedField
-    else:
-      tag_bytes = TagBytes(field_number, wire_type)
-      def EncodeField(write, value, deterministic):
-        write(tag_bytes)
-        return encode_value(write, modify_value(value), deterministic)
-      return EncodeField
-
-  return SpecificEncoder
-
-
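`_ModifiedEncoder` exists almost entirely for the sint32/sint64 case, where `modify_value` is `wire_format.ZigZagEncode`. The transform interleaves negative and positive values so small magnitudes stay small on the wire; a sketch of the standard ZigZag mapping (not this module's code):

    def zigzag_encode(n):
        # 0, -1, 1, -2, 2, ... map to 0, 1, 2, 3, 4, ...
        return (n << 1) ^ (n >> 63)  # arithmetic shift propagates the sign

    assert [zigzag_encode(n) for n in (0, -1, 1, -2, 2)] == [0, 1, 2, 3, 4]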
- """ - - value_size = struct.calcsize(format) - - def SpecificEncoder(field_number, is_repeated, is_packed): - local_struct_pack = struct.pack - if is_packed: - tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED) - local_EncodeVarint = _EncodeVarint - def EncodePackedField(write, value, deterministic): - write(tag_bytes) - local_EncodeVarint(write, len(value) * value_size, deterministic) - for element in value: - write(local_struct_pack(format, element)) - return EncodePackedField - elif is_repeated: - tag_bytes = TagBytes(field_number, wire_type) - def EncodeRepeatedField(write, value, unused_deterministic=None): - for element in value: - write(tag_bytes) - write(local_struct_pack(format, element)) - return EncodeRepeatedField - else: - tag_bytes = TagBytes(field_number, wire_type) - def EncodeField(write, value, unused_deterministic=None): - write(tag_bytes) - return write(local_struct_pack(format, value)) - return EncodeField - - return SpecificEncoder - - -def _FloatingPointEncoder(wire_type, format): - """Return a constructor for an encoder for float fields. - - This is like StructPackEncoder, but catches errors that may be due to - passing non-finite floating-point values to struct.pack, and makes a - second attempt to encode those values. - - Args: - wire_type: The field's wire type, for encoding tags. - format: The format string to pass to struct.pack(). - """ - - value_size = struct.calcsize(format) - if value_size == 4: - def EncodeNonFiniteOrRaise(write, value): - # Remember that the serialized form uses little-endian byte order. - if value == _POS_INF: - write(b'\x00\x00\x80\x7F') - elif value == _NEG_INF: - write(b'\x00\x00\x80\xFF') - elif value != value: # NaN - write(b'\x00\x00\xC0\x7F') - else: - raise - elif value_size == 8: - def EncodeNonFiniteOrRaise(write, value): - if value == _POS_INF: - write(b'\x00\x00\x00\x00\x00\x00\xF0\x7F') - elif value == _NEG_INF: - write(b'\x00\x00\x00\x00\x00\x00\xF0\xFF') - elif value != value: # NaN - write(b'\x00\x00\x00\x00\x00\x00\xF8\x7F') - else: - raise - else: - raise ValueError('Can\'t encode floating-point values that are ' - '%d bytes long (only 4 or 8)' % value_size) - - def SpecificEncoder(field_number, is_repeated, is_packed): - local_struct_pack = struct.pack - if is_packed: - tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED) - local_EncodeVarint = _EncodeVarint - def EncodePackedField(write, value, deterministic): - write(tag_bytes) - local_EncodeVarint(write, len(value) * value_size, deterministic) - for element in value: - # This try/except block is going to be faster than any code that - # we could write to check whether element is finite. 
-def _FloatingPointEncoder(wire_type, format):
-  """Return a constructor for an encoder for float fields.
-
-  This is like StructPackEncoder, but catches errors that may be due to
-  passing non-finite floating-point values to struct.pack, and makes a
-  second attempt to encode those values.
-
-  Args:
-      wire_type:  The field's wire type, for encoding tags.
-      format:  The format string to pass to struct.pack().
-  """
-
-  value_size = struct.calcsize(format)
-  if value_size == 4:
-    def EncodeNonFiniteOrRaise(write, value):
-      # Remember that the serialized form uses little-endian byte order.
-      if value == _POS_INF:
-        write(b'\x00\x00\x80\x7F')
-      elif value == _NEG_INF:
-        write(b'\x00\x00\x80\xFF')
-      elif value != value:  # NaN
-        write(b'\x00\x00\xC0\x7F')
-      else:
-        raise
-  elif value_size == 8:
-    def EncodeNonFiniteOrRaise(write, value):
-      if value == _POS_INF:
-        write(b'\x00\x00\x00\x00\x00\x00\xF0\x7F')
-      elif value == _NEG_INF:
-        write(b'\x00\x00\x00\x00\x00\x00\xF0\xFF')
-      elif value != value:  # NaN
-        write(b'\x00\x00\x00\x00\x00\x00\xF8\x7F')
-      else:
-        raise
-  else:
-    raise ValueError('Can\'t encode floating-point values that are '
-                     '%d bytes long (only 4 or 8)' % value_size)
-
-  def SpecificEncoder(field_number, is_repeated, is_packed):
-    local_struct_pack = struct.pack
-    if is_packed:
-      tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
-      local_EncodeVarint = _EncodeVarint
-      def EncodePackedField(write, value, deterministic):
-        write(tag_bytes)
-        local_EncodeVarint(write, len(value) * value_size, deterministic)
-        for element in value:
-          # This try/except block is going to be faster than any code that
-          # we could write to check whether element is finite.
-          try:
-            write(local_struct_pack(format, element))
-          except SystemError:
-            EncodeNonFiniteOrRaise(write, element)
-      return EncodePackedField
-    elif is_repeated:
-      tag_bytes = TagBytes(field_number, wire_type)
-      def EncodeRepeatedField(write, value, unused_deterministic=None):
-        for element in value:
-          write(tag_bytes)
-          try:
-            write(local_struct_pack(format, element))
-          except SystemError:
-            EncodeNonFiniteOrRaise(write, element)
-      return EncodeRepeatedField
-    else:
-      tag_bytes = TagBytes(field_number, wire_type)
-      def EncodeField(write, value, unused_deterministic=None):
-        write(tag_bytes)
-        try:
-          write(local_struct_pack(format, value))
-        except SystemError:
-          EncodeNonFiniteOrRaise(write, value)
-      return EncodeField
-
-  return SpecificEncoder
-
-
-# ====================================================================
-# Here we declare an encoder constructor for each field type.  These work
-# very similarly to sizer constructors, described earlier.
-
-
-Int32Encoder = Int64Encoder = EnumEncoder = _SimpleEncoder(
-    wire_format.WIRETYPE_VARINT, _EncodeSignedVarint, _SignedVarintSize)
-
-UInt32Encoder = UInt64Encoder = _SimpleEncoder(
-    wire_format.WIRETYPE_VARINT, _EncodeVarint, _VarintSize)
-
-SInt32Encoder = SInt64Encoder = _ModifiedEncoder(
-    wire_format.WIRETYPE_VARINT, _EncodeVarint, _VarintSize,
-    wire_format.ZigZagEncode)
-
-# Note that Python conveniently guarantees that when using the '<' prefix on
-# formats, they will also have the same size across all platforms (as opposed
-# to without the prefix, where their sizes depend on the C compiler's basic
-# type sizes).
-Fixed32Encoder  = _StructPackEncoder(wire_format.WIRETYPE_FIXED32, '<I')
-Fixed64Encoder  = _StructPackEncoder(wire_format.WIRETYPE_FIXED64, '<Q')
-SFixed32Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED32, '<i')
-SFixed64Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED64, '<q')
-FloatEncoder    = _FloatingPointEncoder(wire_format.WIRETYPE_FIXED32, '<f')
-DoubleEncoder   = _FloatingPointEncoder(wire_format.WIRETYPE_FIXED64, '<d')
diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/enum_type_wrapper.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/enum_type_wrapper.py
deleted file mode 100644
--- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/enum_type_wrapper.py
+++ /dev/null
-class EnumTypeWrapper(object):
-  """A utility for finding the names of enum values."""
-
-  DESCRIPTOR = None
-  ValueType = int
-
-  def __init__(self, enum_type):
-    """Inits EnumTypeWrapper with an EnumDescriptor."""
-    self._enum_type = enum_type
-    self.DESCRIPTOR = enum_type  # pylint: disable=invalid-name
-
-  def Name(self, number):  # pylint: disable=invalid-name
-    """Returns a string containing the name of an enum value."""
-    try:
-      return self._enum_type.values_by_number[number].name
-    except KeyError:
-      pass  # fall out to break exception chaining
-
-    if not isinstance(number, int):
-      raise TypeError(
-          'Enum value for {} must be an int, but got {} {!r}.'.format(
-              self._enum_type.name, type(number), number))
-    else:
-      # repr here to handle the odd case when you pass in a boolean.
-      raise ValueError('Enum {} has no name defined for value {!r}'.format(
-          self._enum_type.name, number))
-
-  def Value(self, name):  # pylint: disable=invalid-name
-    """Returns the value corresponding to the given enum name."""
-    try:
-      return self._enum_type.values_by_name[name].number
-    except KeyError:
-      pass  # fall out to break exception chaining
-    raise ValueError('Enum {} has no value defined for name {!r}'.format(
-        self._enum_type.name, name))
-
-  def keys(self):
-    """Return a list of the string names in the enum.
-
-    Returns:
-      A list of strs, in the order they were defined in the .proto file.
-    """
-
-    return [value_descriptor.name
-            for value_descriptor in self._enum_type.values]
-
-  def values(self):
-    """Return a list of the integer values in the enum.
-
-    Returns:
-      A list of ints, in the order they were defined in the .proto file.
-    """
-
-    return [value_descriptor.number
-            for value_descriptor in self._enum_type.values]
-
-  def items(self):
-    """Return a list of the (name, value) pairs of the enum.
-
-    Returns:
-      A list of (str, int) pairs, in the order they were defined
-      in the .proto file.
- """ - return [(value_descriptor.name, value_descriptor.number) - for value_descriptor in self._enum_type.values] - - def __getattr__(self, name): - """Returns the value corresponding to the given enum name.""" - try: - return super( - EnumTypeWrapper, - self).__getattribute__('_enum_type').values_by_name[name].number - except KeyError: - pass # fall out to break exception chaining - raise AttributeError('Enum {} has no value defined for name {!r}'.format( - self._enum_type.name, name)) diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/extension_dict.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/extension_dict.py deleted file mode 100644 index b346cf283e..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/extension_dict.py +++ /dev/null @@ -1,213 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. -# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -"""Contains _ExtensionDict class to represent extensions. -""" - -from google.protobuf.internal import type_checkers -from google.protobuf.descriptor import FieldDescriptor - - -def _VerifyExtensionHandle(message, extension_handle): - """Verify that the given extension handle is valid.""" - - if not isinstance(extension_handle, FieldDescriptor): - raise KeyError('HasExtension() expects an extension handle, got: %s' % - extension_handle) - - if not extension_handle.is_extension: - raise KeyError('"%s" is not an extension.' % extension_handle.full_name) - - if not extension_handle.containing_type: - raise KeyError('"%s" is missing a containing_type.' - % extension_handle.full_name) - - if extension_handle.containing_type is not message.DESCRIPTOR: - raise KeyError('Extension "%s" extends message type "%s", but this ' - 'message is of type "%s".' % - (extension_handle.full_name, - extension_handle.containing_type.full_name, - message.DESCRIPTOR.full_name)) - - -# TODO(robinson): Unify error handling of "unknown extension" crap. 
-# TODO(robinson): Support iteritems()-style iteration over all
-# extensions with the "has" bits turned on?
-class _ExtensionDict(object):
-
-  """Dict-like container for Extension fields on proto instances.
-
-  Note that in all cases we expect extension handles to be
-  FieldDescriptors.
-  """
-
-  def __init__(self, extended_message):
-    """
-    Args:
-      extended_message: Message instance for which we are the Extensions dict.
-    """
-    self._extended_message = extended_message
-
-  def __getitem__(self, extension_handle):
-    """Returns the current value of the given extension handle."""
-
-    _VerifyExtensionHandle(self._extended_message, extension_handle)
-
-    result = self._extended_message._fields.get(extension_handle)
-    if result is not None:
-      return result
-
-    if extension_handle.label == FieldDescriptor.LABEL_REPEATED:
-      result = extension_handle._default_constructor(self._extended_message)
-    elif extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE:
-      message_type = extension_handle.message_type
-      if not hasattr(message_type, '_concrete_class'):
-        # pylint: disable=protected-access
-        self._extended_message._FACTORY.GetPrototype(message_type)
-      assert getattr(extension_handle.message_type, '_concrete_class', None), (
-          'Uninitialized concrete class found for field %r (message type %r)'
-          % (extension_handle.full_name,
-             extension_handle.message_type.full_name))
-      result = extension_handle.message_type._concrete_class()
-      try:
-        result._SetListener(self._extended_message._listener_for_children)
-      except ReferenceError:
-        pass
-    else:
-      # Singular scalar -- just return the default without inserting into the
-      # dict.
-      return extension_handle.default_value
-
-    # Atomically check if another thread has preempted us and, if not, swap
-    # in the new object we just created.  If someone has preempted us, we
-    # take that object and discard ours.
-    # WARNING: We are relying on setdefault() being atomic.  This is true
-    # in CPython but we haven't investigated others.  This warning appears
-    # in several other locations in this file.
-    result = self._extended_message._fields.setdefault(
-        extension_handle, result)
-
-    return result
-
-  def __eq__(self, other):
-    if not isinstance(other, self.__class__):
-      return False
-
-    my_fields = self._extended_message.ListFields()
-    other_fields = other._extended_message.ListFields()
-
-    # Get rid of non-extension fields.
-    my_fields = [field for field in my_fields if field.is_extension]
-    other_fields = [field for field in other_fields if field.is_extension]
-
-    return my_fields == other_fields
-
-  def __ne__(self, other):
-    return not self == other
-
-  def __len__(self):
-    fields = self._extended_message.ListFields()
-    # Get rid of non-extension fields.
-    extension_fields = [field for field in fields if field[0].is_extension]
-    return len(extension_fields)
-
-  def __hash__(self):
-    raise TypeError('unhashable object')
-
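The `__getitem__` path above builds default values lazily and publishes them with an atomic `setdefault`, so two threads racing on the same unset extension end up sharing a single object. A toy illustration of that race-safe pattern, using a plain dict and list rather than any protobuf API:

    fields = {}

    def get_or_create(key):
        result = fields.get(key)
        if result is None:
            result = []  # freshly built default value
            # setdefault decides atomically whose object wins the race;
            # the loser silently discards its own copy.
            result = fields.setdefault(key, result)
        return result

    assert get_or_create('ext') is get_or_create('ext')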
- """ - - _VerifyExtensionHandle(self._extended_message, extension_handle) - - if (extension_handle.label == FieldDescriptor.LABEL_REPEATED or - extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE): - raise TypeError( - 'Cannot assign to extension "%s" because it is a repeated or ' - 'composite type.' % extension_handle.full_name) - - # It's slightly wasteful to lookup the type checker each time, - # but we expect this to be a vanishingly uncommon case anyway. - type_checker = type_checkers.GetTypeChecker(extension_handle) - # pylint: disable=protected-access - self._extended_message._fields[extension_handle] = ( - type_checker.CheckValue(value)) - self._extended_message._Modified() - - def __delitem__(self, extension_handle): - self._extended_message.ClearExtension(extension_handle) - - def _FindExtensionByName(self, name): - """Tries to find a known extension with the specified name. - - Args: - name: Extension full name. - - Returns: - Extension field descriptor. - """ - return self._extended_message._extensions_by_name.get(name, None) - - def _FindExtensionByNumber(self, number): - """Tries to find a known extension with the field number. - - Args: - number: Extension field number. - - Returns: - Extension field descriptor. - """ - return self._extended_message._extensions_by_number.get(number, None) - - def __iter__(self): - # Return a generator over the populated extension fields - return (f[0] for f in self._extended_message.ListFields() - if f[0].is_extension) - - def __contains__(self, extension_handle): - _VerifyExtensionHandle(self._extended_message, extension_handle) - - if extension_handle not in self._extended_message._fields: - return False - - if extension_handle.label == FieldDescriptor.LABEL_REPEATED: - return bool(self._extended_message._fields.get(extension_handle)) - - if extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE: - value = self._extended_message._fields.get(extension_handle) - # pylint: disable=protected-access - return value is not None and value._is_present_in_parent - - return True diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/message_listener.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/message_listener.py deleted file mode 100644 index 0fc255a774..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/message_listener.py +++ /dev/null @@ -1,78 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. -# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -"""Defines a listener interface for observing certain -state transitions on Message objects. - -Also defines a null implementation of this interface. -""" - -__author__ = 'robinson@google.com (Will Robinson)' - - -class MessageListener(object): - - """Listens for modifications made to a message. Meant to be registered via - Message._SetListener(). - - Attributes: - dirty: If True, then calling Modified() would be a no-op. This can be - used to avoid these calls entirely in the common case. - """ - - def Modified(self): - """Called every time the message is modified in such a way that the parent - message may need to be updated. This currently means either: - (a) The message was modified for the first time, so the parent message - should henceforth mark the message as present. - (b) The message's cached byte size became dirty -- i.e. the message was - modified for the first time after a previous call to ByteSize(). - Therefore the parent should also mark its byte size as dirty. - Note that (a) implies (b), since new objects start out with a client cached - size (zero). However, we document (a) explicitly because it is important. - - Modified() will *only* be called in response to one of these two events -- - not every time the sub-message is modified. - - Note that if the listener's |dirty| attribute is true, then calling - Modified at the moment would be a no-op, so it can be skipped. Performance- - sensitive callers should check this attribute directly before calling since - it will be true most of the time. - """ - - raise NotImplementedError - - -class NullMessageListener(object): - - """No-op MessageListener implementation.""" - - def Modified(self): - pass diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/message_set_extensions_pb2.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/message_set_extensions_pb2.py deleted file mode 100644 index 63651a3f19..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/message_set_extensions_pb2.py +++ /dev/null @@ -1,36 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/protobuf/internal/message_set_extensions.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n5google/protobuf/internal/message_set_extensions.proto\x12\x18google.protobuf.internal\"\x1e\n\x0eTestMessageSet*\x08\x08\x04\x10\xff\xff\xff\xff\x07:\x02\x08\x01\"\xa5\x01\n\x18TestMessageSetExtension1\x12\t\n\x01i\x18\x0f \x01(\x05\x32~\n\x15message_set_extension\x12(.google.protobuf.internal.TestMessageSet\x18\xab\xff\xf6. 
\x01(\x0b\x32\x32.google.protobuf.internal.TestMessageSetExtension1\"\xa7\x01\n\x18TestMessageSetExtension2\x12\x0b\n\x03str\x18\x19 \x01(\t2~\n\x15message_set_extension\x12(.google.protobuf.internal.TestMessageSet\x18\xca\xff\xf6. \x01(\x0b\x32\x32.google.protobuf.internal.TestMessageSetExtension2\"(\n\x18TestMessageSetExtension3\x12\x0c\n\x04text\x18# \x01(\t:\x7f\n\x16message_set_extension3\x12(.google.protobuf.internal.TestMessageSet\x18\xdf\xff\xf6. \x01(\x0b\x32\x32.google.protobuf.internal.TestMessageSetExtension3') - -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.message_set_extensions_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - TestMessageSet.RegisterExtension(message_set_extension3) - TestMessageSet.RegisterExtension(_TESTMESSAGESETEXTENSION1.extensions_by_name['message_set_extension']) - TestMessageSet.RegisterExtension(_TESTMESSAGESETEXTENSION2.extensions_by_name['message_set_extension']) - - DESCRIPTOR._options = None - _TESTMESSAGESET._options = None - _TESTMESSAGESET._serialized_options = b'\010\001' - _TESTMESSAGESET._serialized_start=83 - _TESTMESSAGESET._serialized_end=113 - _TESTMESSAGESETEXTENSION1._serialized_start=116 - _TESTMESSAGESETEXTENSION1._serialized_end=281 - _TESTMESSAGESETEXTENSION2._serialized_start=284 - _TESTMESSAGESETEXTENSION2._serialized_end=451 - _TESTMESSAGESETEXTENSION3._serialized_start=453 - _TESTMESSAGESETEXTENSION3._serialized_end=493 -# @@protoc_insertion_point(module_scope) diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/missing_enum_values_pb2.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/missing_enum_values_pb2.py deleted file mode 100644 index 5497083197..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/missing_enum_values_pb2.py +++ /dev/null @@ -1,37 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/protobuf/internal/missing_enum_values.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n2google/protobuf/internal/missing_enum_values.proto\x12\x1fgoogle.protobuf.python.internal\"\xc1\x02\n\x0eTestEnumValues\x12X\n\x14optional_nested_enum\x18\x01 \x01(\x0e\x32:.google.protobuf.python.internal.TestEnumValues.NestedEnum\x12X\n\x14repeated_nested_enum\x18\x02 \x03(\x0e\x32:.google.protobuf.python.internal.TestEnumValues.NestedEnum\x12Z\n\x12packed_nested_enum\x18\x03 \x03(\x0e\x32:.google.protobuf.python.internal.TestEnumValues.NestedEnumB\x02\x10\x01\"\x1f\n\nNestedEnum\x12\x08\n\x04ZERO\x10\x00\x12\x07\n\x03ONE\x10\x01\"\xd3\x02\n\x15TestMissingEnumValues\x12_\n\x14optional_nested_enum\x18\x01 \x01(\x0e\x32\x41.google.protobuf.python.internal.TestMissingEnumValues.NestedEnum\x12_\n\x14repeated_nested_enum\x18\x02 \x03(\x0e\x32\x41.google.protobuf.python.internal.TestMissingEnumValues.NestedEnum\x12\x61\n\x12packed_nested_enum\x18\x03 \x03(\x0e\x32\x41.google.protobuf.python.internal.TestMissingEnumValues.NestedEnumB\x02\x10\x01\"\x15\n\nNestedEnum\x12\x07\n\x03TWO\x10\x02\"\x1b\n\nJustString\x12\r\n\x05\x64ummy\x18\x01 \x02(\t') - -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.missing_enum_values_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - - DESCRIPTOR._options = None - _TESTENUMVALUES.fields_by_name['packed_nested_enum']._options = None - _TESTENUMVALUES.fields_by_name['packed_nested_enum']._serialized_options = b'\020\001' - _TESTMISSINGENUMVALUES.fields_by_name['packed_nested_enum']._options = None - _TESTMISSINGENUMVALUES.fields_by_name['packed_nested_enum']._serialized_options = b'\020\001' - _TESTENUMVALUES._serialized_start=88 - _TESTENUMVALUES._serialized_end=409 - _TESTENUMVALUES_NESTEDENUM._serialized_start=378 - _TESTENUMVALUES_NESTEDENUM._serialized_end=409 - _TESTMISSINGENUMVALUES._serialized_start=412 - _TESTMISSINGENUMVALUES._serialized_end=751 - _TESTMISSINGENUMVALUES_NESTEDENUM._serialized_start=730 - _TESTMISSINGENUMVALUES_NESTEDENUM._serialized_end=751 - _JUSTSTRING._serialized_start=753 - _JUSTSTRING._serialized_end=780 -# @@protoc_insertion_point(module_scope) diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/more_extensions_dynamic_pb2.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/more_extensions_dynamic_pb2.py deleted file mode 100644 index 0953706bac..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/more_extensions_dynamic_pb2.py +++ /dev/null @@ -1,29 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/protobuf/internal/more_extensions_dynamic.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.protobuf.internal import more_extensions_pb2 as google_dot_protobuf_dot_internal_dot_more__extensions__pb2 - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n6google/protobuf/internal/more_extensions_dynamic.proto\x12\x18google.protobuf.internal\x1a.google/protobuf/internal/more_extensions.proto\"\x1f\n\x12\x44ynamicMessageType\x12\t\n\x01\x61\x18\x01 \x01(\x05:J\n\x17\x64ynamic_int32_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x64 \x01(\x05:z\n\x19\x64ynamic_message_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x65 \x01(\x0b\x32,.google.protobuf.internal.DynamicMessageType:\x83\x01\n\"repeated_dynamic_message_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x66 \x03(\x0b\x32,.google.protobuf.internal.DynamicMessageType') - -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.more_extensions_dynamic_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - google_dot_protobuf_dot_internal_dot_more__extensions__pb2.ExtendedMessage.RegisterExtension(dynamic_int32_extension) - google_dot_protobuf_dot_internal_dot_more__extensions__pb2.ExtendedMessage.RegisterExtension(dynamic_message_extension) - google_dot_protobuf_dot_internal_dot_more__extensions__pb2.ExtendedMessage.RegisterExtension(repeated_dynamic_message_extension) - - DESCRIPTOR._options = None - _DYNAMICMESSAGETYPE._serialized_start=132 - _DYNAMICMESSAGETYPE._serialized_end=163 -# @@protoc_insertion_point(module_scope) diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/more_extensions_pb2.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/more_extensions_pb2.py deleted file mode 100644 index 1cfa1b7c8b..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/more_extensions_pb2.py +++ /dev/null @@ -1,41 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/protobuf/internal/more_extensions.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n.google/protobuf/internal/more_extensions.proto\x12\x18google.protobuf.internal\"\x99\x01\n\x0fTopLevelMessage\x12\x41\n\nsubmessage\x18\x01 \x01(\x0b\x32).google.protobuf.internal.ExtendedMessageB\x02(\x01\x12\x43\n\x0enested_message\x18\x02 \x01(\x0b\x32\'.google.protobuf.internal.NestedMessageB\x02(\x01\"R\n\rNestedMessage\x12\x41\n\nsubmessage\x18\x01 \x01(\x0b\x32).google.protobuf.internal.ExtendedMessageB\x02(\x01\"K\n\x0f\x45xtendedMessage\x12\x17\n\x0eoptional_int32\x18\xe9\x07 \x01(\x05\x12\x18\n\x0frepeated_string\x18\xea\x07 \x03(\t*\x05\x08\x01\x10\xe8\x07\"-\n\x0e\x46oreignMessage\x12\x1b\n\x13\x66oreign_message_int\x18\x01 \x01(\x05:I\n\x16optional_int_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x01 \x01(\x05:w\n\x1aoptional_message_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x02 \x01(\x0b\x32(.google.protobuf.internal.ForeignMessage:I\n\x16repeated_int_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x03 \x03(\x05:w\n\x1arepeated_message_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x04 \x03(\x0b\x32(.google.protobuf.internal.ForeignMessage') - -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.more_extensions_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - ExtendedMessage.RegisterExtension(optional_int_extension) - ExtendedMessage.RegisterExtension(optional_message_extension) - ExtendedMessage.RegisterExtension(repeated_int_extension) - ExtendedMessage.RegisterExtension(repeated_message_extension) - - DESCRIPTOR._options = None - _TOPLEVELMESSAGE.fields_by_name['submessage']._options = None - _TOPLEVELMESSAGE.fields_by_name['submessage']._serialized_options = b'(\001' - _TOPLEVELMESSAGE.fields_by_name['nested_message']._options = None - _TOPLEVELMESSAGE.fields_by_name['nested_message']._serialized_options = b'(\001' - _NESTEDMESSAGE.fields_by_name['submessage']._options = None - _NESTEDMESSAGE.fields_by_name['submessage']._serialized_options = b'(\001' - _TOPLEVELMESSAGE._serialized_start=77 - _TOPLEVELMESSAGE._serialized_end=230 - _NESTEDMESSAGE._serialized_start=232 - _NESTEDMESSAGE._serialized_end=314 - _EXTENDEDMESSAGE._serialized_start=316 - _EXTENDEDMESSAGE._serialized_end=391 - _FOREIGNMESSAGE._serialized_start=393 - _FOREIGNMESSAGE._serialized_end=438 -# @@protoc_insertion_point(module_scope) diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/more_messages_pb2.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/more_messages_pb2.py deleted file mode 100644 index d7f7115609..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/more_messages_pb2.py +++ /dev/null @@ -1,556 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/protobuf/internal/more_messages.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n,google/protobuf/internal/more_messages.proto\x12\x18google.protobuf.internal\"h\n\x10OutOfOrderFields\x12\x17\n\x0foptional_sint32\x18\x05 \x01(\x11\x12\x17\n\x0foptional_uint32\x18\x03 \x01(\r\x12\x16\n\x0eoptional_int32\x18\x01 \x01(\x05*\x04\x08\x04\x10\x05*\x04\x08\x02\x10\x03\"\xcd\x02\n\x05\x63lass\x12\x1b\n\tint_field\x18\x01 \x01(\x05R\x08json_int\x12\n\n\x02if\x18\x02 \x01(\x05\x12(\n\x02\x61s\x18\x03 \x01(\x0e\x32\x1c.google.protobuf.internal.is\x12\x30\n\nenum_field\x18\x04 \x01(\x0e\x32\x1c.google.protobuf.internal.is\x12>\n\x11nested_enum_field\x18\x05 \x01(\x0e\x32#.google.protobuf.internal.class.for\x12;\n\x0enested_message\x18\x06 \x01(\x0b\x32#.google.protobuf.internal.class.try\x1a\x1c\n\x03try\x12\r\n\x05\x66ield\x18\x01 \x01(\x05*\x06\x08\xe7\x07\x10\x90N\"\x1c\n\x03\x66or\x12\x0b\n\x07\x64\x65\x66\x61ult\x10\x00\x12\x08\n\x04True\x10\x01*\x06\x08\xe7\x07\x10\x90N\"?\n\x0b\x45xtendClass20\n\x06return\x12\x1f.google.protobuf.internal.class\x18\xea\x07 \x01(\x05\"~\n\x0fTestFullKeyword\x12:\n\x06\x66ield1\x18\x01 \x01(\x0b\x32*.google.protobuf.internal.OutOfOrderFields\x12/\n\x06\x66ield2\x18\x02 \x01(\x0b\x32\x1f.google.protobuf.internal.class\"\xa5\x0f\n\x11LotsNestedMessage\x1a\x04\n\x02\x42\x30\x1a\x04\n\x02\x42\x31\x1a\x04\n\x02\x42\x32\x1a\x04\n\x02\x42\x33\x1a\x04\n\x02\x42\x34\x1a\x04\n\x02\x42\x35\x1a\x04\n\x02\x42\x36\x1a\x04\n\x02\x42\x37\x1a\x04\n\x02\x42\x38\x1a\x04\n\x02\x42\x39\x1a\x05\n\x03\x42\x31\x30\x1a\x05\n\x03\x42\x31\x31\x1a\x05\n\x03\x42\x31\x32\x1a\x05\n\x03\x42\x31\x33\x1a\x05\n\x03\x42\x31\x34\x1a\x05\n\x03\x42\x31\x35\x1a\x05\n\x03\x42\x31\x36\x1a\x05\n\x03\x42\x31\x37\x1a\x05\n\x03\x42\x31\x38\x1a\x05\n\x03\x42\x31\x39\x1a\x05\n\x03\x42\x32\x30\x1a\x05\n\x03\x42\x32\x31\x1a\x05\n\x03\x42\x32\x32\x1a\x05\n\x03\x42\x32\x33\x1a\x05\n\x03\x42\x32\x34\x1a\x05\n\x03\x42\x32\x35\x1a\x05\n\x03\x42\x32\x36\x1a\x05\n\x03\x42\x32\x37\x1a\x05\n\x03\x42\x32\x38\x1a\x05\n\x03\x42\x32\x39\x1a\x05\n\x03\x42\x33\x30\x1a\x05\n\x03\x42\x33\x31\x1a\x05\n\x03\x42\x33\x32\x1a\x05\n\x03\x42\x33\x33\x1a\x05\n\x03\x42\x33\x34\x1a\x05\n\x03\x42\x33\x35\x1a\x05\n\x03\x42\x33\x36\x1a\x05\n\x03\x42\x33\x37\x1a\x05\n\x03\x42\x33\x38\x1a\x05\n\x03\x42\x33\x39\x1a\x05\n\x03\x42\x34\x30\x1a\x05\n\x03\x42\x34\x31\x1a\x05\n\x03\x42\x34\x32\x1a\x05\n\x03\x42\x34\x33\x1a\x05\n\x03\x42\x34\x34\x1a\x05\n\x03\x42\x34\x35\x1a\x05\n\x03\x42\x34\x36\x1a\x05\n\x03\x42\x34\x37\x1a\x05\n\x03\x42\x34\x38\x1a\x05\n\x03\x42\x34\x39\x1a\x05\n\x03\x42\x35\x30\x1a\x05\n\x03\x42\x35\x31\x1a\x05\n\x03\x42\x35\x32\x1a\x05\n\x03\x42\x35\x33\x1a\x05\n\x03\x42\x35\x34\x1a\x05\n\x03\x42\x35\x35\x1a\x05\n\x03\x42\x35\x36\x1a\x05\n\x03\x42\x35\x37\x1a\x05\n\x03\x42\x35\x38\x1a\x05\n\x03\x42\x35\x39\x1a\x05\n\x03\x42\x36\x30\x1a\x05\n\x03\x42\x36\x31\x1a\x05\n\x03\x42\x36\x32\x1a\x05\n\x03\x42\x36\x33\x1a\x05\n\x03\x42\x36\x34\x1a\x05\n\x03\x42\x36\x35\x1a\x05\n\x03\x42\x36\x36\x1a\x05\n\x03\x42\x36\x37\x1a\x05\n\x03\x42\x36\x38\x1a\x05\n\x03\x42\x36\x39\x1a\x05\n\x03\x42\x37\x30\x1a\x05\n\x03\x42\x37\x31\x1a\x05\n\x03\x42\x37\
x32\x1a\x05\n\x03\x42\x37\x33\x1a\x05\n\x03\x42\x37\x34\x1a\x05\n\x03\x42\x37\x35\x1a\x05\n\x03\x42\x37\x36\x1a\x05\n\x03\x42\x37\x37\x1a\x05\n\x03\x42\x37\x38\x1a\x05\n\x03\x42\x37\x39\x1a\x05\n\x03\x42\x38\x30\x1a\x05\n\x03\x42\x38\x31\x1a\x05\n\x03\x42\x38\x32\x1a\x05\n\x03\x42\x38\x33\x1a\x05\n\x03\x42\x38\x34\x1a\x05\n\x03\x42\x38\x35\x1a\x05\n\x03\x42\x38\x36\x1a\x05\n\x03\x42\x38\x37\x1a\x05\n\x03\x42\x38\x38\x1a\x05\n\x03\x42\x38\x39\x1a\x05\n\x03\x42\x39\x30\x1a\x05\n\x03\x42\x39\x31\x1a\x05\n\x03\x42\x39\x32\x1a\x05\n\x03\x42\x39\x33\x1a\x05\n\x03\x42\x39\x34\x1a\x05\n\x03\x42\x39\x35\x1a\x05\n\x03\x42\x39\x36\x1a\x05\n\x03\x42\x39\x37\x1a\x05\n\x03\x42\x39\x38\x1a\x05\n\x03\x42\x39\x39\x1a\x06\n\x04\x42\x31\x30\x30\x1a\x06\n\x04\x42\x31\x30\x31\x1a\x06\n\x04\x42\x31\x30\x32\x1a\x06\n\x04\x42\x31\x30\x33\x1a\x06\n\x04\x42\x31\x30\x34\x1a\x06\n\x04\x42\x31\x30\x35\x1a\x06\n\x04\x42\x31\x30\x36\x1a\x06\n\x04\x42\x31\x30\x37\x1a\x06\n\x04\x42\x31\x30\x38\x1a\x06\n\x04\x42\x31\x30\x39\x1a\x06\n\x04\x42\x31\x31\x30\x1a\x06\n\x04\x42\x31\x31\x31\x1a\x06\n\x04\x42\x31\x31\x32\x1a\x06\n\x04\x42\x31\x31\x33\x1a\x06\n\x04\x42\x31\x31\x34\x1a\x06\n\x04\x42\x31\x31\x35\x1a\x06\n\x04\x42\x31\x31\x36\x1a\x06\n\x04\x42\x31\x31\x37\x1a\x06\n\x04\x42\x31\x31\x38\x1a\x06\n\x04\x42\x31\x31\x39\x1a\x06\n\x04\x42\x31\x32\x30\x1a\x06\n\x04\x42\x31\x32\x31\x1a\x06\n\x04\x42\x31\x32\x32\x1a\x06\n\x04\x42\x31\x32\x33\x1a\x06\n\x04\x42\x31\x32\x34\x1a\x06\n\x04\x42\x31\x32\x35\x1a\x06\n\x04\x42\x31\x32\x36\x1a\x06\n\x04\x42\x31\x32\x37\x1a\x06\n\x04\x42\x31\x32\x38\x1a\x06\n\x04\x42\x31\x32\x39\x1a\x06\n\x04\x42\x31\x33\x30\x1a\x06\n\x04\x42\x31\x33\x31\x1a\x06\n\x04\x42\x31\x33\x32\x1a\x06\n\x04\x42\x31\x33\x33\x1a\x06\n\x04\x42\x31\x33\x34\x1a\x06\n\x04\x42\x31\x33\x35\x1a\x06\n\x04\x42\x31\x33\x36\x1a\x06\n\x04\x42\x31\x33\x37\x1a\x06\n\x04\x42\x31\x33\x38\x1a\x06\n\x04\x42\x31\x33\x39\x1a\x06\n\x04\x42\x31\x34\x30\x1a\x06\n\x04\x42\x31\x34\x31\x1a\x06\n\x04\x42\x31\x34\x32\x1a\x06\n\x04\x42\x31\x34\x33\x1a\x06\n\x04\x42\x31\x34\x34\x1a\x06\n\x04\x42\x31\x34\x35\x1a\x06\n\x04\x42\x31\x34\x36\x1a\x06\n\x04\x42\x31\x34\x37\x1a\x06\n\x04\x42\x31\x34\x38\x1a\x06\n\x04\x42\x31\x34\x39\x1a\x06\n\x04\x42\x31\x35\x30\x1a\x06\n\x04\x42\x31\x35\x31\x1a\x06\n\x04\x42\x31\x35\x32\x1a\x06\n\x04\x42\x31\x35\x33\x1a\x06\n\x04\x42\x31\x35\x34\x1a\x06\n\x04\x42\x31\x35\x35\x1a\x06\n\x04\x42\x31\x35\x36\x1a\x06\n\x04\x42\x31\x35\x37\x1a\x06\n\x04\x42\x31\x35\x38\x1a\x06\n\x04\x42\x31\x35\x39\x1a\x06\n\x04\x42\x31\x36\x30\x1a\x06\n\x04\x42\x31\x36\x31\x1a\x06\n\x04\x42\x31\x36\x32\x1a\x06\n\x04\x42\x31\x36\x33\x1a\x06\n\x04\x42\x31\x36\x34\x1a\x06\n\x04\x42\x31\x36\x35\x1a\x06\n\x04\x42\x31\x36\x36\x1a\x06\n\x04\x42\x31\x36\x37\x1a\x06\n\x04\x42\x31\x36\x38\x1a\x06\n\x04\x42\x31\x36\x39\x1a\x06\n\x04\x42\x31\x37\x30\x1a\x06\n\x04\x42\x31\x37\x31\x1a\x06\n\x04\x42\x31\x37\x32\x1a\x06\n\x04\x42\x31\x37\x33\x1a\x06\n\x04\x42\x31\x37\x34\x1a\x06\n\x04\x42\x31\x37\x35\x1a\x06\n\x04\x42\x31\x37\x36\x1a\x06\n\x04\x42\x31\x37\x37\x1a\x06\n\x04\x42\x31\x37\x38\x1a\x06\n\x04\x42\x31\x37\x39\x1a\x06\n\x04\x42\x31\x38\x30\x1a\x06\n\x04\x42\x31\x38\x31\x1a\x06\n\x04\x42\x31\x38\x32\x1a\x06\n\x04\x42\x31\x38\x33\x1a\x06\n\x04\x42\x31\x38\x34\x1a\x06\n\x04\x42\x31\x38\x35\x1a\x06\n\x04\x42\x31\x38\x36\x1a\x06\n\x04\x42\x31\x38\x37\x1a\x06\n\x04\x42\x31\x38\x38\x1a\x06\n\x04\x42\x31\x38\x39\x1a\x06\n\x04\x42\x31\x39\x30\x1a\x06\n\x04\x42\x31\x39\x31\x1a\x06\n\x04\x42\x31\x39\x32\x1a\x06\n\x04\x42\x31\x39\x33\x1a\x06\n\x04\x42\x31\x39\x34
\x1a\x06\n\x04\x42\x31\x39\x35\x1a\x06\n\x04\x42\x31\x39\x36\x1a\x06\n\x04\x42\x31\x39\x37\x1a\x06\n\x04\x42\x31\x39\x38\x1a\x06\n\x04\x42\x31\x39\x39\x1a\x06\n\x04\x42\x32\x30\x30\x1a\x06\n\x04\x42\x32\x30\x31\x1a\x06\n\x04\x42\x32\x30\x32\x1a\x06\n\x04\x42\x32\x30\x33\x1a\x06\n\x04\x42\x32\x30\x34\x1a\x06\n\x04\x42\x32\x30\x35\x1a\x06\n\x04\x42\x32\x30\x36\x1a\x06\n\x04\x42\x32\x30\x37\x1a\x06\n\x04\x42\x32\x30\x38\x1a\x06\n\x04\x42\x32\x30\x39\x1a\x06\n\x04\x42\x32\x31\x30\x1a\x06\n\x04\x42\x32\x31\x31\x1a\x06\n\x04\x42\x32\x31\x32\x1a\x06\n\x04\x42\x32\x31\x33\x1a\x06\n\x04\x42\x32\x31\x34\x1a\x06\n\x04\x42\x32\x31\x35\x1a\x06\n\x04\x42\x32\x31\x36\x1a\x06\n\x04\x42\x32\x31\x37\x1a\x06\n\x04\x42\x32\x31\x38\x1a\x06\n\x04\x42\x32\x31\x39\x1a\x06\n\x04\x42\x32\x32\x30\x1a\x06\n\x04\x42\x32\x32\x31\x1a\x06\n\x04\x42\x32\x32\x32\x1a\x06\n\x04\x42\x32\x32\x33\x1a\x06\n\x04\x42\x32\x32\x34\x1a\x06\n\x04\x42\x32\x32\x35\x1a\x06\n\x04\x42\x32\x32\x36\x1a\x06\n\x04\x42\x32\x32\x37\x1a\x06\n\x04\x42\x32\x32\x38\x1a\x06\n\x04\x42\x32\x32\x39\x1a\x06\n\x04\x42\x32\x33\x30\x1a\x06\n\x04\x42\x32\x33\x31\x1a\x06\n\x04\x42\x32\x33\x32\x1a\x06\n\x04\x42\x32\x33\x33\x1a\x06\n\x04\x42\x32\x33\x34\x1a\x06\n\x04\x42\x32\x33\x35\x1a\x06\n\x04\x42\x32\x33\x36\x1a\x06\n\x04\x42\x32\x33\x37\x1a\x06\n\x04\x42\x32\x33\x38\x1a\x06\n\x04\x42\x32\x33\x39\x1a\x06\n\x04\x42\x32\x34\x30\x1a\x06\n\x04\x42\x32\x34\x31\x1a\x06\n\x04\x42\x32\x34\x32\x1a\x06\n\x04\x42\x32\x34\x33\x1a\x06\n\x04\x42\x32\x34\x34\x1a\x06\n\x04\x42\x32\x34\x35\x1a\x06\n\x04\x42\x32\x34\x36\x1a\x06\n\x04\x42\x32\x34\x37\x1a\x06\n\x04\x42\x32\x34\x38\x1a\x06\n\x04\x42\x32\x34\x39\x1a\x06\n\x04\x42\x32\x35\x30\x1a\x06\n\x04\x42\x32\x35\x31\x1a\x06\n\x04\x42\x32\x35\x32\x1a\x06\n\x04\x42\x32\x35\x33\x1a\x06\n\x04\x42\x32\x35\x34\x1a\x06\n\x04\x42\x32\x35\x35*\x1b\n\x02is\x12\x0b\n\x07\x64\x65\x66\x61ult\x10\x00\x12\x08\n\x04\x65lse\x10\x01:C\n\x0foptional_uint64\x12*.google.protobuf.internal.OutOfOrderFields\x18\x04 \x01(\x04:B\n\x0eoptional_int64\x12*.google.protobuf.internal.OutOfOrderFields\x18\x02 \x01(\x03:2\n\x08\x63ontinue\x12\x1f.google.protobuf.internal.class\x18\xe9\x07 \x01(\x05:2\n\x04with\x12#.google.protobuf.internal.class.try\x18\xe9\x07 \x01(\x05') - -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.more_messages_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - OutOfOrderFields.RegisterExtension(optional_uint64) - OutOfOrderFields.RegisterExtension(optional_int64) - globals()['class'].RegisterExtension(globals()['continue']) - getattr(globals()['class'], 'try').RegisterExtension(globals()['with']) - globals()['class'].RegisterExtension(_EXTENDCLASS.extensions_by_name['return']) - - DESCRIPTOR._options = None - _IS._serialized_start=2669 - _IS._serialized_end=2696 - _OUTOFORDERFIELDS._serialized_start=74 - _OUTOFORDERFIELDS._serialized_end=178 - _CLASS._serialized_start=181 - _CLASS._serialized_end=514 - _CLASS_TRY._serialized_start=448 - _CLASS_TRY._serialized_end=476 - _CLASS_FOR._serialized_start=478 - _CLASS_FOR._serialized_end=506 - _EXTENDCLASS._serialized_start=516 - _EXTENDCLASS._serialized_end=579 - _TESTFULLKEYWORD._serialized_start=581 - _TESTFULLKEYWORD._serialized_end=707 - _LOTSNESTEDMESSAGE._serialized_start=710 - _LOTSNESTEDMESSAGE._serialized_end=2667 - _LOTSNESTEDMESSAGE_B0._serialized_start=731 - _LOTSNESTEDMESSAGE_B0._serialized_end=735 - _LOTSNESTEDMESSAGE_B1._serialized_start=737 - 
_LOTSNESTEDMESSAGE_B1._serialized_end=741 - _LOTSNESTEDMESSAGE_B2._serialized_start=743 - _LOTSNESTEDMESSAGE_B2._serialized_end=747 - _LOTSNESTEDMESSAGE_B3._serialized_start=749 - _LOTSNESTEDMESSAGE_B3._serialized_end=753 - _LOTSNESTEDMESSAGE_B4._serialized_start=755 - _LOTSNESTEDMESSAGE_B4._serialized_end=759 - _LOTSNESTEDMESSAGE_B5._serialized_start=761 - _LOTSNESTEDMESSAGE_B5._serialized_end=765 - _LOTSNESTEDMESSAGE_B6._serialized_start=767 - _LOTSNESTEDMESSAGE_B6._serialized_end=771 - _LOTSNESTEDMESSAGE_B7._serialized_start=773 - _LOTSNESTEDMESSAGE_B7._serialized_end=777 - _LOTSNESTEDMESSAGE_B8._serialized_start=779 - _LOTSNESTEDMESSAGE_B8._serialized_end=783 - _LOTSNESTEDMESSAGE_B9._serialized_start=785 - _LOTSNESTEDMESSAGE_B9._serialized_end=789 - _LOTSNESTEDMESSAGE_B10._serialized_start=791 - _LOTSNESTEDMESSAGE_B10._serialized_end=796 - _LOTSNESTEDMESSAGE_B11._serialized_start=798 - _LOTSNESTEDMESSAGE_B11._serialized_end=803 - _LOTSNESTEDMESSAGE_B12._serialized_start=805 - _LOTSNESTEDMESSAGE_B12._serialized_end=810 - _LOTSNESTEDMESSAGE_B13._serialized_start=812 - _LOTSNESTEDMESSAGE_B13._serialized_end=817 - _LOTSNESTEDMESSAGE_B14._serialized_start=819 - _LOTSNESTEDMESSAGE_B14._serialized_end=824 - _LOTSNESTEDMESSAGE_B15._serialized_start=826 - _LOTSNESTEDMESSAGE_B15._serialized_end=831 - _LOTSNESTEDMESSAGE_B16._serialized_start=833 - _LOTSNESTEDMESSAGE_B16._serialized_end=838 - _LOTSNESTEDMESSAGE_B17._serialized_start=840 - _LOTSNESTEDMESSAGE_B17._serialized_end=845 - _LOTSNESTEDMESSAGE_B18._serialized_start=847 - _LOTSNESTEDMESSAGE_B18._serialized_end=852 - _LOTSNESTEDMESSAGE_B19._serialized_start=854 - _LOTSNESTEDMESSAGE_B19._serialized_end=859 - _LOTSNESTEDMESSAGE_B20._serialized_start=861 - _LOTSNESTEDMESSAGE_B20._serialized_end=866 - _LOTSNESTEDMESSAGE_B21._serialized_start=868 - _LOTSNESTEDMESSAGE_B21._serialized_end=873 - _LOTSNESTEDMESSAGE_B22._serialized_start=875 - _LOTSNESTEDMESSAGE_B22._serialized_end=880 - _LOTSNESTEDMESSAGE_B23._serialized_start=882 - _LOTSNESTEDMESSAGE_B23._serialized_end=887 - _LOTSNESTEDMESSAGE_B24._serialized_start=889 - _LOTSNESTEDMESSAGE_B24._serialized_end=894 - _LOTSNESTEDMESSAGE_B25._serialized_start=896 - _LOTSNESTEDMESSAGE_B25._serialized_end=901 - _LOTSNESTEDMESSAGE_B26._serialized_start=903 - _LOTSNESTEDMESSAGE_B26._serialized_end=908 - _LOTSNESTEDMESSAGE_B27._serialized_start=910 - _LOTSNESTEDMESSAGE_B27._serialized_end=915 - _LOTSNESTEDMESSAGE_B28._serialized_start=917 - _LOTSNESTEDMESSAGE_B28._serialized_end=922 - _LOTSNESTEDMESSAGE_B29._serialized_start=924 - _LOTSNESTEDMESSAGE_B29._serialized_end=929 - _LOTSNESTEDMESSAGE_B30._serialized_start=931 - _LOTSNESTEDMESSAGE_B30._serialized_end=936 - _LOTSNESTEDMESSAGE_B31._serialized_start=938 - _LOTSNESTEDMESSAGE_B31._serialized_end=943 - _LOTSNESTEDMESSAGE_B32._serialized_start=945 - _LOTSNESTEDMESSAGE_B32._serialized_end=950 - _LOTSNESTEDMESSAGE_B33._serialized_start=952 - _LOTSNESTEDMESSAGE_B33._serialized_end=957 - _LOTSNESTEDMESSAGE_B34._serialized_start=959 - _LOTSNESTEDMESSAGE_B34._serialized_end=964 - _LOTSNESTEDMESSAGE_B35._serialized_start=966 - _LOTSNESTEDMESSAGE_B35._serialized_end=971 - _LOTSNESTEDMESSAGE_B36._serialized_start=973 - _LOTSNESTEDMESSAGE_B36._serialized_end=978 - _LOTSNESTEDMESSAGE_B37._serialized_start=980 - _LOTSNESTEDMESSAGE_B37._serialized_end=985 - _LOTSNESTEDMESSAGE_B38._serialized_start=987 - _LOTSNESTEDMESSAGE_B38._serialized_end=992 - _LOTSNESTEDMESSAGE_B39._serialized_start=994 - _LOTSNESTEDMESSAGE_B39._serialized_end=999 - 
_LOTSNESTEDMESSAGE_B40._serialized_start=1001 - _LOTSNESTEDMESSAGE_B40._serialized_end=1006 - _LOTSNESTEDMESSAGE_B41._serialized_start=1008 - _LOTSNESTEDMESSAGE_B41._serialized_end=1013 - _LOTSNESTEDMESSAGE_B42._serialized_start=1015 - _LOTSNESTEDMESSAGE_B42._serialized_end=1020 - _LOTSNESTEDMESSAGE_B43._serialized_start=1022 - _LOTSNESTEDMESSAGE_B43._serialized_end=1027 - _LOTSNESTEDMESSAGE_B44._serialized_start=1029 - _LOTSNESTEDMESSAGE_B44._serialized_end=1034 - _LOTSNESTEDMESSAGE_B45._serialized_start=1036 - _LOTSNESTEDMESSAGE_B45._serialized_end=1041 - _LOTSNESTEDMESSAGE_B46._serialized_start=1043 - _LOTSNESTEDMESSAGE_B46._serialized_end=1048 - _LOTSNESTEDMESSAGE_B47._serialized_start=1050 - _LOTSNESTEDMESSAGE_B47._serialized_end=1055 - _LOTSNESTEDMESSAGE_B48._serialized_start=1057 - _LOTSNESTEDMESSAGE_B48._serialized_end=1062 - _LOTSNESTEDMESSAGE_B49._serialized_start=1064 - _LOTSNESTEDMESSAGE_B49._serialized_end=1069 - _LOTSNESTEDMESSAGE_B50._serialized_start=1071 - _LOTSNESTEDMESSAGE_B50._serialized_end=1076 - _LOTSNESTEDMESSAGE_B51._serialized_start=1078 - _LOTSNESTEDMESSAGE_B51._serialized_end=1083 - _LOTSNESTEDMESSAGE_B52._serialized_start=1085 - _LOTSNESTEDMESSAGE_B52._serialized_end=1090 - _LOTSNESTEDMESSAGE_B53._serialized_start=1092 - _LOTSNESTEDMESSAGE_B53._serialized_end=1097 - _LOTSNESTEDMESSAGE_B54._serialized_start=1099 - _LOTSNESTEDMESSAGE_B54._serialized_end=1104 - _LOTSNESTEDMESSAGE_B55._serialized_start=1106 - _LOTSNESTEDMESSAGE_B55._serialized_end=1111 - _LOTSNESTEDMESSAGE_B56._serialized_start=1113 - _LOTSNESTEDMESSAGE_B56._serialized_end=1118 - _LOTSNESTEDMESSAGE_B57._serialized_start=1120 - _LOTSNESTEDMESSAGE_B57._serialized_end=1125 - _LOTSNESTEDMESSAGE_B58._serialized_start=1127 - _LOTSNESTEDMESSAGE_B58._serialized_end=1132 - _LOTSNESTEDMESSAGE_B59._serialized_start=1134 - _LOTSNESTEDMESSAGE_B59._serialized_end=1139 - _LOTSNESTEDMESSAGE_B60._serialized_start=1141 - _LOTSNESTEDMESSAGE_B60._serialized_end=1146 - _LOTSNESTEDMESSAGE_B61._serialized_start=1148 - _LOTSNESTEDMESSAGE_B61._serialized_end=1153 - _LOTSNESTEDMESSAGE_B62._serialized_start=1155 - _LOTSNESTEDMESSAGE_B62._serialized_end=1160 - _LOTSNESTEDMESSAGE_B63._serialized_start=1162 - _LOTSNESTEDMESSAGE_B63._serialized_end=1167 - _LOTSNESTEDMESSAGE_B64._serialized_start=1169 - _LOTSNESTEDMESSAGE_B64._serialized_end=1174 - _LOTSNESTEDMESSAGE_B65._serialized_start=1176 - _LOTSNESTEDMESSAGE_B65._serialized_end=1181 - _LOTSNESTEDMESSAGE_B66._serialized_start=1183 - _LOTSNESTEDMESSAGE_B66._serialized_end=1188 - _LOTSNESTEDMESSAGE_B67._serialized_start=1190 - _LOTSNESTEDMESSAGE_B67._serialized_end=1195 - _LOTSNESTEDMESSAGE_B68._serialized_start=1197 - _LOTSNESTEDMESSAGE_B68._serialized_end=1202 - _LOTSNESTEDMESSAGE_B69._serialized_start=1204 - _LOTSNESTEDMESSAGE_B69._serialized_end=1209 - _LOTSNESTEDMESSAGE_B70._serialized_start=1211 - _LOTSNESTEDMESSAGE_B70._serialized_end=1216 - _LOTSNESTEDMESSAGE_B71._serialized_start=1218 - _LOTSNESTEDMESSAGE_B71._serialized_end=1223 - _LOTSNESTEDMESSAGE_B72._serialized_start=1225 - _LOTSNESTEDMESSAGE_B72._serialized_end=1230 - _LOTSNESTEDMESSAGE_B73._serialized_start=1232 - _LOTSNESTEDMESSAGE_B73._serialized_end=1237 - _LOTSNESTEDMESSAGE_B74._serialized_start=1239 - _LOTSNESTEDMESSAGE_B74._serialized_end=1244 - _LOTSNESTEDMESSAGE_B75._serialized_start=1246 - _LOTSNESTEDMESSAGE_B75._serialized_end=1251 - _LOTSNESTEDMESSAGE_B76._serialized_start=1253 - _LOTSNESTEDMESSAGE_B76._serialized_end=1258 - _LOTSNESTEDMESSAGE_B77._serialized_start=1260 - 
_LOTSNESTEDMESSAGE_B77._serialized_end=1265 - _LOTSNESTEDMESSAGE_B78._serialized_start=1267 - _LOTSNESTEDMESSAGE_B78._serialized_end=1272 - _LOTSNESTEDMESSAGE_B79._serialized_start=1274 - _LOTSNESTEDMESSAGE_B79._serialized_end=1279 - _LOTSNESTEDMESSAGE_B80._serialized_start=1281 - _LOTSNESTEDMESSAGE_B80._serialized_end=1286 - _LOTSNESTEDMESSAGE_B81._serialized_start=1288 - _LOTSNESTEDMESSAGE_B81._serialized_end=1293 - _LOTSNESTEDMESSAGE_B82._serialized_start=1295 - _LOTSNESTEDMESSAGE_B82._serialized_end=1300 - _LOTSNESTEDMESSAGE_B83._serialized_start=1302 - _LOTSNESTEDMESSAGE_B83._serialized_end=1307 - _LOTSNESTEDMESSAGE_B84._serialized_start=1309 - _LOTSNESTEDMESSAGE_B84._serialized_end=1314 - _LOTSNESTEDMESSAGE_B85._serialized_start=1316 - _LOTSNESTEDMESSAGE_B85._serialized_end=1321 - _LOTSNESTEDMESSAGE_B86._serialized_start=1323 - _LOTSNESTEDMESSAGE_B86._serialized_end=1328 - _LOTSNESTEDMESSAGE_B87._serialized_start=1330 - _LOTSNESTEDMESSAGE_B87._serialized_end=1335 - _LOTSNESTEDMESSAGE_B88._serialized_start=1337 - _LOTSNESTEDMESSAGE_B88._serialized_end=1342 - _LOTSNESTEDMESSAGE_B89._serialized_start=1344 - _LOTSNESTEDMESSAGE_B89._serialized_end=1349 - _LOTSNESTEDMESSAGE_B90._serialized_start=1351 - _LOTSNESTEDMESSAGE_B90._serialized_end=1356 - _LOTSNESTEDMESSAGE_B91._serialized_start=1358 - _LOTSNESTEDMESSAGE_B91._serialized_end=1363 - _LOTSNESTEDMESSAGE_B92._serialized_start=1365 - _LOTSNESTEDMESSAGE_B92._serialized_end=1370 - _LOTSNESTEDMESSAGE_B93._serialized_start=1372 - _LOTSNESTEDMESSAGE_B93._serialized_end=1377 - _LOTSNESTEDMESSAGE_B94._serialized_start=1379 - _LOTSNESTEDMESSAGE_B94._serialized_end=1384 - _LOTSNESTEDMESSAGE_B95._serialized_start=1386 - _LOTSNESTEDMESSAGE_B95._serialized_end=1391 - _LOTSNESTEDMESSAGE_B96._serialized_start=1393 - _LOTSNESTEDMESSAGE_B96._serialized_end=1398 - _LOTSNESTEDMESSAGE_B97._serialized_start=1400 - _LOTSNESTEDMESSAGE_B97._serialized_end=1405 - _LOTSNESTEDMESSAGE_B98._serialized_start=1407 - _LOTSNESTEDMESSAGE_B98._serialized_end=1412 - _LOTSNESTEDMESSAGE_B99._serialized_start=1414 - _LOTSNESTEDMESSAGE_B99._serialized_end=1419 - _LOTSNESTEDMESSAGE_B100._serialized_start=1421 - _LOTSNESTEDMESSAGE_B100._serialized_end=1427 - _LOTSNESTEDMESSAGE_B101._serialized_start=1429 - _LOTSNESTEDMESSAGE_B101._serialized_end=1435 - _LOTSNESTEDMESSAGE_B102._serialized_start=1437 - _LOTSNESTEDMESSAGE_B102._serialized_end=1443 - _LOTSNESTEDMESSAGE_B103._serialized_start=1445 - _LOTSNESTEDMESSAGE_B103._serialized_end=1451 - _LOTSNESTEDMESSAGE_B104._serialized_start=1453 - _LOTSNESTEDMESSAGE_B104._serialized_end=1459 - _LOTSNESTEDMESSAGE_B105._serialized_start=1461 - _LOTSNESTEDMESSAGE_B105._serialized_end=1467 - _LOTSNESTEDMESSAGE_B106._serialized_start=1469 - _LOTSNESTEDMESSAGE_B106._serialized_end=1475 - _LOTSNESTEDMESSAGE_B107._serialized_start=1477 - _LOTSNESTEDMESSAGE_B107._serialized_end=1483 - _LOTSNESTEDMESSAGE_B108._serialized_start=1485 - _LOTSNESTEDMESSAGE_B108._serialized_end=1491 - _LOTSNESTEDMESSAGE_B109._serialized_start=1493 - _LOTSNESTEDMESSAGE_B109._serialized_end=1499 - _LOTSNESTEDMESSAGE_B110._serialized_start=1501 - _LOTSNESTEDMESSAGE_B110._serialized_end=1507 - _LOTSNESTEDMESSAGE_B111._serialized_start=1509 - _LOTSNESTEDMESSAGE_B111._serialized_end=1515 - _LOTSNESTEDMESSAGE_B112._serialized_start=1517 - _LOTSNESTEDMESSAGE_B112._serialized_end=1523 - _LOTSNESTEDMESSAGE_B113._serialized_start=1525 - _LOTSNESTEDMESSAGE_B113._serialized_end=1531 - _LOTSNESTEDMESSAGE_B114._serialized_start=1533 - _LOTSNESTEDMESSAGE_B114._serialized_end=1539 - 
_LOTSNESTEDMESSAGE_B115._serialized_start=1541 - _LOTSNESTEDMESSAGE_B115._serialized_end=1547 - _LOTSNESTEDMESSAGE_B116._serialized_start=1549 - _LOTSNESTEDMESSAGE_B116._serialized_end=1555 - _LOTSNESTEDMESSAGE_B117._serialized_start=1557 - _LOTSNESTEDMESSAGE_B117._serialized_end=1563 - _LOTSNESTEDMESSAGE_B118._serialized_start=1565 - _LOTSNESTEDMESSAGE_B118._serialized_end=1571 - _LOTSNESTEDMESSAGE_B119._serialized_start=1573 - _LOTSNESTEDMESSAGE_B119._serialized_end=1579 - _LOTSNESTEDMESSAGE_B120._serialized_start=1581 - _LOTSNESTEDMESSAGE_B120._serialized_end=1587 - _LOTSNESTEDMESSAGE_B121._serialized_start=1589 - _LOTSNESTEDMESSAGE_B121._serialized_end=1595 - _LOTSNESTEDMESSAGE_B122._serialized_start=1597 - _LOTSNESTEDMESSAGE_B122._serialized_end=1603 - _LOTSNESTEDMESSAGE_B123._serialized_start=1605 - _LOTSNESTEDMESSAGE_B123._serialized_end=1611 - _LOTSNESTEDMESSAGE_B124._serialized_start=1613 - _LOTSNESTEDMESSAGE_B124._serialized_end=1619 - _LOTSNESTEDMESSAGE_B125._serialized_start=1621 - _LOTSNESTEDMESSAGE_B125._serialized_end=1627 - _LOTSNESTEDMESSAGE_B126._serialized_start=1629 - _LOTSNESTEDMESSAGE_B126._serialized_end=1635 - _LOTSNESTEDMESSAGE_B127._serialized_start=1637 - _LOTSNESTEDMESSAGE_B127._serialized_end=1643 - _LOTSNESTEDMESSAGE_B128._serialized_start=1645 - _LOTSNESTEDMESSAGE_B128._serialized_end=1651 - _LOTSNESTEDMESSAGE_B129._serialized_start=1653 - _LOTSNESTEDMESSAGE_B129._serialized_end=1659 - _LOTSNESTEDMESSAGE_B130._serialized_start=1661 - _LOTSNESTEDMESSAGE_B130._serialized_end=1667 - _LOTSNESTEDMESSAGE_B131._serialized_start=1669 - _LOTSNESTEDMESSAGE_B131._serialized_end=1675 - _LOTSNESTEDMESSAGE_B132._serialized_start=1677 - _LOTSNESTEDMESSAGE_B132._serialized_end=1683 - _LOTSNESTEDMESSAGE_B133._serialized_start=1685 - _LOTSNESTEDMESSAGE_B133._serialized_end=1691 - _LOTSNESTEDMESSAGE_B134._serialized_start=1693 - _LOTSNESTEDMESSAGE_B134._serialized_end=1699 - _LOTSNESTEDMESSAGE_B135._serialized_start=1701 - _LOTSNESTEDMESSAGE_B135._serialized_end=1707 - _LOTSNESTEDMESSAGE_B136._serialized_start=1709 - _LOTSNESTEDMESSAGE_B136._serialized_end=1715 - _LOTSNESTEDMESSAGE_B137._serialized_start=1717 - _LOTSNESTEDMESSAGE_B137._serialized_end=1723 - _LOTSNESTEDMESSAGE_B138._serialized_start=1725 - _LOTSNESTEDMESSAGE_B138._serialized_end=1731 - _LOTSNESTEDMESSAGE_B139._serialized_start=1733 - _LOTSNESTEDMESSAGE_B139._serialized_end=1739 - _LOTSNESTEDMESSAGE_B140._serialized_start=1741 - _LOTSNESTEDMESSAGE_B140._serialized_end=1747 - _LOTSNESTEDMESSAGE_B141._serialized_start=1749 - _LOTSNESTEDMESSAGE_B141._serialized_end=1755 - _LOTSNESTEDMESSAGE_B142._serialized_start=1757 - _LOTSNESTEDMESSAGE_B142._serialized_end=1763 - _LOTSNESTEDMESSAGE_B143._serialized_start=1765 - _LOTSNESTEDMESSAGE_B143._serialized_end=1771 - _LOTSNESTEDMESSAGE_B144._serialized_start=1773 - _LOTSNESTEDMESSAGE_B144._serialized_end=1779 - _LOTSNESTEDMESSAGE_B145._serialized_start=1781 - _LOTSNESTEDMESSAGE_B145._serialized_end=1787 - _LOTSNESTEDMESSAGE_B146._serialized_start=1789 - _LOTSNESTEDMESSAGE_B146._serialized_end=1795 - _LOTSNESTEDMESSAGE_B147._serialized_start=1797 - _LOTSNESTEDMESSAGE_B147._serialized_end=1803 - _LOTSNESTEDMESSAGE_B148._serialized_start=1805 - _LOTSNESTEDMESSAGE_B148._serialized_end=1811 - _LOTSNESTEDMESSAGE_B149._serialized_start=1813 - _LOTSNESTEDMESSAGE_B149._serialized_end=1819 - _LOTSNESTEDMESSAGE_B150._serialized_start=1821 - _LOTSNESTEDMESSAGE_B150._serialized_end=1827 - _LOTSNESTEDMESSAGE_B151._serialized_start=1829 - _LOTSNESTEDMESSAGE_B151._serialized_end=1835 - 
_LOTSNESTEDMESSAGE_B152._serialized_start=1837 - _LOTSNESTEDMESSAGE_B152._serialized_end=1843 - _LOTSNESTEDMESSAGE_B153._serialized_start=1845 - _LOTSNESTEDMESSAGE_B153._serialized_end=1851 - _LOTSNESTEDMESSAGE_B154._serialized_start=1853 - _LOTSNESTEDMESSAGE_B154._serialized_end=1859 - _LOTSNESTEDMESSAGE_B155._serialized_start=1861 - _LOTSNESTEDMESSAGE_B155._serialized_end=1867 - _LOTSNESTEDMESSAGE_B156._serialized_start=1869 - _LOTSNESTEDMESSAGE_B156._serialized_end=1875 - _LOTSNESTEDMESSAGE_B157._serialized_start=1877 - _LOTSNESTEDMESSAGE_B157._serialized_end=1883 - _LOTSNESTEDMESSAGE_B158._serialized_start=1885 - _LOTSNESTEDMESSAGE_B158._serialized_end=1891 - _LOTSNESTEDMESSAGE_B159._serialized_start=1893 - _LOTSNESTEDMESSAGE_B159._serialized_end=1899 - _LOTSNESTEDMESSAGE_B160._serialized_start=1901 - _LOTSNESTEDMESSAGE_B160._serialized_end=1907 - _LOTSNESTEDMESSAGE_B161._serialized_start=1909 - _LOTSNESTEDMESSAGE_B161._serialized_end=1915 - _LOTSNESTEDMESSAGE_B162._serialized_start=1917 - _LOTSNESTEDMESSAGE_B162._serialized_end=1923 - _LOTSNESTEDMESSAGE_B163._serialized_start=1925 - _LOTSNESTEDMESSAGE_B163._serialized_end=1931 - _LOTSNESTEDMESSAGE_B164._serialized_start=1933 - _LOTSNESTEDMESSAGE_B164._serialized_end=1939 - _LOTSNESTEDMESSAGE_B165._serialized_start=1941 - _LOTSNESTEDMESSAGE_B165._serialized_end=1947 - _LOTSNESTEDMESSAGE_B166._serialized_start=1949 - _LOTSNESTEDMESSAGE_B166._serialized_end=1955 - _LOTSNESTEDMESSAGE_B167._serialized_start=1957 - _LOTSNESTEDMESSAGE_B167._serialized_end=1963 - _LOTSNESTEDMESSAGE_B168._serialized_start=1965 - _LOTSNESTEDMESSAGE_B168._serialized_end=1971 - _LOTSNESTEDMESSAGE_B169._serialized_start=1973 - _LOTSNESTEDMESSAGE_B169._serialized_end=1979 - _LOTSNESTEDMESSAGE_B170._serialized_start=1981 - _LOTSNESTEDMESSAGE_B170._serialized_end=1987 - _LOTSNESTEDMESSAGE_B171._serialized_start=1989 - _LOTSNESTEDMESSAGE_B171._serialized_end=1995 - _LOTSNESTEDMESSAGE_B172._serialized_start=1997 - _LOTSNESTEDMESSAGE_B172._serialized_end=2003 - _LOTSNESTEDMESSAGE_B173._serialized_start=2005 - _LOTSNESTEDMESSAGE_B173._serialized_end=2011 - _LOTSNESTEDMESSAGE_B174._serialized_start=2013 - _LOTSNESTEDMESSAGE_B174._serialized_end=2019 - _LOTSNESTEDMESSAGE_B175._serialized_start=2021 - _LOTSNESTEDMESSAGE_B175._serialized_end=2027 - _LOTSNESTEDMESSAGE_B176._serialized_start=2029 - _LOTSNESTEDMESSAGE_B176._serialized_end=2035 - _LOTSNESTEDMESSAGE_B177._serialized_start=2037 - _LOTSNESTEDMESSAGE_B177._serialized_end=2043 - _LOTSNESTEDMESSAGE_B178._serialized_start=2045 - _LOTSNESTEDMESSAGE_B178._serialized_end=2051 - _LOTSNESTEDMESSAGE_B179._serialized_start=2053 - _LOTSNESTEDMESSAGE_B179._serialized_end=2059 - _LOTSNESTEDMESSAGE_B180._serialized_start=2061 - _LOTSNESTEDMESSAGE_B180._serialized_end=2067 - _LOTSNESTEDMESSAGE_B181._serialized_start=2069 - _LOTSNESTEDMESSAGE_B181._serialized_end=2075 - _LOTSNESTEDMESSAGE_B182._serialized_start=2077 - _LOTSNESTEDMESSAGE_B182._serialized_end=2083 - _LOTSNESTEDMESSAGE_B183._serialized_start=2085 - _LOTSNESTEDMESSAGE_B183._serialized_end=2091 - _LOTSNESTEDMESSAGE_B184._serialized_start=2093 - _LOTSNESTEDMESSAGE_B184._serialized_end=2099 - _LOTSNESTEDMESSAGE_B185._serialized_start=2101 - _LOTSNESTEDMESSAGE_B185._serialized_end=2107 - _LOTSNESTEDMESSAGE_B186._serialized_start=2109 - _LOTSNESTEDMESSAGE_B186._serialized_end=2115 - _LOTSNESTEDMESSAGE_B187._serialized_start=2117 - _LOTSNESTEDMESSAGE_B187._serialized_end=2123 - _LOTSNESTEDMESSAGE_B188._serialized_start=2125 - _LOTSNESTEDMESSAGE_B188._serialized_end=2131 - 
_LOTSNESTEDMESSAGE_B189._serialized_start=2133 - _LOTSNESTEDMESSAGE_B189._serialized_end=2139 - _LOTSNESTEDMESSAGE_B190._serialized_start=2141 - _LOTSNESTEDMESSAGE_B190._serialized_end=2147 - _LOTSNESTEDMESSAGE_B191._serialized_start=2149 - _LOTSNESTEDMESSAGE_B191._serialized_end=2155 - _LOTSNESTEDMESSAGE_B192._serialized_start=2157 - _LOTSNESTEDMESSAGE_B192._serialized_end=2163 - _LOTSNESTEDMESSAGE_B193._serialized_start=2165 - _LOTSNESTEDMESSAGE_B193._serialized_end=2171 - _LOTSNESTEDMESSAGE_B194._serialized_start=2173 - _LOTSNESTEDMESSAGE_B194._serialized_end=2179 - _LOTSNESTEDMESSAGE_B195._serialized_start=2181 - _LOTSNESTEDMESSAGE_B195._serialized_end=2187 - _LOTSNESTEDMESSAGE_B196._serialized_start=2189 - _LOTSNESTEDMESSAGE_B196._serialized_end=2195 - _LOTSNESTEDMESSAGE_B197._serialized_start=2197 - _LOTSNESTEDMESSAGE_B197._serialized_end=2203 - _LOTSNESTEDMESSAGE_B198._serialized_start=2205 - _LOTSNESTEDMESSAGE_B198._serialized_end=2211 - _LOTSNESTEDMESSAGE_B199._serialized_start=2213 - _LOTSNESTEDMESSAGE_B199._serialized_end=2219 - _LOTSNESTEDMESSAGE_B200._serialized_start=2221 - _LOTSNESTEDMESSAGE_B200._serialized_end=2227 - _LOTSNESTEDMESSAGE_B201._serialized_start=2229 - _LOTSNESTEDMESSAGE_B201._serialized_end=2235 - _LOTSNESTEDMESSAGE_B202._serialized_start=2237 - _LOTSNESTEDMESSAGE_B202._serialized_end=2243 - _LOTSNESTEDMESSAGE_B203._serialized_start=2245 - _LOTSNESTEDMESSAGE_B203._serialized_end=2251 - _LOTSNESTEDMESSAGE_B204._serialized_start=2253 - _LOTSNESTEDMESSAGE_B204._serialized_end=2259 - _LOTSNESTEDMESSAGE_B205._serialized_start=2261 - _LOTSNESTEDMESSAGE_B205._serialized_end=2267 - _LOTSNESTEDMESSAGE_B206._serialized_start=2269 - _LOTSNESTEDMESSAGE_B206._serialized_end=2275 - _LOTSNESTEDMESSAGE_B207._serialized_start=2277 - _LOTSNESTEDMESSAGE_B207._serialized_end=2283 - _LOTSNESTEDMESSAGE_B208._serialized_start=2285 - _LOTSNESTEDMESSAGE_B208._serialized_end=2291 - _LOTSNESTEDMESSAGE_B209._serialized_start=2293 - _LOTSNESTEDMESSAGE_B209._serialized_end=2299 - _LOTSNESTEDMESSAGE_B210._serialized_start=2301 - _LOTSNESTEDMESSAGE_B210._serialized_end=2307 - _LOTSNESTEDMESSAGE_B211._serialized_start=2309 - _LOTSNESTEDMESSAGE_B211._serialized_end=2315 - _LOTSNESTEDMESSAGE_B212._serialized_start=2317 - _LOTSNESTEDMESSAGE_B212._serialized_end=2323 - _LOTSNESTEDMESSAGE_B213._serialized_start=2325 - _LOTSNESTEDMESSAGE_B213._serialized_end=2331 - _LOTSNESTEDMESSAGE_B214._serialized_start=2333 - _LOTSNESTEDMESSAGE_B214._serialized_end=2339 - _LOTSNESTEDMESSAGE_B215._serialized_start=2341 - _LOTSNESTEDMESSAGE_B215._serialized_end=2347 - _LOTSNESTEDMESSAGE_B216._serialized_start=2349 - _LOTSNESTEDMESSAGE_B216._serialized_end=2355 - _LOTSNESTEDMESSAGE_B217._serialized_start=2357 - _LOTSNESTEDMESSAGE_B217._serialized_end=2363 - _LOTSNESTEDMESSAGE_B218._serialized_start=2365 - _LOTSNESTEDMESSAGE_B218._serialized_end=2371 - _LOTSNESTEDMESSAGE_B219._serialized_start=2373 - _LOTSNESTEDMESSAGE_B219._serialized_end=2379 - _LOTSNESTEDMESSAGE_B220._serialized_start=2381 - _LOTSNESTEDMESSAGE_B220._serialized_end=2387 - _LOTSNESTEDMESSAGE_B221._serialized_start=2389 - _LOTSNESTEDMESSAGE_B221._serialized_end=2395 - _LOTSNESTEDMESSAGE_B222._serialized_start=2397 - _LOTSNESTEDMESSAGE_B222._serialized_end=2403 - _LOTSNESTEDMESSAGE_B223._serialized_start=2405 - _LOTSNESTEDMESSAGE_B223._serialized_end=2411 - _LOTSNESTEDMESSAGE_B224._serialized_start=2413 - _LOTSNESTEDMESSAGE_B224._serialized_end=2419 - _LOTSNESTEDMESSAGE_B225._serialized_start=2421 - _LOTSNESTEDMESSAGE_B225._serialized_end=2427 - 
_LOTSNESTEDMESSAGE_B226._serialized_start=2429 - _LOTSNESTEDMESSAGE_B226._serialized_end=2435 - _LOTSNESTEDMESSAGE_B227._serialized_start=2437 - _LOTSNESTEDMESSAGE_B227._serialized_end=2443 - _LOTSNESTEDMESSAGE_B228._serialized_start=2445 - _LOTSNESTEDMESSAGE_B228._serialized_end=2451 - _LOTSNESTEDMESSAGE_B229._serialized_start=2453 - _LOTSNESTEDMESSAGE_B229._serialized_end=2459 - _LOTSNESTEDMESSAGE_B230._serialized_start=2461 - _LOTSNESTEDMESSAGE_B230._serialized_end=2467 - _LOTSNESTEDMESSAGE_B231._serialized_start=2469 - _LOTSNESTEDMESSAGE_B231._serialized_end=2475 - _LOTSNESTEDMESSAGE_B232._serialized_start=2477 - _LOTSNESTEDMESSAGE_B232._serialized_end=2483 - _LOTSNESTEDMESSAGE_B233._serialized_start=2485 - _LOTSNESTEDMESSAGE_B233._serialized_end=2491 - _LOTSNESTEDMESSAGE_B234._serialized_start=2493 - _LOTSNESTEDMESSAGE_B234._serialized_end=2499 - _LOTSNESTEDMESSAGE_B235._serialized_start=2501 - _LOTSNESTEDMESSAGE_B235._serialized_end=2507 - _LOTSNESTEDMESSAGE_B236._serialized_start=2509 - _LOTSNESTEDMESSAGE_B236._serialized_end=2515 - _LOTSNESTEDMESSAGE_B237._serialized_start=2517 - _LOTSNESTEDMESSAGE_B237._serialized_end=2523 - _LOTSNESTEDMESSAGE_B238._serialized_start=2525 - _LOTSNESTEDMESSAGE_B238._serialized_end=2531 - _LOTSNESTEDMESSAGE_B239._serialized_start=2533 - _LOTSNESTEDMESSAGE_B239._serialized_end=2539 - _LOTSNESTEDMESSAGE_B240._serialized_start=2541 - _LOTSNESTEDMESSAGE_B240._serialized_end=2547 - _LOTSNESTEDMESSAGE_B241._serialized_start=2549 - _LOTSNESTEDMESSAGE_B241._serialized_end=2555 - _LOTSNESTEDMESSAGE_B242._serialized_start=2557 - _LOTSNESTEDMESSAGE_B242._serialized_end=2563 - _LOTSNESTEDMESSAGE_B243._serialized_start=2565 - _LOTSNESTEDMESSAGE_B243._serialized_end=2571 - _LOTSNESTEDMESSAGE_B244._serialized_start=2573 - _LOTSNESTEDMESSAGE_B244._serialized_end=2579 - _LOTSNESTEDMESSAGE_B245._serialized_start=2581 - _LOTSNESTEDMESSAGE_B245._serialized_end=2587 - _LOTSNESTEDMESSAGE_B246._serialized_start=2589 - _LOTSNESTEDMESSAGE_B246._serialized_end=2595 - _LOTSNESTEDMESSAGE_B247._serialized_start=2597 - _LOTSNESTEDMESSAGE_B247._serialized_end=2603 - _LOTSNESTEDMESSAGE_B248._serialized_start=2605 - _LOTSNESTEDMESSAGE_B248._serialized_end=2611 - _LOTSNESTEDMESSAGE_B249._serialized_start=2613 - _LOTSNESTEDMESSAGE_B249._serialized_end=2619 - _LOTSNESTEDMESSAGE_B250._serialized_start=2621 - _LOTSNESTEDMESSAGE_B250._serialized_end=2627 - _LOTSNESTEDMESSAGE_B251._serialized_start=2629 - _LOTSNESTEDMESSAGE_B251._serialized_end=2635 - _LOTSNESTEDMESSAGE_B252._serialized_start=2637 - _LOTSNESTEDMESSAGE_B252._serialized_end=2643 - _LOTSNESTEDMESSAGE_B253._serialized_start=2645 - _LOTSNESTEDMESSAGE_B253._serialized_end=2651 - _LOTSNESTEDMESSAGE_B254._serialized_start=2653 - _LOTSNESTEDMESSAGE_B254._serialized_end=2659 - _LOTSNESTEDMESSAGE_B255._serialized_start=2661 - _LOTSNESTEDMESSAGE_B255._serialized_end=2667 -# @@protoc_insertion_point(module_scope) diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/no_package_pb2.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/no_package_pb2.py deleted file mode 100644 index d46dee080a..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/no_package_pb2.py +++ /dev/null @@ -1,27 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
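-# Illustrative sketch (assumes this internal test helper were importable;
-# it is not a public API): the module below registers its serialized
-# FileDescriptorProto with the default descriptor pool, and the builder
-# then materializes `NoPackageMessage` and `NoPackageEnum` at module scope.
-#
-#     from google.protobuf.internal import no_package_pb2
-#
-#     msg = no_package_pb2.NoPackageMessage()
-#     msg.no_package_enum = no_package_pb2.NO_PACKAGE_VALUE_1
-#     data = msg.SerializeToString()
-#     parsed = no_package_pb2.NoPackageMessage.FromString(data)
-#     assert parsed.no_package_enum == no_package_pb2.NO_PACKAGE_VALUE_1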
-# source: google/protobuf/internal/no_package.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n)google/protobuf/internal/no_package.proto\";\n\x10NoPackageMessage\x12\'\n\x0fno_package_enum\x18\x01 \x01(\x0e\x32\x0e.NoPackageEnum*?\n\rNoPackageEnum\x12\x16\n\x12NO_PACKAGE_VALUE_0\x10\x00\x12\x16\n\x12NO_PACKAGE_VALUE_1\x10\x01') - -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.internal.no_package_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - - DESCRIPTOR._options = None - _NOPACKAGEENUM._serialized_start=106 - _NOPACKAGEENUM._serialized_end=169 - _NOPACKAGEMESSAGE._serialized_start=45 - _NOPACKAGEMESSAGE._serialized_end=104 -# @@protoc_insertion_point(module_scope) diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/python_message.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/python_message.py deleted file mode 100644 index 2921d5cb6e..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/python_message.py +++ /dev/null @@ -1,1539 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. -# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -# This code is meant to work on Python 2.4 and above only. -# -# TODO(robinson): Helpers for verbose, common checks like seeing if a -# descriptor's cpp_type is CPPTYPE_MESSAGE. - -"""Contains a metaclass and helper functions used to create -protocol message classes from Descriptor objects at runtime. - -Recall that a metaclass is the "type" of a class. 
-(A class is to a metaclass what an instance is to a class.) - -In this case, we use the GeneratedProtocolMessageType metaclass -to inject all the useful functionality into the classes -output by the protocol compiler at compile-time. - -The upshot of all this is that the real implementation -details for ALL pure-Python protocol buffers are *here in -this file*. -""" - -__author__ = 'robinson@google.com (Will Robinson)' - -from io import BytesIO -import struct -import sys -import weakref - -# We use "as" to avoid name collisions with variables. -from google.protobuf.internal import api_implementation -from google.protobuf.internal import containers -from google.protobuf.internal import decoder -from google.protobuf.internal import encoder -from google.protobuf.internal import enum_type_wrapper -from google.protobuf.internal import extension_dict -from google.protobuf.internal import message_listener as message_listener_mod -from google.protobuf.internal import type_checkers -from google.protobuf.internal import well_known_types -from google.protobuf.internal import wire_format -from google.protobuf import descriptor as descriptor_mod -from google.protobuf import message as message_mod -from google.protobuf import text_format - -_FieldDescriptor = descriptor_mod.FieldDescriptor -_AnyFullTypeName = 'google.protobuf.Any' -_ExtensionDict = extension_dict._ExtensionDict - -class GeneratedProtocolMessageType(type): - - """Metaclass for protocol message classes created at runtime from Descriptors. - - We add implementations for all methods described in the Message class. We - also create properties to allow getting/setting all fields in the protocol - message. Finally, we create slots to prevent users from accidentally - "setting" nonexistent fields in the protocol message, which then wouldn't get - serialized / deserialized properly. - - The protocol compiler currently uses this metaclass to create protocol - message classes at runtime. Clients can also manually create their own - classes at runtime, as in this example: - - mydescriptor = Descriptor(.....) - factory = symbol_database.Default() - factory.pool.AddDescriptor(mydescriptor) - MyProtoClass = factory.GetPrototype(mydescriptor) - myproto_instance = MyProtoClass() - myproto.foo_field = 23 - ... - """ - - # Must be consistent with the protocol-compiler code in - # proto2/compiler/internal/generator.*. - _DESCRIPTOR_KEY = 'DESCRIPTOR' - - def __new__(cls, name, bases, dictionary): - """Custom allocation for runtime-generated class types. - - We override __new__ because this is apparently the only place - where we can meaningfully set __slots__ on the class we're creating(?). - (The interplay between metaclasses and slots is not very well-documented). - - Args: - name: Name of the class (ignored, but required by the - metaclass protocol). - bases: Base classes of the class we're constructing. - (Should be message.Message). We ignore this field, but - it's required by the metaclass protocol - dictionary: The class dictionary of the class we're - constructing. dictionary[_DESCRIPTOR_KEY] must contain - a Descriptor object describing this protocol message - type. - - Returns: - Newly-allocated class. - - Raises: - RuntimeError: Generated code only work with python cpp extension. 
- """ - descriptor = dictionary[GeneratedProtocolMessageType._DESCRIPTOR_KEY] - - if isinstance(descriptor, str): - raise RuntimeError('The generated code only work with python cpp ' - 'extension, but it is using pure python runtime.') - - # If a concrete class already exists for this descriptor, don't try to - # create another. Doing so will break any messages that already exist with - # the existing class. - # - # The C++ implementation appears to have its own internal `PyMessageFactory` - # to achieve similar results. - # - # This most commonly happens in `text_format.py` when using descriptors from - # a custom pool; it calls symbol_database.Global().getPrototype() on a - # descriptor which already has an existing concrete class. - new_class = getattr(descriptor, '_concrete_class', None) - if new_class: - return new_class - - if descriptor.full_name in well_known_types.WKTBASES: - bases += (well_known_types.WKTBASES[descriptor.full_name],) - _AddClassAttributesForNestedExtensions(descriptor, dictionary) - _AddSlots(descriptor, dictionary) - - superclass = super(GeneratedProtocolMessageType, cls) - new_class = superclass.__new__(cls, name, bases, dictionary) - return new_class - - def __init__(cls, name, bases, dictionary): - """Here we perform the majority of our work on the class. - We add enum getters, an __init__ method, implementations - of all Message methods, and properties for all fields - in the protocol type. - - Args: - name: Name of the class (ignored, but required by the - metaclass protocol). - bases: Base classes of the class we're constructing. - (Should be message.Message). We ignore this field, but - it's required by the metaclass protocol - dictionary: The class dictionary of the class we're - constructing. dictionary[_DESCRIPTOR_KEY] must contain - a Descriptor object describing this protocol message - type. - """ - descriptor = dictionary[GeneratedProtocolMessageType._DESCRIPTOR_KEY] - - # If this is an _existing_ class looked up via `_concrete_class` in the - # __new__ method above, then we don't need to re-initialize anything. - existing_class = getattr(descriptor, '_concrete_class', None) - if existing_class: - assert existing_class is cls, ( - 'Duplicate `GeneratedProtocolMessageType` created for descriptor %r' - % (descriptor.full_name)) - return - - cls._decoders_by_tag = {} - if (descriptor.has_options and - descriptor.GetOptions().message_set_wire_format): - cls._decoders_by_tag[decoder.MESSAGE_SET_ITEM_TAG] = ( - decoder.MessageSetItemDecoder(descriptor), None) - - # Attach stuff to each FieldDescriptor for quick lookup later on. - for field in descriptor.fields: - _AttachFieldHelpers(cls, field) - - descriptor._concrete_class = cls # pylint: disable=protected-access - _AddEnumValues(descriptor, cls) - _AddInitMethod(descriptor, cls) - _AddPropertiesForFields(descriptor, cls) - _AddPropertiesForExtensions(descriptor, cls) - _AddStaticMethods(cls) - _AddMessageMethods(descriptor, cls) - _AddPrivateHelperMethods(descriptor, cls) - - superclass = super(GeneratedProtocolMessageType, cls) - superclass.__init__(name, bases, dictionary) - - -# Stateless helpers for GeneratedProtocolMessageType below. -# Outside clients should not access these directly. -# -# I opted not to make any of these methods on the metaclass, to make it more -# clear that I'm not really using any state there and to keep clients from -# thinking that they have direct access to these construction helpers. 
- - -def _PropertyName(proto_field_name): - """Returns the name of the public property attribute which - clients can use to get and (in some cases) set the value - of a protocol message field. - - Args: - proto_field_name: The protocol message field name, exactly - as it appears (or would appear) in a .proto file. - """ - # TODO(robinson): Escape Python keywords (e.g., yield), and test this support. - # nnorwitz makes my day by writing: - # """ - # FYI. See the keyword module in the stdlib. This could be as simple as: - # - # if keyword.iskeyword(proto_field_name): - # return proto_field_name + "_" - # return proto_field_name - # """ - # Kenton says: The above is a BAD IDEA. People rely on being able to use - # getattr() and setattr() to reflectively manipulate field values. If we - # rename the properties, then every such user has to also make sure to apply - # the same transformation. Note that currently if you name a field "yield", - # you can still access it just fine using getattr/setattr -- it's not even - # that cumbersome to do so. - # TODO(kenton): Remove this method entirely if/when everyone agrees with my - # position. - return proto_field_name - - -def _AddSlots(message_descriptor, dictionary): - """Adds a __slots__ entry to dictionary, containing the names of all valid - attributes for this message type. - - Args: - message_descriptor: A Descriptor instance describing this message type. - dictionary: Class dictionary to which we'll add a '__slots__' entry. - """ - dictionary['__slots__'] = ['_cached_byte_size', - '_cached_byte_size_dirty', - '_fields', - '_unknown_fields', - '_unknown_field_set', - '_is_present_in_parent', - '_listener', - '_listener_for_children', - '__weakref__', - '_oneofs'] - - -def _IsMessageSetExtension(field): - return (field.is_extension and - field.containing_type.has_options and - field.containing_type.GetOptions().message_set_wire_format and - field.type == _FieldDescriptor.TYPE_MESSAGE and - field.label == _FieldDescriptor.LABEL_OPTIONAL) - - -def _IsMapField(field): - return (field.type == _FieldDescriptor.TYPE_MESSAGE and - field.message_type.has_options and - field.message_type.GetOptions().map_entry) - - -def _IsMessageMapField(field): - value_type = field.message_type.fields_by_name['value'] - return value_type.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE - - -def _AttachFieldHelpers(cls, field_descriptor): - is_repeated = (field_descriptor.label == _FieldDescriptor.LABEL_REPEATED) - is_packable = (is_repeated and - wire_format.IsTypePackable(field_descriptor.type)) - is_proto3 = field_descriptor.containing_type.syntax == 'proto3' - if not is_packable: - is_packed = False - elif field_descriptor.containing_type.syntax == 'proto2': - is_packed = (field_descriptor.has_options and - field_descriptor.GetOptions().packed) - else: - has_packed_false = (field_descriptor.has_options and - field_descriptor.GetOptions().HasField('packed') and - field_descriptor.GetOptions().packed == False) - is_packed = not has_packed_false - is_map_entry = _IsMapField(field_descriptor) - - if is_map_entry: - field_encoder = encoder.MapEncoder(field_descriptor) - sizer = encoder.MapSizer(field_descriptor, - _IsMessageMapField(field_descriptor)) - elif _IsMessageSetExtension(field_descriptor): - field_encoder = encoder.MessageSetItemEncoder(field_descriptor.number) - sizer = encoder.MessageSetItemSizer(field_descriptor.number) - else: - field_encoder = type_checkers.TYPE_TO_ENCODER[field_descriptor.type]( - field_descriptor.number, is_repeated, is_packed) - sizer = 
type_checkers.TYPE_TO_SIZER[field_descriptor.type]( - field_descriptor.number, is_repeated, is_packed) - - field_descriptor._encoder = field_encoder - field_descriptor._sizer = sizer - field_descriptor._default_constructor = _DefaultValueConstructorForField( - field_descriptor) - - def AddDecoder(wiretype, is_packed): - tag_bytes = encoder.TagBytes(field_descriptor.number, wiretype) - decode_type = field_descriptor.type - if (decode_type == _FieldDescriptor.TYPE_ENUM and - type_checkers.SupportsOpenEnums(field_descriptor)): - decode_type = _FieldDescriptor.TYPE_INT32 - - oneof_descriptor = None - clear_if_default = False - if field_descriptor.containing_oneof is not None: - oneof_descriptor = field_descriptor - elif (is_proto3 and not is_repeated and - field_descriptor.cpp_type != _FieldDescriptor.CPPTYPE_MESSAGE): - clear_if_default = True - - if is_map_entry: - is_message_map = _IsMessageMapField(field_descriptor) - - field_decoder = decoder.MapDecoder( - field_descriptor, _GetInitializeDefaultForMap(field_descriptor), - is_message_map) - elif decode_type == _FieldDescriptor.TYPE_STRING: - field_decoder = decoder.StringDecoder( - field_descriptor.number, is_repeated, is_packed, - field_descriptor, field_descriptor._default_constructor, - clear_if_default) - elif field_descriptor.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: - field_decoder = type_checkers.TYPE_TO_DECODER[decode_type]( - field_descriptor.number, is_repeated, is_packed, - field_descriptor, field_descriptor._default_constructor) - else: - field_decoder = type_checkers.TYPE_TO_DECODER[decode_type]( - field_descriptor.number, is_repeated, is_packed, - # pylint: disable=protected-access - field_descriptor, field_descriptor._default_constructor, - clear_if_default) - - cls._decoders_by_tag[tag_bytes] = (field_decoder, oneof_descriptor) - - AddDecoder(type_checkers.FIELD_TYPE_TO_WIRE_TYPE[field_descriptor.type], - False) - - if is_repeated and wire_format.IsTypePackable(field_descriptor.type): - # To support wire compatibility of adding packed = true, add a decoder for - # packed values regardless of the field's options. - AddDecoder(wire_format.WIRETYPE_LENGTH_DELIMITED, True) - - -def _AddClassAttributesForNestedExtensions(descriptor, dictionary): - extensions = descriptor.extensions_by_name - for extension_name, extension_field in extensions.items(): - assert extension_name not in dictionary - dictionary[extension_name] = extension_field - - -def _AddEnumValues(descriptor, cls): - """Sets class-level attributes for all enum fields defined in this message. - - Also exporting a class-level object that can name enum values. - - Args: - descriptor: Descriptor object for this message type. - cls: Class we're constructing for this message type. 
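-
- Example (illustrative): for a nested `enum Color { RED = 0; }` inside
- message `M`, this sets `M.Color` (an EnumTypeWrapper) and `M.RED = 0`.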
- """ - for enum_type in descriptor.enum_types: - setattr(cls, enum_type.name, enum_type_wrapper.EnumTypeWrapper(enum_type)) - for enum_value in enum_type.values: - setattr(cls, enum_value.name, enum_value.number) - - -def _GetInitializeDefaultForMap(field): - if field.label != _FieldDescriptor.LABEL_REPEATED: - raise ValueError('map_entry set on non-repeated field %s' % ( - field.name)) - fields_by_name = field.message_type.fields_by_name - key_checker = type_checkers.GetTypeChecker(fields_by_name['key']) - - value_field = fields_by_name['value'] - if _IsMessageMapField(field): - def MakeMessageMapDefault(message): - return containers.MessageMap( - message._listener_for_children, value_field.message_type, key_checker, - field.message_type) - return MakeMessageMapDefault - else: - value_checker = type_checkers.GetTypeChecker(value_field) - def MakePrimitiveMapDefault(message): - return containers.ScalarMap( - message._listener_for_children, key_checker, value_checker, - field.message_type) - return MakePrimitiveMapDefault - -def _DefaultValueConstructorForField(field): - """Returns a function which returns a default value for a field. - - Args: - field: FieldDescriptor object for this field. - - The returned function has one argument: - message: Message instance containing this field, or a weakref proxy - of same. - - That function in turn returns a default value for this field. The default - value may refer back to |message| via a weak reference. - """ - - if _IsMapField(field): - return _GetInitializeDefaultForMap(field) - - if field.label == _FieldDescriptor.LABEL_REPEATED: - if field.has_default_value and field.default_value != []: - raise ValueError('Repeated field default value not empty list: %s' % ( - field.default_value)) - if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: - # We can't look at _concrete_class yet since it might not have - # been set. (Depends on order in which we initialize the classes). - message_type = field.message_type - def MakeRepeatedMessageDefault(message): - return containers.RepeatedCompositeFieldContainer( - message._listener_for_children, field.message_type) - return MakeRepeatedMessageDefault - else: - type_checker = type_checkers.GetTypeChecker(field) - def MakeRepeatedScalarDefault(message): - return containers.RepeatedScalarFieldContainer( - message._listener_for_children, type_checker) - return MakeRepeatedScalarDefault - - if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: - # _concrete_class may not yet be initialized. - message_type = field.message_type - def MakeSubMessageDefault(message): - assert getattr(message_type, '_concrete_class', None), ( - 'Uninitialized concrete class found for field %r (message type %r)' - % (field.full_name, message_type.full_name)) - result = message_type._concrete_class() - result._SetListener( - _OneofListener(message, field) - if field.containing_oneof is not None - else message._listener_for_children) - return result - return MakeSubMessageDefault - - def MakeScalarDefault(message): - # TODO(protobuf-team): This may be broken since there may not be - # default_value. Combine with has_default_value somehow. 
- return field.default_value - return MakeScalarDefault - - -def _ReraiseTypeErrorWithFieldName(message_name, field_name): - """Re-raise the currently-handled TypeError with the field name added.""" - exc = sys.exc_info()[1] - if len(exc.args) == 1 and type(exc) is TypeError: - # simple TypeError; add field name to exception message - exc = TypeError('%s for field %s.%s' % (str(exc), message_name, field_name)) - - # re-raise possibly-amended exception with original traceback: - raise exc.with_traceback(sys.exc_info()[2]) - - -def _AddInitMethod(message_descriptor, cls): - """Adds an __init__ method to cls.""" - - def _GetIntegerEnumValue(enum_type, value): - """Convert a string or integer enum value to an integer. - - If the value is a string, it is converted to the enum value in - enum_type with the same name. If the value is not a string, it's - returned as-is. (No conversion or bounds-checking is done.) - """ - if isinstance(value, str): - try: - return enum_type.values_by_name[value].number - except KeyError: - raise ValueError('Enum type %s: unknown label "%s"' % ( - enum_type.full_name, value)) - return value - - def init(self, **kwargs): - self._cached_byte_size = 0 - self._cached_byte_size_dirty = len(kwargs) > 0 - self._fields = {} - # Contains a mapping from oneof field descriptors to the descriptor - # of the currently set field in that oneof field. - self._oneofs = {} - - # _unknown_fields is () when empty for efficiency, and will be turned into - # a list if fields are added. - self._unknown_fields = () - # _unknown_field_set is None when empty for efficiency, and will be - # turned into UnknownFieldSet struct if fields are added. - self._unknown_field_set = None # pylint: disable=protected-access - self._is_present_in_parent = False - self._listener = message_listener_mod.NullMessageListener() - self._listener_for_children = _Listener(self) - for field_name, field_value in kwargs.items(): - field = _GetFieldByName(message_descriptor, field_name) - if field is None: - raise TypeError('%s() got an unexpected keyword argument "%s"' % - (message_descriptor.name, field_name)) - if field_value is None: - # field=None is the same as no field at all. 
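- # (Illustrative kwarg forms this __init__ accepts, with invented names:
- #  Msg(enum_field='SOME_LABEL') resolves the label to its number,
- #  Msg(sub_message=dict(x=1)) builds the submessage from a dict, and
- #  Msg(repeated_msgs=[dict(x=1), other_msg]) mixes dicts and messages,
- #  as the branches below show.)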
- continue - if field.label == _FieldDescriptor.LABEL_REPEATED: - copy = field._default_constructor(self) - if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: # Composite - if _IsMapField(field): - if _IsMessageMapField(field): - for key in field_value: - copy[key].MergeFrom(field_value[key]) - else: - copy.update(field_value) - else: - for val in field_value: - if isinstance(val, dict): - copy.add(**val) - else: - copy.add().MergeFrom(val) - else: # Scalar - if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM: - field_value = [_GetIntegerEnumValue(field.enum_type, val) - for val in field_value] - copy.extend(field_value) - self._fields[field] = copy - elif field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: - copy = field._default_constructor(self) - new_val = field_value - if isinstance(field_value, dict): - new_val = field.message_type._concrete_class(**field_value) - try: - copy.MergeFrom(new_val) - except TypeError: - _ReraiseTypeErrorWithFieldName(message_descriptor.name, field_name) - self._fields[field] = copy - else: - if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM: - field_value = _GetIntegerEnumValue(field.enum_type, field_value) - try: - setattr(self, field_name, field_value) - except TypeError: - _ReraiseTypeErrorWithFieldName(message_descriptor.name, field_name) - - init.__module__ = None - init.__doc__ = None - cls.__init__ = init - - -def _GetFieldByName(message_descriptor, field_name): - """Returns a field descriptor by field name. - - Args: - message_descriptor: A Descriptor describing all fields in message. - field_name: The name of the field to retrieve. - Returns: - The field descriptor associated with the field name. - """ - try: - return message_descriptor.fields_by_name[field_name] - except KeyError: - raise ValueError('Protocol message %s has no "%s" field.' % - (message_descriptor.name, field_name)) - - -def _AddPropertiesForFields(descriptor, cls): - """Adds properties for all fields in this protocol message type.""" - for field in descriptor.fields: - _AddPropertiesForField(field, cls) - - if descriptor.is_extendable: - # _ExtensionDict is just an adaptor with no state so we allocate a new one - # every time it is accessed. - cls.Extensions = property(lambda self: _ExtensionDict(self)) - - -def _AddPropertiesForField(field, cls): - """Adds a public property for a protocol message field. - Clients can use this property to get and (in the case - of non-repeated scalar fields) directly set the value - of a protocol message field. - - Args: - field: A FieldDescriptor for this field. - cls: The class we're constructing. - """ - # Catch it if we add other types that we should - # handle specially here. - assert _FieldDescriptor.MAX_CPPTYPE == 10 - - constant_name = field.name.upper() + '_FIELD_NUMBER' - setattr(cls, constant_name, field.number) - - if field.label == _FieldDescriptor.LABEL_REPEATED: - _AddPropertiesForRepeatedField(field, cls) - elif field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: - _AddPropertiesForNonRepeatedCompositeField(field, cls) - else: - _AddPropertiesForNonRepeatedScalarField(field, cls) - - -class _FieldProperty(property): - __slots__ = ('DESCRIPTOR',) - - def __init__(self, descriptor, getter, setter, doc): - property.__init__(self, getter, setter, doc=doc) - self.DESCRIPTOR = descriptor - - -def _AddPropertiesForRepeatedField(field, cls): - """Adds a public property for a "repeated" protocol message field. 
Clients - can use this property to get the value of the field, which will be either a - RepeatedScalarFieldContainer or RepeatedCompositeFieldContainer (see - below). - - Note that when clients add values to these containers, we perform - type-checking in the case of repeated scalar fields, and we also set any - necessary "has" bits as a side-effect. - - Args: - field: A FieldDescriptor for this field. - cls: The class we're constructing. - """ - proto_field_name = field.name - property_name = _PropertyName(proto_field_name) - - def getter(self): - field_value = self._fields.get(field) - if field_value is None: - # Construct a new object to represent this field. - field_value = field._default_constructor(self) - - # Atomically check if another thread has preempted us and, if not, swap - # in the new object we just created. If someone has preempted us, we - # take that object and discard ours. - # WARNING: We are relying on setdefault() being atomic. This is true - # in CPython but we haven't investigated others. This warning appears - # in several other locations in this file. - field_value = self._fields.setdefault(field, field_value) - return field_value - getter.__module__ = None - getter.__doc__ = 'Getter for %s.' % proto_field_name - - # We define a setter just so we can throw an exception with a more - # helpful error message. - def setter(self, new_value): - raise AttributeError('Assignment not allowed to repeated field ' - '"%s" in protocol message object.' % proto_field_name) - - doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name - setattr(cls, property_name, _FieldProperty(field, getter, setter, doc=doc)) - - -def _AddPropertiesForNonRepeatedScalarField(field, cls): - """Adds a public property for a nonrepeated, scalar protocol message field. - Clients can use this property to get and directly set the value of the field. - Note that when the client sets the value of a field by using this property, - all necessary "has" bits are set as a side-effect, and we also perform - type-checking. - - Args: - field: A FieldDescriptor for this field. - cls: The class we're constructing. - """ - proto_field_name = field.name - property_name = _PropertyName(proto_field_name) - type_checker = type_checkers.GetTypeChecker(field) - default_value = field.default_value - is_proto3 = field.containing_type.syntax == 'proto3' - - def getter(self): - # TODO(protobuf-team): This may be broken since there may not be - # default_value. Combine with has_default_value somehow. - return self._fields.get(field, default_value) - getter.__module__ = None - getter.__doc__ = 'Getter for %s.' % proto_field_name - - clear_when_set_to_default = is_proto3 and not field.containing_oneof - - def field_setter(self, new_value): - # pylint: disable=protected-access - # Testing the value for truthiness captures all of the proto3 defaults - # (0, 0.0, enum 0, and False). - try: - new_value = type_checker.CheckValue(new_value) - except TypeError as e: - raise TypeError( - 'Cannot set %s to %.1024r: %s' % (field.full_name, new_value, e)) - if clear_when_set_to_default and not new_value: - self._fields.pop(field, None) - else: - self._fields[field] = new_value - # Check _cached_byte_size_dirty inline to improve performance, since scalar - # setters are called frequently. 
- if not self._cached_byte_size_dirty: - self._Modified() - - if field.containing_oneof: - def setter(self, new_value): - field_setter(self, new_value) - self._UpdateOneofState(field) - else: - setter = field_setter - - setter.__module__ = None - setter.__doc__ = 'Setter for %s.' % proto_field_name - - # Add a property to encapsulate the getter/setter. - doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name - setattr(cls, property_name, _FieldProperty(field, getter, setter, doc=doc)) - - -def _AddPropertiesForNonRepeatedCompositeField(field, cls): - """Adds a public property for a nonrepeated, composite protocol message field. - A composite field is a "group" or "message" field. - - Clients can use this property to get the value of the field, but cannot - assign to the property directly. - - Args: - field: A FieldDescriptor for this field. - cls: The class we're constructing. - """ - # TODO(robinson): Remove duplication with similar method - # for non-repeated scalars. - proto_field_name = field.name - property_name = _PropertyName(proto_field_name) - - def getter(self): - field_value = self._fields.get(field) - if field_value is None: - # Construct a new object to represent this field. - field_value = field._default_constructor(self) - - # Atomically check if another thread has preempted us and, if not, swap - # in the new object we just created. If someone has preempted us, we - # take that object and discard ours. - # WARNING: We are relying on setdefault() being atomic. This is true - # in CPython but we haven't investigated others. This warning appears - # in several other locations in this file. - field_value = self._fields.setdefault(field, field_value) - return field_value - getter.__module__ = None - getter.__doc__ = 'Getter for %s.' % proto_field_name - - # We define a setter just so we can throw an exception with a more - # helpful error message. - def setter(self, new_value): - raise AttributeError('Assignment not allowed to composite field ' - '"%s" in protocol message object.' % proto_field_name) - - # Add a property to encapsulate the getter. - doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name - setattr(cls, property_name, _FieldProperty(field, getter, setter, doc=doc)) - - -def _AddPropertiesForExtensions(descriptor, cls): - """Adds properties for all fields in this protocol message type.""" - extensions = descriptor.extensions_by_name - for extension_name, extension_field in extensions.items(): - constant_name = extension_name.upper() + '_FIELD_NUMBER' - setattr(cls, constant_name, extension_field.number) - - # TODO(amauryfa): Migrate all users of these attributes to functions like - # pool.FindExtensionByNumber(descriptor). - if descriptor.file is not None: - # TODO(amauryfa): Use cls.MESSAGE_FACTORY.pool when available. - pool = descriptor.file.pool - cls._extensions_by_number = pool._extensions_by_number[descriptor] - cls._extensions_by_name = pool._extensions_by_name[descriptor] - -def _AddStaticMethods(cls): - # TODO(robinson): This probably needs to be thread-safe(?) - def RegisterExtension(extension_handle): - extension_handle.containing_type = cls.DESCRIPTOR - # TODO(amauryfa): Use cls.MESSAGE_FACTORY.pool when available. 
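- # (Illustration: the net effect is that the extension becomes resolvable
- #  on this class; its encoder, sizer, and decoders are attached through
- #  _AttachFieldHelpers below, after which the Extensions mapping can use
- #  the handle. FromString further down is the parse-from-bytes
- #  constructor.)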
- # pylint: disable=protected-access - cls.DESCRIPTOR.file.pool._AddExtensionDescriptor(extension_handle) - _AttachFieldHelpers(cls, extension_handle) - cls.RegisterExtension = staticmethod(RegisterExtension) - - def FromString(s): - message = cls() - message.MergeFromString(s) - return message - cls.FromString = staticmethod(FromString) - - -def _IsPresent(item): - """Given a (FieldDescriptor, value) tuple from _fields, return true if the - value should be included in the list returned by ListFields().""" - - if item[0].label == _FieldDescriptor.LABEL_REPEATED: - return bool(item[1]) - elif item[0].cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: - return item[1]._is_present_in_parent - else: - return True - - -def _AddListFieldsMethod(message_descriptor, cls): - """Helper for _AddMessageMethods().""" - - def ListFields(self): - all_fields = [item for item in self._fields.items() if _IsPresent(item)] - all_fields.sort(key = lambda item: item[0].number) - return all_fields - - cls.ListFields = ListFields - -_PROTO3_ERROR_TEMPLATE = \ - ('Protocol message %s has no non-repeated submessage field "%s" ' - 'nor marked as optional') -_PROTO2_ERROR_TEMPLATE = 'Protocol message %s has no non-repeated field "%s"' - -def _AddHasFieldMethod(message_descriptor, cls): - """Helper for _AddMessageMethods().""" - - is_proto3 = (message_descriptor.syntax == "proto3") - error_msg = _PROTO3_ERROR_TEMPLATE if is_proto3 else _PROTO2_ERROR_TEMPLATE - - hassable_fields = {} - for field in message_descriptor.fields: - if field.label == _FieldDescriptor.LABEL_REPEATED: - continue - # For proto3, only submessages and fields inside a oneof have presence. - if (is_proto3 and field.cpp_type != _FieldDescriptor.CPPTYPE_MESSAGE and - not field.containing_oneof): - continue - hassable_fields[field.name] = field - - # Has methods are supported for oneof descriptors. - for oneof in message_descriptor.oneofs: - hassable_fields[oneof.name] = oneof - - def HasField(self, field_name): - try: - field = hassable_fields[field_name] - except KeyError: - raise ValueError(error_msg % (message_descriptor.full_name, field_name)) - - if isinstance(field, descriptor_mod.OneofDescriptor): - try: - return HasField(self, self._oneofs[field].name) - except KeyError: - return False - else: - if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: - value = self._fields.get(field) - return value is not None and value._is_present_in_parent - else: - return field in self._fields - - cls.HasField = HasField - - -def _AddClearFieldMethod(message_descriptor, cls): - """Helper for _AddMessageMethods().""" - def ClearField(self, field_name): - try: - field = message_descriptor.fields_by_name[field_name] - except KeyError: - try: - field = message_descriptor.oneofs_by_name[field_name] - if field in self._oneofs: - field = self._oneofs[field] - else: - return - except KeyError: - raise ValueError('Protocol message %s has no "%s" field.' % - (message_descriptor.name, field_name)) - - if field in self._fields: - # To match the C++ implementation, we need to invalidate iterators - # for map fields when ClearField() happens. - if hasattr(self._fields[field], 'InvalidateIterators'): - self._fields[field].InvalidateIterators() - - # Note: If the field is a sub-message, its listener will still point - # at us. That's fine, because the worst than can happen is that it - # will call _Modified() and invalidate our byte size. Big deal. 
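- # (For example, with an illustrative field name: after
- #  msg.ClearField('foo'), HasField('foo') is False and reads of 'foo'
- #  fall back to the default value; clearing a member of a oneof also
- #  resets the oneof state just below.)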
- del self._fields[field] - - if self._oneofs.get(field.containing_oneof, None) is field: - del self._oneofs[field.containing_oneof] - - # Always call _Modified() -- even if nothing was changed, this is - # a mutating method, and thus calling it should cause the field to become - # present in the parent message. - self._Modified() - - cls.ClearField = ClearField - - -def _AddClearExtensionMethod(cls): - """Helper for _AddMessageMethods().""" - def ClearExtension(self, extension_handle): - extension_dict._VerifyExtensionHandle(self, extension_handle) - - # Similar to ClearField(), above. - if extension_handle in self._fields: - del self._fields[extension_handle] - self._Modified() - cls.ClearExtension = ClearExtension - - -def _AddHasExtensionMethod(cls): - """Helper for _AddMessageMethods().""" - def HasExtension(self, extension_handle): - extension_dict._VerifyExtensionHandle(self, extension_handle) - if extension_handle.label == _FieldDescriptor.LABEL_REPEATED: - raise KeyError('"%s" is repeated.' % extension_handle.full_name) - - if extension_handle.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: - value = self._fields.get(extension_handle) - return value is not None and value._is_present_in_parent - else: - return extension_handle in self._fields - cls.HasExtension = HasExtension - -def _InternalUnpackAny(msg): - """Unpacks Any message and returns the unpacked message. - - This internal method is different from public Any Unpack method which takes - the target message as argument. _InternalUnpackAny method does not have - target message type and need to find the message type in descriptor pool. - - Args: - msg: An Any message to be unpacked. - - Returns: - The unpacked message. - """ - # TODO(amauryfa): Don't use the factory of generated messages. - # To make Any work with custom factories, use the message factory of the - # parent message. - # pylint: disable=g-import-not-at-top - from google.protobuf import symbol_database - factory = symbol_database.Default() - - type_url = msg.type_url - - if not type_url: - return None - - # TODO(haberman): For now we just strip the hostname. Better logic will be - # required. - type_name = type_url.split('/')[-1] - descriptor = factory.pool.FindMessageTypeByName(type_name) - - if descriptor is None: - return None - - message_class = factory.GetPrototype(descriptor) - message = message_class() - - message.ParseFromString(msg.value) - return message - - -def _AddEqualsMethod(message_descriptor, cls): - """Helper for _AddMessageMethods().""" - def __eq__(self, other): - if (not isinstance(other, message_mod.Message) or - other.DESCRIPTOR != self.DESCRIPTOR): - return False - - if self is other: - return True - - if self.DESCRIPTOR.full_name == _AnyFullTypeName: - any_a = _InternalUnpackAny(self) - any_b = _InternalUnpackAny(other) - if any_a and any_b: - return any_a == any_b - - if not self.ListFields() == other.ListFields(): - return False - - # TODO(jieluo): Fix UnknownFieldSet to consider MessageSet extensions, - # then use it for the comparison. 
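- # (Illustration: equality therefore requires both the set fields from
- #  ListFields() and the sorted raw unknown fields to match; two
- #  independently built messages with identical field values compare
- #  equal, while stray unknown bytes break equality.)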
- unknown_fields = list(self._unknown_fields) - unknown_fields.sort() - other_unknown_fields = list(other._unknown_fields) - other_unknown_fields.sort() - return unknown_fields == other_unknown_fields - - cls.__eq__ = __eq__ - - -def _AddStrMethod(message_descriptor, cls): - """Helper for _AddMessageMethods().""" - def __str__(self): - return text_format.MessageToString(self) - cls.__str__ = __str__ - - -def _AddReprMethod(message_descriptor, cls): - """Helper for _AddMessageMethods().""" - def __repr__(self): - return text_format.MessageToString(self) - cls.__repr__ = __repr__ - - -def _AddUnicodeMethod(unused_message_descriptor, cls): - """Helper for _AddMessageMethods().""" - - def __unicode__(self): - return text_format.MessageToString(self, as_utf8=True).decode('utf-8') - cls.__unicode__ = __unicode__ - - -def _BytesForNonRepeatedElement(value, field_number, field_type): - """Returns the number of bytes needed to serialize a non-repeated element. - The returned byte count includes space for tag information and any - other additional space associated with serializing value. - - Args: - value: Value we're serializing. - field_number: Field number of this value. (Since the field number - is stored as part of a varint-encoded tag, this has an impact - on the total bytes required to serialize the value). - field_type: The type of the field. One of the TYPE_* constants - within FieldDescriptor. - """ - try: - fn = type_checkers.TYPE_TO_BYTE_SIZE_FN[field_type] - return fn(field_number, value) - except KeyError: - raise message_mod.EncodeError('Unrecognized field type: %d' % field_type) - - -def _AddByteSizeMethod(message_descriptor, cls): - """Helper for _AddMessageMethods().""" - - def ByteSize(self): - if not self._cached_byte_size_dirty: - return self._cached_byte_size - - size = 0 - descriptor = self.DESCRIPTOR - if descriptor.GetOptions().map_entry: - # Fields of map entry should always be serialized. - size = descriptor.fields_by_name['key']._sizer(self.key) - size += descriptor.fields_by_name['value']._sizer(self.value) - else: - for field_descriptor, field_value in self.ListFields(): - size += field_descriptor._sizer(field_value) - for tag_bytes, value_bytes in self._unknown_fields: - size += len(tag_bytes) + len(value_bytes) - - self._cached_byte_size = size - self._cached_byte_size_dirty = False - self._listener_for_children.dirty = False - return size - - cls.ByteSize = ByteSize - - -def _AddSerializeToStringMethod(message_descriptor, cls): - """Helper for _AddMessageMethods().""" - - def SerializeToString(self, **kwargs): - # Check if the message has all of its required fields set. 
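- # (For example, with an illustrative field: a proto2 message declaring
- #  `required int32 id = 1;` that is left unset raises EncodeError naming
- #  'id' here, whereas SerializePartialToString() below serializes
- #  whatever is present without this check.)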
- if not self.IsInitialized(): - raise message_mod.EncodeError( - 'Message %s is missing required fields: %s' % ( - self.DESCRIPTOR.full_name, ','.join(self.FindInitializationErrors()))) - return self.SerializePartialToString(**kwargs) - cls.SerializeToString = SerializeToString - - -def _AddSerializePartialToStringMethod(message_descriptor, cls): - """Helper for _AddMessageMethods().""" - - def SerializePartialToString(self, **kwargs): - out = BytesIO() - self._InternalSerialize(out.write, **kwargs) - return out.getvalue() - cls.SerializePartialToString = SerializePartialToString - - def InternalSerialize(self, write_bytes, deterministic=None): - if deterministic is None: - deterministic = ( - api_implementation.IsPythonDefaultSerializationDeterministic()) - else: - deterministic = bool(deterministic) - - descriptor = self.DESCRIPTOR - if descriptor.GetOptions().map_entry: - # Fields of map entry should always be serialized. - descriptor.fields_by_name['key']._encoder( - write_bytes, self.key, deterministic) - descriptor.fields_by_name['value']._encoder( - write_bytes, self.value, deterministic) - else: - for field_descriptor, field_value in self.ListFields(): - field_descriptor._encoder(write_bytes, field_value, deterministic) - for tag_bytes, value_bytes in self._unknown_fields: - write_bytes(tag_bytes) - write_bytes(value_bytes) - cls._InternalSerialize = InternalSerialize - - -def _AddMergeFromStringMethod(message_descriptor, cls): - """Helper for _AddMessageMethods().""" - def MergeFromString(self, serialized): - serialized = memoryview(serialized) - length = len(serialized) - try: - if self._InternalParse(serialized, 0, length) != length: - # The only reason _InternalParse would return early is if it - # encountered an end-group tag. - raise message_mod.DecodeError('Unexpected end-group tag.') - except (IndexError, TypeError): - # Now ord(buf[p:p+1]) == ord('') gets TypeError. - raise message_mod.DecodeError('Truncated message.') - except struct.error as e: - raise message_mod.DecodeError(e) - return length # Return this for legacy reasons. - cls.MergeFromString = MergeFromString - - local_ReadTag = decoder.ReadTag - local_SkipField = decoder.SkipField - decoders_by_tag = cls._decoders_by_tag - - def InternalParse(self, buffer, pos, end): - """Create a message from serialized bytes. - - Args: - self: Message, instance of the proto message object. - buffer: memoryview of the serialized data. - pos: int, position to start in the serialized data. - end: int, end position of the serialized data. - - Returns: - Message object. - """ - # Guard against internal misuse, since this function is called internally - # quite extensively, and its easy to accidentally pass bytes. 
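The serializer/parser pair above round-trips through `SerializePartialToString` and `_InternalParse`. Two details worth calling out: `MergeFromString` returns the number of bytes consumed (the "legacy reasons" comment), and `deterministic=True` forces stable map-entry ordering. A short sketch:

```python
from google.protobuf import struct_pb2

msg = struct_pb2.Struct()
msg.fields['b'].number_value = 2
msg.fields['a'].number_value = 1

data = msg.SerializeToString(deterministic=True)  # map entries emitted in sorted order
clone = struct_pb2.Struct()
assert clone.MergeFromString(data) == len(data)   # bytes consumed, legacy contract
assert clone == msg
```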
- assert isinstance(buffer, memoryview) - self._Modified() - field_dict = self._fields - # pylint: disable=protected-access - unknown_field_set = self._unknown_field_set - while pos != end: - (tag_bytes, new_pos) = local_ReadTag(buffer, pos) - field_decoder, field_desc = decoders_by_tag.get(tag_bytes, (None, None)) - if field_decoder is None: - if not self._unknown_fields: # pylint: disable=protected-access - self._unknown_fields = [] # pylint: disable=protected-access - if unknown_field_set is None: - # pylint: disable=protected-access - self._unknown_field_set = containers.UnknownFieldSet() - # pylint: disable=protected-access - unknown_field_set = self._unknown_field_set - # pylint: disable=protected-access - (tag, _) = decoder._DecodeVarint(tag_bytes, 0) - field_number, wire_type = wire_format.UnpackTag(tag) - if field_number == 0: - raise message_mod.DecodeError('Field number 0 is illegal.') - # TODO(jieluo): remove old_pos. - old_pos = new_pos - (data, new_pos) = decoder._DecodeUnknownField( - buffer, new_pos, wire_type) # pylint: disable=protected-access - if new_pos == -1: - return pos - # pylint: disable=protected-access - unknown_field_set._add(field_number, wire_type, data) - # TODO(jieluo): remove _unknown_fields. - new_pos = local_SkipField(buffer, old_pos, end, tag_bytes) - if new_pos == -1: - return pos - self._unknown_fields.append( - (tag_bytes, buffer[old_pos:new_pos].tobytes())) - pos = new_pos - else: - pos = field_decoder(buffer, new_pos, end, self, field_dict) - if field_desc: - self._UpdateOneofState(field_desc) - return pos - cls._InternalParse = InternalParse - - -def _AddIsInitializedMethod(message_descriptor, cls): - """Adds the IsInitialized and FindInitializationError methods to the - protocol message class.""" - - required_fields = [field for field in message_descriptor.fields - if field.label == _FieldDescriptor.LABEL_REQUIRED] - - def IsInitialized(self, errors=None): - """Checks if all required fields of a message are set. - - Args: - errors: A list which, if provided, will be populated with the field - paths of all missing required fields. - - Returns: - True iff the specified message has all required fields set. - """ - - # Performance is critical so we avoid HasField() and ListFields(). - - for field in required_fields: - if (field not in self._fields or - (field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE and - not self._fields[field]._is_present_in_parent)): - if errors is not None: - errors.extend(self.FindInitializationErrors()) - return False - - for field, value in list(self._fields.items()): # dict can change size! - if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: - if field.label == _FieldDescriptor.LABEL_REPEATED: - if (field.message_type.has_options and - field.message_type.GetOptions().map_entry): - continue - for element in value: - if not element.IsInitialized(): - if errors is not None: - errors.extend(self.FindInitializationErrors()) - return False - elif value._is_present_in_parent and not value.IsInitialized(): - if errors is not None: - errors.extend(self.FindInitializationErrors()) - return False - - return True - - cls.IsInitialized = IsInitialized - - def FindInitializationErrors(self): - """Finds required fields which are not initialized. - - Returns: - A list of strings. Each string is a path to an uninitialized field from - the top-level message, e.g. "foo.bar[5].baz". 
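To see `IsInitialized` and `FindInitializationErrors` fire, a proto2 message with a `required` field is needed. The sketch below assumes a hypothetical `example_pb2` module generated from `message Person { required string name = 1; }`:

```python
import example_pb2  # hypothetical module, generated from the proto2 snippet above

person = example_pb2.Person()
errors = []
assert not person.IsInitialized(errors)
assert errors == ['name']            # path format produced by FindInitializationErrors

person.name = 'Ada'
assert person.IsInitialized()
person.SerializeToString()           # no longer raises EncodeError
```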
- """ - - errors = [] # simplify things - - for field in required_fields: - if not self.HasField(field.name): - errors.append(field.name) - - for field, value in self.ListFields(): - if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: - if field.is_extension: - name = '(%s)' % field.full_name - else: - name = field.name - - if _IsMapField(field): - if _IsMessageMapField(field): - for key in value: - element = value[key] - prefix = '%s[%s].' % (name, key) - sub_errors = element.FindInitializationErrors() - errors += [prefix + error for error in sub_errors] - else: - # ScalarMaps can't have any initialization errors. - pass - elif field.label == _FieldDescriptor.LABEL_REPEATED: - for i in range(len(value)): - element = value[i] - prefix = '%s[%d].' % (name, i) - sub_errors = element.FindInitializationErrors() - errors += [prefix + error for error in sub_errors] - else: - prefix = name + '.' - sub_errors = value.FindInitializationErrors() - errors += [prefix + error for error in sub_errors] - - return errors - - cls.FindInitializationErrors = FindInitializationErrors - - -def _FullyQualifiedClassName(klass): - module = klass.__module__ - name = getattr(klass, '__qualname__', klass.__name__) - if module in (None, 'builtins', '__builtin__'): - return name - return module + '.' + name - - -def _AddMergeFromMethod(cls): - LABEL_REPEATED = _FieldDescriptor.LABEL_REPEATED - CPPTYPE_MESSAGE = _FieldDescriptor.CPPTYPE_MESSAGE - - def MergeFrom(self, msg): - if not isinstance(msg, cls): - raise TypeError( - 'Parameter to MergeFrom() must be instance of same class: ' - 'expected %s got %s.' % (_FullyQualifiedClassName(cls), - _FullyQualifiedClassName(msg.__class__))) - - assert msg is not self - self._Modified() - - fields = self._fields - - for field, value in msg._fields.items(): - if field.label == LABEL_REPEATED: - field_value = fields.get(field) - if field_value is None: - # Construct a new object to represent this field. - field_value = field._default_constructor(self) - fields[field] = field_value - field_value.MergeFrom(value) - elif field.cpp_type == CPPTYPE_MESSAGE: - if value._is_present_in_parent: - field_value = fields.get(field) - if field_value is None: - # Construct a new object to represent this field. - field_value = field._default_constructor(self) - fields[field] = field_value - field_value.MergeFrom(value) - else: - self._fields[field] = value - if field.containing_oneof: - self._UpdateOneofState(field) - - if msg._unknown_fields: - if not self._unknown_fields: - self._unknown_fields = [] - self._unknown_fields.extend(msg._unknown_fields) - # pylint: disable=protected-access - if self._unknown_field_set is None: - self._unknown_field_set = containers.UnknownFieldSet() - self._unknown_field_set._extend(msg._unknown_field_set) - - cls.MergeFrom = MergeFrom - - -def _AddWhichOneofMethod(message_descriptor, cls): - def WhichOneof(self, oneof_name): - """Returns the name of the currently set field inside a oneof, or None.""" - try: - field = message_descriptor.oneofs_by_name[oneof_name] - except KeyError: - raise ValueError( - 'Protocol message has no oneof "%s" field.' % oneof_name) - - nested_field = self._oneofs.get(field, None) - if nested_field is not None and self.HasField(nested_field.name): - return nested_field.name - else: - return None - - cls.WhichOneof = WhichOneof - - -def _Clear(self): - # Clear fields. 
- self._fields = {} - self._unknown_fields = () - # pylint: disable=protected-access - if self._unknown_field_set is not None: - self._unknown_field_set._clear() - self._unknown_field_set = None - - self._oneofs = {} - self._Modified() - - -def _UnknownFields(self): - if self._unknown_field_set is None: # pylint: disable=protected-access - # pylint: disable=protected-access - self._unknown_field_set = containers.UnknownFieldSet() - return self._unknown_field_set # pylint: disable=protected-access - - -def _DiscardUnknownFields(self): - self._unknown_fields = [] - self._unknown_field_set = None # pylint: disable=protected-access - for field, value in self.ListFields(): - if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: - if _IsMapField(field): - if _IsMessageMapField(field): - for key in value: - value[key].DiscardUnknownFields() - elif field.label == _FieldDescriptor.LABEL_REPEATED: - for sub_message in value: - sub_message.DiscardUnknownFields() - else: - value.DiscardUnknownFields() - - -def _SetListener(self, listener): - if listener is None: - self._listener = message_listener_mod.NullMessageListener() - else: - self._listener = listener - - -def _AddMessageMethods(message_descriptor, cls): - """Adds implementations of all Message methods to cls.""" - _AddListFieldsMethod(message_descriptor, cls) - _AddHasFieldMethod(message_descriptor, cls) - _AddClearFieldMethod(message_descriptor, cls) - if message_descriptor.is_extendable: - _AddClearExtensionMethod(cls) - _AddHasExtensionMethod(cls) - _AddEqualsMethod(message_descriptor, cls) - _AddStrMethod(message_descriptor, cls) - _AddReprMethod(message_descriptor, cls) - _AddUnicodeMethod(message_descriptor, cls) - _AddByteSizeMethod(message_descriptor, cls) - _AddSerializeToStringMethod(message_descriptor, cls) - _AddSerializePartialToStringMethod(message_descriptor, cls) - _AddMergeFromStringMethod(message_descriptor, cls) - _AddIsInitializedMethod(message_descriptor, cls) - _AddMergeFromMethod(cls) - _AddWhichOneofMethod(message_descriptor, cls) - # Adds methods which do not depend on cls. - cls.Clear = _Clear - cls.UnknownFields = _UnknownFields - cls.DiscardUnknownFields = _DiscardUnknownFields - cls._SetListener = _SetListener - - -def _AddPrivateHelperMethods(message_descriptor, cls): - """Adds implementation of private helper methods to cls.""" - - def Modified(self): - """Sets the _cached_byte_size_dirty bit to true, - and propagates this to our listener iff this was a state change. - """ - - # Note: Some callers check _cached_byte_size_dirty before calling - # _Modified() as an extra optimization. So, if this method is ever - # changed such that it does stuff even when _cached_byte_size_dirty is - # already true, the callers need to be updated. - if not self._cached_byte_size_dirty: - self._cached_byte_size_dirty = True - self._listener_for_children.dirty = True - self._is_present_in_parent = True - self._listener.Modified() - - def _UpdateOneofState(self, field): - """Sets field as the active field in its containing oneof. - - Will also delete currently active field in the oneof, if it is different - from the argument. Does not mark the message as modified. 
- """ - other_field = self._oneofs.setdefault(field.containing_oneof, field) - if other_field is not field: - del self._fields[other_field] - self._oneofs[field.containing_oneof] = field - - cls._Modified = Modified - cls.SetInParent = Modified - cls._UpdateOneofState = _UpdateOneofState - - -class _Listener(object): - - """MessageListener implementation that a parent message registers with its - child message. - - In order to support semantics like: - - foo.bar.baz.qux = 23 - assert foo.HasField('bar') - - ...child objects must have back references to their parents. - This helper class is at the heart of this support. - """ - - def __init__(self, parent_message): - """Args: - parent_message: The message whose _Modified() method we should call when - we receive Modified() messages. - """ - # This listener establishes a back reference from a child (contained) object - # to its parent (containing) object. We make this a weak reference to avoid - # creating cyclic garbage when the client finishes with the 'parent' object - # in the tree. - if isinstance(parent_message, weakref.ProxyType): - self._parent_message_weakref = parent_message - else: - self._parent_message_weakref = weakref.proxy(parent_message) - - # As an optimization, we also indicate directly on the listener whether - # or not the parent message is dirty. This way we can avoid traversing - # up the tree in the common case. - self.dirty = False - - def Modified(self): - if self.dirty: - return - try: - # Propagate the signal to our parents iff this is the first field set. - self._parent_message_weakref._Modified() - except ReferenceError: - # We can get here if a client has kept a reference to a child object, - # and is now setting a field on it, but the child's parent has been - # garbage-collected. This is not an error. - pass - - -class _OneofListener(_Listener): - """Special listener implementation for setting composite oneof fields.""" - - def __init__(self, parent_message, field): - """Args: - parent_message: The message whose _Modified() method we should call when - we receive Modified() messages. - field: The descriptor of the field being set in the parent message. - """ - super(_OneofListener, self).__init__(parent_message) - self._field = field - - def Modified(self): - """Also updates the state of the containing oneof in the parent message.""" - try: - self._parent_message_weakref._UpdateOneofState(self._field) - super(_OneofListener, self).Modified() - except ReferenceError: - pass diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/type_checkers.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/type_checkers.py deleted file mode 100644 index a53e71fe8e..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/type_checkers.py +++ /dev/null @@ -1,435 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. -# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. 
-# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -"""Provides type checking routines. - -This module defines type checking utilities in the forms of dictionaries: - -VALUE_CHECKERS: A dictionary of field types and a value validation object. -TYPE_TO_BYTE_SIZE_FN: A dictionary with field types and a size computing - function. -TYPE_TO_SERIALIZE_METHOD: A dictionary with field types and serialization - function. -FIELD_TYPE_TO_WIRE_TYPE: A dictionary with field typed and their - corresponding wire types. -TYPE_TO_DESERIALIZE_METHOD: A dictionary with field types and deserialization - function. -""" - -__author__ = 'robinson@google.com (Will Robinson)' - -import ctypes -import numbers - -from google.protobuf.internal import decoder -from google.protobuf.internal import encoder -from google.protobuf.internal import wire_format -from google.protobuf import descriptor - -_FieldDescriptor = descriptor.FieldDescriptor - - -def TruncateToFourByteFloat(original): - return ctypes.c_float(original).value - - -def ToShortestFloat(original): - """Returns the shortest float that has same value in wire.""" - # All 4 byte floats have between 6 and 9 significant digits, so we - # start with 6 as the lower bound. - # It has to be iterative because use '.9g' directly can not get rid - # of the noises for most values. For example if set a float_field=0.9 - # use '.9g' will print 0.899999976. - precision = 6 - rounded = float('{0:.{1}g}'.format(original, precision)) - while TruncateToFourByteFloat(rounded) != original: - precision += 1 - rounded = float('{0:.{1}g}'.format(original, precision)) - return rounded - - -def SupportsOpenEnums(field_descriptor): - return field_descriptor.containing_type.syntax == 'proto3' - - -def GetTypeChecker(field): - """Returns a type checker for a message field of the specified types. - - Args: - field: FieldDescriptor object for this field. - - Returns: - An instance of TypeChecker which can be used to verify the types - of values assigned to a field of the specified type. - """ - if (field.cpp_type == _FieldDescriptor.CPPTYPE_STRING and - field.type == _FieldDescriptor.TYPE_STRING): - return UnicodeValueChecker() - if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM: - if SupportsOpenEnums(field): - # When open enums are supported, any int32 can be assigned. - return _VALUE_CHECKERS[_FieldDescriptor.CPPTYPE_INT32] - else: - return EnumValueChecker(field.enum_type) - return _VALUE_CHECKERS[field.cpp_type] - - -# None of the typecheckers below make any attempt to guard against people -# subclassing builtin types and doing weird things. 
We're not trying to -# protect against malicious clients here, just people accidentally shooting -# themselves in the foot in obvious ways. -class TypeChecker(object): - - """Type checker used to catch type errors as early as possible - when the client is setting scalar fields in protocol messages. - """ - - def __init__(self, *acceptable_types): - self._acceptable_types = acceptable_types - - def CheckValue(self, proposed_value): - """Type check the provided value and return it. - - The returned value might have been normalized to another type. - """ - if not isinstance(proposed_value, self._acceptable_types): - message = ('%.1024r has type %s, but expected one of: %s' % - (proposed_value, type(proposed_value), self._acceptable_types)) - raise TypeError(message) - return proposed_value - - -class TypeCheckerWithDefault(TypeChecker): - - def __init__(self, default_value, *acceptable_types): - TypeChecker.__init__(self, *acceptable_types) - self._default_value = default_value - - def DefaultValue(self): - return self._default_value - - -class BoolValueChecker(object): - """Type checker used for bool fields.""" - - def CheckValue(self, proposed_value): - if not hasattr(proposed_value, '__index__') or ( - type(proposed_value).__module__ == 'numpy' and - type(proposed_value).__name__ == 'ndarray'): - message = ('%.1024r has type %s, but expected one of: %s' % - (proposed_value, type(proposed_value), (bool, int))) - raise TypeError(message) - return bool(proposed_value) - - def DefaultValue(self): - return False - - -# IntValueChecker and its subclasses perform integer type-checks -# and bounds-checks. -class IntValueChecker(object): - - """Checker used for integer fields. Performs type-check and range check.""" - - def CheckValue(self, proposed_value): - if not hasattr(proposed_value, '__index__') or ( - type(proposed_value).__module__ == 'numpy' and - type(proposed_value).__name__ == 'ndarray'): - message = ('%.1024r has type %s, but expected one of: %s' % - (proposed_value, type(proposed_value), (int,))) - raise TypeError(message) - - if not self._MIN <= int(proposed_value) <= self._MAX: - raise ValueError('Value out of range: %d' % proposed_value) - # We force all values to int to make alternate implementations where the - # distinction is more significant (e.g. the C++ implementation) simpler. - proposed_value = int(proposed_value) - return proposed_value - - def DefaultValue(self): - return 0 - - -class EnumValueChecker(object): - - """Checker used for enum fields. Performs type-check and range check.""" - - def __init__(self, enum_type): - self._enum_type = enum_type - - def CheckValue(self, proposed_value): - if not isinstance(proposed_value, numbers.Integral): - message = ('%.1024r has type %s, but expected one of: %s' % - (proposed_value, type(proposed_value), (int,))) - raise TypeError(message) - if int(proposed_value) not in self._enum_type.values_by_number: - raise ValueError('Unknown enum value: %d' % proposed_value) - return proposed_value - - def DefaultValue(self): - return self._enum_type.values[0].number - - -class UnicodeValueChecker(object): - - """Checker used for string fields. - - Always returns a unicode value, even if the input is of type str. - """ - - def CheckValue(self, proposed_value): - if not isinstance(proposed_value, (bytes, str)): - message = ('%.1024r has type %s, but expected one of: %s' % - (proposed_value, type(proposed_value), (bytes, str))) - raise TypeError(message) - - # If the value is of type 'bytes' make sure that it is valid UTF-8 data. 
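These checkers are what make scalar assignment fail fast. A quick sketch of the errors they raise under the pure-Python runtime, using stock wrapper types:

```python
from google.protobuf import wrappers_pb2

w = wrappers_pb2.Int32Value()
w.value = 7                 # fine: within [-2**31, 2**31 - 1]
try:
    w.value = 2 ** 31       # IntValueChecker range check
except ValueError:
    pass
try:
    w.value = '7'           # type check fires before any conversion
except TypeError:
    pass

s = wrappers_pb2.StringValue()
try:
    s.value = b'\xff'       # bytes must be valid UTF-8, per the check above
except ValueError:
    pass
```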
- if isinstance(proposed_value, bytes): - try: - proposed_value = proposed_value.decode('utf-8') - except UnicodeDecodeError: - raise ValueError('%.1024r has type bytes, but isn\'t valid UTF-8 ' - 'encoding. Non-UTF-8 strings must be converted to ' - 'unicode objects before being added.' % - (proposed_value)) - else: - try: - proposed_value.encode('utf8') - except UnicodeEncodeError: - raise ValueError('%.1024r isn\'t a valid unicode string and ' - 'can\'t be encoded in UTF-8.'% - (proposed_value)) - - return proposed_value - - def DefaultValue(self): - return u"" - - -class Int32ValueChecker(IntValueChecker): - # We're sure to use ints instead of longs here since comparison may be more - # efficient. - _MIN = -2147483648 - _MAX = 2147483647 - - -class Uint32ValueChecker(IntValueChecker): - _MIN = 0 - _MAX = (1 << 32) - 1 - - -class Int64ValueChecker(IntValueChecker): - _MIN = -(1 << 63) - _MAX = (1 << 63) - 1 - - -class Uint64ValueChecker(IntValueChecker): - _MIN = 0 - _MAX = (1 << 64) - 1 - - -# The max 4 bytes float is about 3.4028234663852886e+38 -_FLOAT_MAX = float.fromhex('0x1.fffffep+127') -_FLOAT_MIN = -_FLOAT_MAX -_INF = float('inf') -_NEG_INF = float('-inf') - - -class DoubleValueChecker(object): - """Checker used for double fields. - - Performs type-check and range check. - """ - - def CheckValue(self, proposed_value): - """Check and convert proposed_value to float.""" - if (not hasattr(proposed_value, '__float__') and - not hasattr(proposed_value, '__index__')) or ( - type(proposed_value).__module__ == 'numpy' and - type(proposed_value).__name__ == 'ndarray'): - message = ('%.1024r has type %s, but expected one of: int, float' % - (proposed_value, type(proposed_value))) - raise TypeError(message) - return float(proposed_value) - - def DefaultValue(self): - return 0.0 - - -class FloatValueChecker(DoubleValueChecker): - """Checker used for float fields. - - Performs type-check and range check. - - Values exceeding a 32-bit float will be converted to inf/-inf. - """ - - def CheckValue(self, proposed_value): - """Check and convert proposed_value to float.""" - converted_value = super().CheckValue(proposed_value) - # This inf rounding matches the C++ proto SafeDoubleToFloat logic. - if converted_value > _FLOAT_MAX: - return _INF - if converted_value < _FLOAT_MIN: - return _NEG_INF - - return TruncateToFourByteFloat(converted_value) - -# Type-checkers for all scalar CPPTYPEs. -_VALUE_CHECKERS = { - _FieldDescriptor.CPPTYPE_INT32: Int32ValueChecker(), - _FieldDescriptor.CPPTYPE_INT64: Int64ValueChecker(), - _FieldDescriptor.CPPTYPE_UINT32: Uint32ValueChecker(), - _FieldDescriptor.CPPTYPE_UINT64: Uint64ValueChecker(), - _FieldDescriptor.CPPTYPE_DOUBLE: DoubleValueChecker(), - _FieldDescriptor.CPPTYPE_FLOAT: FloatValueChecker(), - _FieldDescriptor.CPPTYPE_BOOL: BoolValueChecker(), - _FieldDescriptor.CPPTYPE_STRING: TypeCheckerWithDefault(b'', bytes), -} - - -# Map from field type to a function F, such that F(field_num, value) -# gives the total byte size for a value of the given type. This -# byte size includes tag information and any other additional space -# associated with serializing "value". 
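One consequence of `FloatValueChecker` worth noting: float32 truncation happens at assignment time, and values beyond `_FLOAT_MAX` round to infinity, as implemented in the checker above. A sketch:

```python
import ctypes
from google.protobuf import wrappers_pb2

f = wrappers_pb2.FloatValue()
f.value = 0.1                                 # stored with float32 precision
assert f.value == ctypes.c_float(0.1).value   # same as TruncateToFourByteFloat(0.1)
f.value = 3.5e38                              # exceeds _FLOAT_MAX (~3.4028e38)
assert f.value == float('inf')                # rounded to inf, mirroring C++ SafeDoubleToFloat
```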
-TYPE_TO_BYTE_SIZE_FN = { - _FieldDescriptor.TYPE_DOUBLE: wire_format.DoubleByteSize, - _FieldDescriptor.TYPE_FLOAT: wire_format.FloatByteSize, - _FieldDescriptor.TYPE_INT64: wire_format.Int64ByteSize, - _FieldDescriptor.TYPE_UINT64: wire_format.UInt64ByteSize, - _FieldDescriptor.TYPE_INT32: wire_format.Int32ByteSize, - _FieldDescriptor.TYPE_FIXED64: wire_format.Fixed64ByteSize, - _FieldDescriptor.TYPE_FIXED32: wire_format.Fixed32ByteSize, - _FieldDescriptor.TYPE_BOOL: wire_format.BoolByteSize, - _FieldDescriptor.TYPE_STRING: wire_format.StringByteSize, - _FieldDescriptor.TYPE_GROUP: wire_format.GroupByteSize, - _FieldDescriptor.TYPE_MESSAGE: wire_format.MessageByteSize, - _FieldDescriptor.TYPE_BYTES: wire_format.BytesByteSize, - _FieldDescriptor.TYPE_UINT32: wire_format.UInt32ByteSize, - _FieldDescriptor.TYPE_ENUM: wire_format.EnumByteSize, - _FieldDescriptor.TYPE_SFIXED32: wire_format.SFixed32ByteSize, - _FieldDescriptor.TYPE_SFIXED64: wire_format.SFixed64ByteSize, - _FieldDescriptor.TYPE_SINT32: wire_format.SInt32ByteSize, - _FieldDescriptor.TYPE_SINT64: wire_format.SInt64ByteSize - } - - -# Maps from field types to encoder constructors. -TYPE_TO_ENCODER = { - _FieldDescriptor.TYPE_DOUBLE: encoder.DoubleEncoder, - _FieldDescriptor.TYPE_FLOAT: encoder.FloatEncoder, - _FieldDescriptor.TYPE_INT64: encoder.Int64Encoder, - _FieldDescriptor.TYPE_UINT64: encoder.UInt64Encoder, - _FieldDescriptor.TYPE_INT32: encoder.Int32Encoder, - _FieldDescriptor.TYPE_FIXED64: encoder.Fixed64Encoder, - _FieldDescriptor.TYPE_FIXED32: encoder.Fixed32Encoder, - _FieldDescriptor.TYPE_BOOL: encoder.BoolEncoder, - _FieldDescriptor.TYPE_STRING: encoder.StringEncoder, - _FieldDescriptor.TYPE_GROUP: encoder.GroupEncoder, - _FieldDescriptor.TYPE_MESSAGE: encoder.MessageEncoder, - _FieldDescriptor.TYPE_BYTES: encoder.BytesEncoder, - _FieldDescriptor.TYPE_UINT32: encoder.UInt32Encoder, - _FieldDescriptor.TYPE_ENUM: encoder.EnumEncoder, - _FieldDescriptor.TYPE_SFIXED32: encoder.SFixed32Encoder, - _FieldDescriptor.TYPE_SFIXED64: encoder.SFixed64Encoder, - _FieldDescriptor.TYPE_SINT32: encoder.SInt32Encoder, - _FieldDescriptor.TYPE_SINT64: encoder.SInt64Encoder, - } - - -# Maps from field types to sizer constructors. -TYPE_TO_SIZER = { - _FieldDescriptor.TYPE_DOUBLE: encoder.DoubleSizer, - _FieldDescriptor.TYPE_FLOAT: encoder.FloatSizer, - _FieldDescriptor.TYPE_INT64: encoder.Int64Sizer, - _FieldDescriptor.TYPE_UINT64: encoder.UInt64Sizer, - _FieldDescriptor.TYPE_INT32: encoder.Int32Sizer, - _FieldDescriptor.TYPE_FIXED64: encoder.Fixed64Sizer, - _FieldDescriptor.TYPE_FIXED32: encoder.Fixed32Sizer, - _FieldDescriptor.TYPE_BOOL: encoder.BoolSizer, - _FieldDescriptor.TYPE_STRING: encoder.StringSizer, - _FieldDescriptor.TYPE_GROUP: encoder.GroupSizer, - _FieldDescriptor.TYPE_MESSAGE: encoder.MessageSizer, - _FieldDescriptor.TYPE_BYTES: encoder.BytesSizer, - _FieldDescriptor.TYPE_UINT32: encoder.UInt32Sizer, - _FieldDescriptor.TYPE_ENUM: encoder.EnumSizer, - _FieldDescriptor.TYPE_SFIXED32: encoder.SFixed32Sizer, - _FieldDescriptor.TYPE_SFIXED64: encoder.SFixed64Sizer, - _FieldDescriptor.TYPE_SINT32: encoder.SInt32Sizer, - _FieldDescriptor.TYPE_SINT64: encoder.SInt64Sizer, - } - - -# Maps from field type to a decoder constructor. 
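Each entry in the byte-size table above is a function of `(field_number, value)` that already accounts for the varint-encoded tag. A quick check against the `wire_format` helpers (an internal module, not a public contract):

```python
from google.protobuf.internal import wire_format

# field 1, value 1: one tag byte plus one varint byte
assert wire_format.Int64ByteSize(1, 1) == 2
# field 16: the tag (16 << 3 == 128) needs a two-byte varint
assert wire_format.Int64ByteSize(16, 1) == 3
```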
-TYPE_TO_DECODER = { - _FieldDescriptor.TYPE_DOUBLE: decoder.DoubleDecoder, - _FieldDescriptor.TYPE_FLOAT: decoder.FloatDecoder, - _FieldDescriptor.TYPE_INT64: decoder.Int64Decoder, - _FieldDescriptor.TYPE_UINT64: decoder.UInt64Decoder, - _FieldDescriptor.TYPE_INT32: decoder.Int32Decoder, - _FieldDescriptor.TYPE_FIXED64: decoder.Fixed64Decoder, - _FieldDescriptor.TYPE_FIXED32: decoder.Fixed32Decoder, - _FieldDescriptor.TYPE_BOOL: decoder.BoolDecoder, - _FieldDescriptor.TYPE_STRING: decoder.StringDecoder, - _FieldDescriptor.TYPE_GROUP: decoder.GroupDecoder, - _FieldDescriptor.TYPE_MESSAGE: decoder.MessageDecoder, - _FieldDescriptor.TYPE_BYTES: decoder.BytesDecoder, - _FieldDescriptor.TYPE_UINT32: decoder.UInt32Decoder, - _FieldDescriptor.TYPE_ENUM: decoder.EnumDecoder, - _FieldDescriptor.TYPE_SFIXED32: decoder.SFixed32Decoder, - _FieldDescriptor.TYPE_SFIXED64: decoder.SFixed64Decoder, - _FieldDescriptor.TYPE_SINT32: decoder.SInt32Decoder, - _FieldDescriptor.TYPE_SINT64: decoder.SInt64Decoder, - } - -# Maps from field type to expected wiretype. -FIELD_TYPE_TO_WIRE_TYPE = { - _FieldDescriptor.TYPE_DOUBLE: wire_format.WIRETYPE_FIXED64, - _FieldDescriptor.TYPE_FLOAT: wire_format.WIRETYPE_FIXED32, - _FieldDescriptor.TYPE_INT64: wire_format.WIRETYPE_VARINT, - _FieldDescriptor.TYPE_UINT64: wire_format.WIRETYPE_VARINT, - _FieldDescriptor.TYPE_INT32: wire_format.WIRETYPE_VARINT, - _FieldDescriptor.TYPE_FIXED64: wire_format.WIRETYPE_FIXED64, - _FieldDescriptor.TYPE_FIXED32: wire_format.WIRETYPE_FIXED32, - _FieldDescriptor.TYPE_BOOL: wire_format.WIRETYPE_VARINT, - _FieldDescriptor.TYPE_STRING: - wire_format.WIRETYPE_LENGTH_DELIMITED, - _FieldDescriptor.TYPE_GROUP: wire_format.WIRETYPE_START_GROUP, - _FieldDescriptor.TYPE_MESSAGE: - wire_format.WIRETYPE_LENGTH_DELIMITED, - _FieldDescriptor.TYPE_BYTES: - wire_format.WIRETYPE_LENGTH_DELIMITED, - _FieldDescriptor.TYPE_UINT32: wire_format.WIRETYPE_VARINT, - _FieldDescriptor.TYPE_ENUM: wire_format.WIRETYPE_VARINT, - _FieldDescriptor.TYPE_SFIXED32: wire_format.WIRETYPE_FIXED32, - _FieldDescriptor.TYPE_SFIXED64: wire_format.WIRETYPE_FIXED64, - _FieldDescriptor.TYPE_SINT32: wire_format.WIRETYPE_VARINT, - _FieldDescriptor.TYPE_SINT64: wire_format.WIRETYPE_VARINT, - } diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/well_known_types.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/well_known_types.py deleted file mode 100644 index b581ab750a..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/well_known_types.py +++ /dev/null @@ -1,878 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. -# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -"""Contains well known classes. - -This files defines well known classes which need extra maintenance including: - - Any - - Duration - - FieldMask - - Struct - - Timestamp -""" - -__author__ = 'jieluo@google.com (Jie Luo)' - -import calendar -import collections.abc -import datetime - -from google.protobuf.descriptor import FieldDescriptor - -_TIMESTAMPFOMAT = '%Y-%m-%dT%H:%M:%S' -_NANOS_PER_SECOND = 1000000000 -_NANOS_PER_MILLISECOND = 1000000 -_NANOS_PER_MICROSECOND = 1000 -_MILLIS_PER_SECOND = 1000 -_MICROS_PER_SECOND = 1000000 -_SECONDS_PER_DAY = 24 * 3600 -_DURATION_SECONDS_MAX = 315576000000 - - -class Any(object): - """Class for Any Message type.""" - - __slots__ = () - - def Pack(self, msg, type_url_prefix='type.googleapis.com/', - deterministic=None): - """Packs the specified message into current Any message.""" - if len(type_url_prefix) < 1 or type_url_prefix[-1] != '/': - self.type_url = '%s/%s' % (type_url_prefix, msg.DESCRIPTOR.full_name) - else: - self.type_url = '%s%s' % (type_url_prefix, msg.DESCRIPTOR.full_name) - self.value = msg.SerializeToString(deterministic=deterministic) - - def Unpack(self, msg): - """Unpacks the current Any message into specified message.""" - descriptor = msg.DESCRIPTOR - if not self.Is(descriptor): - return False - msg.ParseFromString(self.value) - return True - - def TypeName(self): - """Returns the protobuf type name of the inner message.""" - # Only last part is to be used: b/25630112 - return self.type_url.split('/')[-1] - - def Is(self, descriptor): - """Checks if this Any represents the given protobuf type.""" - return '/' in self.type_url and self.TypeName() == descriptor.full_name - - -_EPOCH_DATETIME_NAIVE = datetime.datetime.utcfromtimestamp(0) -_EPOCH_DATETIME_AWARE = datetime.datetime.fromtimestamp( - 0, tz=datetime.timezone.utc) - - -class Timestamp(object): - """Class for Timestamp message type.""" - - __slots__ = () - - def ToJsonString(self): - """Converts Timestamp to RFC 3339 date string format. - - Returns: - A string converted from timestamp. The string is always Z-normalized - and uses 3, 6 or 9 fractional digits as required to represent the - exact time. Example of the return format: '1972-01-01T10:00:20.021Z' - """ - nanos = self.nanos % _NANOS_PER_SECOND - total_sec = self.seconds + (self.nanos - nanos) // _NANOS_PER_SECOND - seconds = total_sec % _SECONDS_PER_DAY - days = (total_sec - seconds) // _SECONDS_PER_DAY - dt = datetime.datetime(1970, 1, 1) + datetime.timedelta(days, seconds) - - result = dt.isoformat() - if (nanos % 1e9) == 0: - # If there are 0 fractional digits, the fractional - # point '.' should be omitted when serializing. - return result + 'Z' - if (nanos % 1e6) == 0: - # Serialize 3 fractional digits. 
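The 3/6/9 fractional-digit rule in `ToJsonString` is easiest to see by example:

```python
from google.protobuf import timestamp_pb2

ts = timestamp_pb2.Timestamp(seconds=0, nanos=0)
assert ts.ToJsonString() == '1970-01-01T00:00:00Z'            # '.' omitted entirely
ts.nanos = 21000000
assert ts.ToJsonString() == '1970-01-01T00:00:00.021Z'        # 3 digits (milliseconds)
ts.nanos = 21000
assert ts.ToJsonString() == '1970-01-01T00:00:00.000021Z'     # 6 digits (microseconds)
ts.nanos = 21
assert ts.ToJsonString() == '1970-01-01T00:00:00.000000021Z'  # 9 digits (nanoseconds)
```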
- return result + '.%03dZ' % (nanos / 1e6) - if (nanos % 1e3) == 0: - # Serialize 6 fractional digits. - return result + '.%06dZ' % (nanos / 1e3) - # Serialize 9 fractional digits. - return result + '.%09dZ' % nanos - - def FromJsonString(self, value): - """Parse a RFC 3339 date string format to Timestamp. - - Args: - value: A date string. Any fractional digits (or none) and any offset are - accepted as long as they fit into nano-seconds precision. - Example of accepted format: '1972-01-01T10:00:20.021-05:00' - - Raises: - ValueError: On parsing problems. - """ - if not isinstance(value, str): - raise ValueError('Timestamp JSON value not a string: {!r}'.format(value)) - timezone_offset = value.find('Z') - if timezone_offset == -1: - timezone_offset = value.find('+') - if timezone_offset == -1: - timezone_offset = value.rfind('-') - if timezone_offset == -1: - raise ValueError( - 'Failed to parse timestamp: missing valid timezone offset.') - time_value = value[0:timezone_offset] - # Parse datetime and nanos. - point_position = time_value.find('.') - if point_position == -1: - second_value = time_value - nano_value = '' - else: - second_value = time_value[:point_position] - nano_value = time_value[point_position + 1:] - if 't' in second_value: - raise ValueError( - 'time data \'{0}\' does not match format \'%Y-%m-%dT%H:%M:%S\', ' - 'lowercase \'t\' is not accepted'.format(second_value)) - date_object = datetime.datetime.strptime(second_value, _TIMESTAMPFOMAT) - td = date_object - datetime.datetime(1970, 1, 1) - seconds = td.seconds + td.days * _SECONDS_PER_DAY - if len(nano_value) > 9: - raise ValueError( - 'Failed to parse Timestamp: nanos {0} more than ' - '9 fractional digits.'.format(nano_value)) - if nano_value: - nanos = round(float('0.' + nano_value) * 1e9) - else: - nanos = 0 - # Parse timezone offsets. 
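The offset handling parsed above folds any `±HH:MM` suffix into `seconds`, so output is always Z-normalized regardless of the input timezone:

```python
from google.protobuf import timestamp_pb2

ts = timestamp_pb2.Timestamp()
ts.FromJsonString('1972-01-01T10:00:20.021-05:00')
assert ts.ToJsonString() == '1972-01-01T15:00:20.021Z'  # -05:00 folded into UTC
```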
- if value[timezone_offset] == 'Z': - if len(value) != timezone_offset + 1: - raise ValueError('Failed to parse timestamp: invalid trailing' - ' data {0}.'.format(value)) - else: - timezone = value[timezone_offset:] - pos = timezone.find(':') - if pos == -1: - raise ValueError( - 'Invalid timezone offset value: {0}.'.format(timezone)) - if timezone[0] == '+': - seconds -= (int(timezone[1:pos])*60+int(timezone[pos+1:]))*60 - else: - seconds += (int(timezone[1:pos])*60+int(timezone[pos+1:]))*60 - # Set seconds and nanos - self.seconds = int(seconds) - self.nanos = int(nanos) - - def GetCurrentTime(self): - """Get the current UTC into Timestamp.""" - self.FromDatetime(datetime.datetime.utcnow()) - - def ToNanoseconds(self): - """Converts Timestamp to nanoseconds since epoch.""" - return self.seconds * _NANOS_PER_SECOND + self.nanos - - def ToMicroseconds(self): - """Converts Timestamp to microseconds since epoch.""" - return (self.seconds * _MICROS_PER_SECOND + - self.nanos // _NANOS_PER_MICROSECOND) - - def ToMilliseconds(self): - """Converts Timestamp to milliseconds since epoch.""" - return (self.seconds * _MILLIS_PER_SECOND + - self.nanos // _NANOS_PER_MILLISECOND) - - def ToSeconds(self): - """Converts Timestamp to seconds since epoch.""" - return self.seconds - - def FromNanoseconds(self, nanos): - """Converts nanoseconds since epoch to Timestamp.""" - self.seconds = nanos // _NANOS_PER_SECOND - self.nanos = nanos % _NANOS_PER_SECOND - - def FromMicroseconds(self, micros): - """Converts microseconds since epoch to Timestamp.""" - self.seconds = micros // _MICROS_PER_SECOND - self.nanos = (micros % _MICROS_PER_SECOND) * _NANOS_PER_MICROSECOND - - def FromMilliseconds(self, millis): - """Converts milliseconds since epoch to Timestamp.""" - self.seconds = millis // _MILLIS_PER_SECOND - self.nanos = (millis % _MILLIS_PER_SECOND) * _NANOS_PER_MILLISECOND - - def FromSeconds(self, seconds): - """Converts seconds since epoch to Timestamp.""" - self.seconds = seconds - self.nanos = 0 - - def ToDatetime(self, tzinfo=None): - """Converts Timestamp to a datetime. - - Args: - tzinfo: A datetime.tzinfo subclass; defaults to None. - - Returns: - If tzinfo is None, returns a timezone-naive UTC datetime (with no timezone - information, i.e. not aware that it's UTC). - - Otherwise, returns a timezone-aware datetime in the input timezone. - """ - delta = datetime.timedelta( - seconds=self.seconds, - microseconds=_RoundTowardZero(self.nanos, _NANOS_PER_MICROSECOND)) - if tzinfo is None: - return _EPOCH_DATETIME_NAIVE + delta - else: - return _EPOCH_DATETIME_AWARE.astimezone(tzinfo) + delta - - def FromDatetime(self, dt): - """Converts datetime to Timestamp. - - Args: - dt: A datetime. If it's timezone-naive, it's assumed to be in UTC. - """ - # Using this guide: http://wiki.python.org/moin/WorkingWithTime - # And this conversion guide: http://docs.python.org/library/time.html - - # Turn the date parameter into a tuple (struct_time) that can then be - # manipulated into a long value of seconds. During the conversion from - # struct_time to long, the source date in UTC, and so it follows that the - # correct transformation is calendar.timegm() - self.seconds = calendar.timegm(dt.utctimetuple()) - self.nanos = dt.microsecond * _NANOS_PER_MICROSECOND - - -class Duration(object): - """Class for Duration message type.""" - - __slots__ = () - - def ToJsonString(self): - """Converts Duration to string format. - - Returns: - A string converted from self. 
The string format will contains - 3, 6, or 9 fractional digits depending on the precision required to - represent the exact Duration value. For example: "1s", "1.010s", - "1.000000100s", "-3.100s" - """ - _CheckDurationValid(self.seconds, self.nanos) - if self.seconds < 0 or self.nanos < 0: - result = '-' - seconds = - self.seconds + int((0 - self.nanos) // 1e9) - nanos = (0 - self.nanos) % 1e9 - else: - result = '' - seconds = self.seconds + int(self.nanos // 1e9) - nanos = self.nanos % 1e9 - result += '%d' % seconds - if (nanos % 1e9) == 0: - # If there are 0 fractional digits, the fractional - # point '.' should be omitted when serializing. - return result + 's' - if (nanos % 1e6) == 0: - # Serialize 3 fractional digits. - return result + '.%03ds' % (nanos / 1e6) - if (nanos % 1e3) == 0: - # Serialize 6 fractional digits. - return result + '.%06ds' % (nanos / 1e3) - # Serialize 9 fractional digits. - return result + '.%09ds' % nanos - - def FromJsonString(self, value): - """Converts a string to Duration. - - Args: - value: A string to be converted. The string must end with 's'. Any - fractional digits (or none) are accepted as long as they fit into - precision. For example: "1s", "1.01s", "1.0000001s", "-3.100s - - Raises: - ValueError: On parsing problems. - """ - if not isinstance(value, str): - raise ValueError('Duration JSON value not a string: {!r}'.format(value)) - if len(value) < 1 or value[-1] != 's': - raise ValueError( - 'Duration must end with letter "s": {0}.'.format(value)) - try: - pos = value.find('.') - if pos == -1: - seconds = int(value[:-1]) - nanos = 0 - else: - seconds = int(value[:pos]) - if value[0] == '-': - nanos = int(round(float('-0{0}'.format(value[pos: -1])) *1e9)) - else: - nanos = int(round(float('0{0}'.format(value[pos: -1])) *1e9)) - _CheckDurationValid(seconds, nanos) - self.seconds = seconds - self.nanos = nanos - except ValueError as e: - raise ValueError( - 'Couldn\'t parse duration: {0} : {1}.'.format(value, e)) - - def ToNanoseconds(self): - """Converts a Duration to nanoseconds.""" - return self.seconds * _NANOS_PER_SECOND + self.nanos - - def ToMicroseconds(self): - """Converts a Duration to microseconds.""" - micros = _RoundTowardZero(self.nanos, _NANOS_PER_MICROSECOND) - return self.seconds * _MICROS_PER_SECOND + micros - - def ToMilliseconds(self): - """Converts a Duration to milliseconds.""" - millis = _RoundTowardZero(self.nanos, _NANOS_PER_MILLISECOND) - return self.seconds * _MILLIS_PER_SECOND + millis - - def ToSeconds(self): - """Converts a Duration to seconds.""" - return self.seconds - - def FromNanoseconds(self, nanos): - """Converts nanoseconds to Duration.""" - self._NormalizeDuration(nanos // _NANOS_PER_SECOND, - nanos % _NANOS_PER_SECOND) - - def FromMicroseconds(self, micros): - """Converts microseconds to Duration.""" - self._NormalizeDuration( - micros // _MICROS_PER_SECOND, - (micros % _MICROS_PER_SECOND) * _NANOS_PER_MICROSECOND) - - def FromMilliseconds(self, millis): - """Converts milliseconds to Duration.""" - self._NormalizeDuration( - millis // _MILLIS_PER_SECOND, - (millis % _MILLIS_PER_SECOND) * _NANOS_PER_MILLISECOND) - - def FromSeconds(self, seconds): - """Converts seconds to Duration.""" - self.seconds = seconds - self.nanos = 0 - - def ToTimedelta(self): - """Converts Duration to timedelta.""" - return datetime.timedelta( - seconds=self.seconds, microseconds=_RoundTowardZero( - self.nanos, _NANOS_PER_MICROSECOND)) - - def FromTimedelta(self, td): - """Converts timedelta to Duration.""" - 
self._NormalizeDuration(td.seconds + td.days * _SECONDS_PER_DAY, - td.microseconds * _NANOS_PER_MICROSECOND) - - def _NormalizeDuration(self, seconds, nanos): - """Set Duration by seconds and nanos.""" - # Force nanos to be negative if the duration is negative. - if seconds < 0 and nanos > 0: - seconds += 1 - nanos -= _NANOS_PER_SECOND - self.seconds = seconds - self.nanos = nanos - - -def _CheckDurationValid(seconds, nanos): - if seconds < -_DURATION_SECONDS_MAX or seconds > _DURATION_SECONDS_MAX: - raise ValueError( - 'Duration is not valid: Seconds {0} must be in range ' - '[-315576000000, 315576000000].'.format(seconds)) - if nanos <= -_NANOS_PER_SECOND or nanos >= _NANOS_PER_SECOND: - raise ValueError( - 'Duration is not valid: Nanos {0} must be in range ' - '[-999999999, 999999999].'.format(nanos)) - if (nanos < 0 and seconds > 0) or (nanos > 0 and seconds < 0): - raise ValueError( - 'Duration is not valid: Sign mismatch.') - - -def _RoundTowardZero(value, divider): - """Truncates the remainder part after division.""" - # For some languages, the sign of the remainder is implementation - # dependent if any of the operands is negative. Here we enforce - # "rounded toward zero" semantics. For example, for (-5) / 2 an - # implementation may give -3 as the result with the remainder being - # 1. This function ensures we always return -2 (closer to zero). - result = value // divider - remainder = value % divider - if result < 0 and remainder > 0: - return result + 1 - else: - return result - - -class FieldMask(object): - """Class for FieldMask message type.""" - - __slots__ = () - - def ToJsonString(self): - """Converts FieldMask to string according to proto3 JSON spec.""" - camelcase_paths = [] - for path in self.paths: - camelcase_paths.append(_SnakeCaseToCamelCase(path)) - return ','.join(camelcase_paths) - - def FromJsonString(self, value): - """Converts string to FieldMask according to proto3 JSON spec.""" - if not isinstance(value, str): - raise ValueError('FieldMask JSON value not a string: {!r}'.format(value)) - self.Clear() - if value: - for path in value.split(','): - self.paths.append(_CamelCaseToSnakeCase(path)) - - def IsValidForDescriptor(self, message_descriptor): - """Checks whether the FieldMask is valid for Message Descriptor.""" - for path in self.paths: - if not _IsValidPath(message_descriptor, path): - return False - return True - - def AllFieldsFromDescriptor(self, message_descriptor): - """Gets all direct fields of Message Descriptor to FieldMask.""" - self.Clear() - for field in message_descriptor.fields: - self.paths.append(field.name) - - def CanonicalFormFromMask(self, mask): - """Converts a FieldMask to the canonical form. - - Removes paths that are covered by another path. For example, - "foo.bar" is covered by "foo" and will be removed if "foo" - is also in the FieldMask. Then sorts all paths in alphabetical order. - - Args: - mask: The original FieldMask to be converted. 
- """ - tree = _FieldMaskTree(mask) - tree.ToFieldMask(self) - - def Union(self, mask1, mask2): - """Merges mask1 and mask2 into this FieldMask.""" - _CheckFieldMaskMessage(mask1) - _CheckFieldMaskMessage(mask2) - tree = _FieldMaskTree(mask1) - tree.MergeFromFieldMask(mask2) - tree.ToFieldMask(self) - - def Intersect(self, mask1, mask2): - """Intersects mask1 and mask2 into this FieldMask.""" - _CheckFieldMaskMessage(mask1) - _CheckFieldMaskMessage(mask2) - tree = _FieldMaskTree(mask1) - intersection = _FieldMaskTree() - for path in mask2.paths: - tree.IntersectPath(path, intersection) - intersection.ToFieldMask(self) - - def MergeMessage( - self, source, destination, - replace_message_field=False, replace_repeated_field=False): - """Merges fields specified in FieldMask from source to destination. - - Args: - source: Source message. - destination: The destination message to be merged into. - replace_message_field: Replace message field if True. Merge message - field if False. - replace_repeated_field: Replace repeated field if True. Append - elements of repeated field if False. - """ - tree = _FieldMaskTree(self) - tree.MergeMessage( - source, destination, replace_message_field, replace_repeated_field) - - -def _IsValidPath(message_descriptor, path): - """Checks whether the path is valid for Message Descriptor.""" - parts = path.split('.') - last = parts.pop() - for name in parts: - field = message_descriptor.fields_by_name.get(name) - if (field is None or - field.label == FieldDescriptor.LABEL_REPEATED or - field.type != FieldDescriptor.TYPE_MESSAGE): - return False - message_descriptor = field.message_type - return last in message_descriptor.fields_by_name - - -def _CheckFieldMaskMessage(message): - """Raises ValueError if message is not a FieldMask.""" - message_descriptor = message.DESCRIPTOR - if (message_descriptor.name != 'FieldMask' or - message_descriptor.file.name != 'google/protobuf/field_mask.proto'): - raise ValueError('Message {0} is not a FieldMask.'.format( - message_descriptor.full_name)) - - -def _SnakeCaseToCamelCase(path_name): - """Converts a path name from snake_case to camelCase.""" - result = [] - after_underscore = False - for c in path_name: - if c.isupper(): - raise ValueError( - 'Fail to print FieldMask to Json string: Path name ' - '{0} must not contain uppercase letters.'.format(path_name)) - if after_underscore: - if c.islower(): - result.append(c.upper()) - after_underscore = False - else: - raise ValueError( - 'Fail to print FieldMask to Json string: The ' - 'character after a "_" must be a lowercase letter ' - 'in path name {0}.'.format(path_name)) - elif c == '_': - after_underscore = True - else: - result += c - - if after_underscore: - raise ValueError('Fail to print FieldMask to Json string: Trailing "_" ' - 'in path name {0}.'.format(path_name)) - return ''.join(result) - - -def _CamelCaseToSnakeCase(path_name): - """Converts a field name from camelCase to snake_case.""" - result = [] - for c in path_name: - if c == '_': - raise ValueError('Fail to parse FieldMask: Path name ' - '{0} must not contain "_"s.'.format(path_name)) - if c.isupper(): - result += '_' - result += c.lower() - else: - result += c - return ''.join(result) - - -class _FieldMaskTree(object): - """Represents a FieldMask in a tree structure. - - For example, given a FieldMask "foo.bar,foo.baz,bar.baz", - the FieldMaskTree will be: - [_root] -+- foo -+- bar - | | - | +- baz - | - +- bar --- baz - In the tree, each leaf node represents a field path. 
- """ - - __slots__ = ('_root',) - - def __init__(self, field_mask=None): - """Initializes the tree by FieldMask.""" - self._root = {} - if field_mask: - self.MergeFromFieldMask(field_mask) - - def MergeFromFieldMask(self, field_mask): - """Merges a FieldMask to the tree.""" - for path in field_mask.paths: - self.AddPath(path) - - def AddPath(self, path): - """Adds a field path into the tree. - - If the field path to add is a sub-path of an existing field path - in the tree (i.e., a leaf node), it means the tree already matches - the given path so nothing will be added to the tree. If the path - matches an existing non-leaf node in the tree, that non-leaf node - will be turned into a leaf node with all its children removed because - the path matches all the node's children. Otherwise, a new path will - be added. - - Args: - path: The field path to add. - """ - node = self._root - for name in path.split('.'): - if name not in node: - node[name] = {} - elif not node[name]: - # Pre-existing empty node implies we already have this entire tree. - return - node = node[name] - # Remove any sub-trees we might have had. - node.clear() - - def ToFieldMask(self, field_mask): - """Converts the tree to a FieldMask.""" - field_mask.Clear() - _AddFieldPaths(self._root, '', field_mask) - - def IntersectPath(self, path, intersection): - """Calculates the intersection part of a field path with this tree. - - Args: - path: The field path to calculates. - intersection: The out tree to record the intersection part. - """ - node = self._root - for name in path.split('.'): - if name not in node: - return - elif not node[name]: - intersection.AddPath(path) - return - node = node[name] - intersection.AddLeafNodes(path, node) - - def AddLeafNodes(self, prefix, node): - """Adds leaf nodes begin with prefix to this tree.""" - if not node: - self.AddPath(prefix) - for name in node: - child_path = prefix + '.' + name - self.AddLeafNodes(child_path, node[name]) - - def MergeMessage( - self, source, destination, - replace_message, replace_repeated): - """Merge all fields specified by this tree from source to destination.""" - _MergeMessage( - self._root, source, destination, replace_message, replace_repeated) - - -def _StrConvert(value): - """Converts value to str if it is not.""" - # This file is imported by c extension and some methods like ClearField - # requires string for the field name. py2/py3 has different text - # type and may use unicode. - if not isinstance(value, str): - return value.encode('utf-8') - return value - - -def _MergeMessage( - node, source, destination, replace_message, replace_repeated): - """Merge all fields specified by a sub-tree from source to destination.""" - source_descriptor = source.DESCRIPTOR - for name in node: - child = node[name] - field = source_descriptor.fields_by_name[name] - if field is None: - raise ValueError('Error: Can\'t find field {0} in message {1}.'.format( - name, source_descriptor.full_name)) - if child: - # Sub-paths are only allowed for singular message fields. 
- if (field.label == FieldDescriptor.LABEL_REPEATED or - field.cpp_type != FieldDescriptor.CPPTYPE_MESSAGE): - raise ValueError('Error: Field {0} in message {1} is not a singular ' - 'message field and cannot have sub-fields.'.format( - name, source_descriptor.full_name)) - if source.HasField(name): - _MergeMessage( - child, getattr(source, name), getattr(destination, name), - replace_message, replace_repeated) - continue - if field.label == FieldDescriptor.LABEL_REPEATED: - if replace_repeated: - destination.ClearField(_StrConvert(name)) - repeated_source = getattr(source, name) - repeated_destination = getattr(destination, name) - repeated_destination.MergeFrom(repeated_source) - else: - if field.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE: - if replace_message: - destination.ClearField(_StrConvert(name)) - if source.HasField(name): - getattr(destination, name).MergeFrom(getattr(source, name)) - else: - setattr(destination, name, getattr(source, name)) - - -def _AddFieldPaths(node, prefix, field_mask): - """Adds the field paths descended from node to field_mask.""" - if not node and prefix: - field_mask.paths.append(prefix) - return - for name in sorted(node): - if prefix: - child_path = prefix + '.' + name - else: - child_path = name - _AddFieldPaths(node[name], child_path, field_mask) - - -def _SetStructValue(struct_value, value): - if value is None: - struct_value.null_value = 0 - elif isinstance(value, bool): - # Note: this check must come before the number check because in Python - # True and False are also considered numbers. - struct_value.bool_value = value - elif isinstance(value, str): - struct_value.string_value = value - elif isinstance(value, (int, float)): - struct_value.number_value = value - elif isinstance(value, (dict, Struct)): - struct_value.struct_value.Clear() - struct_value.struct_value.update(value) - elif isinstance(value, (list, ListValue)): - struct_value.list_value.Clear() - struct_value.list_value.extend(value) - else: - raise ValueError('Unexpected type') - - -def _GetStructValue(struct_value): - which = struct_value.WhichOneof('kind') - if which == 'struct_value': - return struct_value.struct_value - elif which == 'null_value': - return None - elif which == 'number_value': - return struct_value.number_value - elif which == 'string_value': - return struct_value.string_value - elif which == 'bool_value': - return struct_value.bool_value - elif which == 'list_value': - return struct_value.list_value - elif which is None: - raise ValueError('Value not set') - - -class Struct(object): - """Class for Struct message type.""" - - __slots__ = () - - def __getitem__(self, key): - return _GetStructValue(self.fields[key]) - - def __contains__(self, item): - return item in self.fields - - def __setitem__(self, key, value): - _SetStructValue(self.fields[key], value) - - def __delitem__(self, key): - del self.fields[key] - - def __len__(self): - return len(self.fields) - - def __iter__(self): - return iter(self.fields) - - def keys(self): # pylint: disable=invalid-name - return self.fields.keys() - - def values(self): # pylint: disable=invalid-name - return [self[key] for key in self] - - def items(self): # pylint: disable=invalid-name - return [(key, self[key]) for key in self] - - def get_or_create_list(self, key): - """Returns a list for this key, creating if it didn't exist already.""" - if not self.fields[key].HasField('list_value'): - # Clear will mark list_value modified which will indeed create a list. 
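Wired together, the `_SetStructValue`/`_GetStructValue` helpers let `Struct` behave like a mutable mapping over JSON-ish values:

```python
from google.protobuf import struct_pb2

s = struct_pb2.Struct()
s.update({'name': 'nuke', 'enabled': True, 'weight': 1.5, 'tags': ['a', 'b']})
assert s['name'] == 'nuke'
assert s['enabled'] is True            # bool is checked before number (bool subclasses int)
assert list(s['tags']) == ['a', 'b']   # stored as a nested ListValue
opts = s.get_or_create_struct('opts')  # Clear() marks the field present, creating it lazily
opts['level'] = 3
assert s['opts']['level'] == 3.0       # scalars come back as number_value floats
```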
- self.fields[key].list_value.Clear() - return self.fields[key].list_value - - def get_or_create_struct(self, key): - """Returns a struct for this key, creating if it didn't exist already.""" - if not self.fields[key].HasField('struct_value'): - # Clear will mark struct_value modified which will indeed create a struct. - self.fields[key].struct_value.Clear() - return self.fields[key].struct_value - - def update(self, dictionary): # pylint: disable=invalid-name - for key, value in dictionary.items(): - _SetStructValue(self.fields[key], value) - -collections.abc.MutableMapping.register(Struct) - - -class ListValue(object): - """Class for ListValue message type.""" - - __slots__ = () - - def __len__(self): - return len(self.values) - - def append(self, value): - _SetStructValue(self.values.add(), value) - - def extend(self, elem_seq): - for value in elem_seq: - self.append(value) - - def __getitem__(self, index): - """Retrieves item by the specified index.""" - return _GetStructValue(self.values.__getitem__(index)) - - def __setitem__(self, index, value): - _SetStructValue(self.values.__getitem__(index), value) - - def __delitem__(self, key): - del self.values[key] - - def items(self): - for i in range(len(self)): - yield self[i] - - def add_struct(self): - """Appends and returns a struct value as the next value in the list.""" - struct_value = self.values.add().struct_value - # Clear will mark struct_value modified which will indeed create a struct. - struct_value.Clear() - return struct_value - - def add_list(self): - """Appends and returns a list value as the next value in the list.""" - list_value = self.values.add().list_value - # Clear will mark list_value modified which will indeed create a list. - list_value.Clear() - return list_value - -collections.abc.MutableSequence.register(ListValue) - - -WKTBASES = { - 'google.protobuf.Any': Any, - 'google.protobuf.Duration': Duration, - 'google.protobuf.FieldMask': FieldMask, - 'google.protobuf.ListValue': ListValue, - 'google.protobuf.Struct': Struct, - 'google.protobuf.Timestamp': Timestamp, -} diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/wire_format.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/wire_format.py deleted file mode 100644 index 883f525585..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/internal/wire_format.py +++ /dev/null @@ -1,268 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. -# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-"""Constants and static functions to support protocol buffer wire format."""
-
-__author__ = 'robinson@google.com (Will Robinson)'
-
-import struct
-from google.protobuf import descriptor
-from google.protobuf import message
-
-
-TAG_TYPE_BITS = 3  # Number of bits used to hold type info in a proto tag.
-TAG_TYPE_MASK = (1 << TAG_TYPE_BITS) - 1  # 0x7
-
-# These numbers identify the wire type of a protocol buffer value.
-# We use the least-significant TAG_TYPE_BITS bits of the varint-encoded
-# tag-and-type to store one of these WIRETYPE_* constants.
-# These values must match WireType enum in google/protobuf/wire_format.h.
-WIRETYPE_VARINT = 0
-WIRETYPE_FIXED64 = 1
-WIRETYPE_LENGTH_DELIMITED = 2
-WIRETYPE_START_GROUP = 3
-WIRETYPE_END_GROUP = 4
-WIRETYPE_FIXED32 = 5
-_WIRETYPE_MAX = 5
-
-
-# Bounds for various integer types.
-INT32_MAX = int((1 << 31) - 1)
-INT32_MIN = int(-(1 << 31))
-UINT32_MAX = (1 << 32) - 1
-
-INT64_MAX = (1 << 63) - 1
-INT64_MIN = -(1 << 63)
-UINT64_MAX = (1 << 64) - 1
-
-# "struct" format strings that will encode/decode the specified formats.
-FORMAT_UINT32_LITTLE_ENDIAN = '<I'
-FORMAT_UINT64_LITTLE_ENDIAN = '<Q'
-FORMAT_FLOAT_LITTLE_ENDIAN = '<f'
-FORMAT_DOUBLE_LITTLE_ENDIAN = '<d'
-
-
-# We'll have to provide alternate implementations of AppendLittleEndian*() on
-# any architectures where these checks fail.
-if struct.calcsize(FORMAT_UINT32_LITTLE_ENDIAN) != 4:
-  raise AssertionError('Format "I" is not a 32-bit number.')
-if struct.calcsize(FORMAT_UINT64_LITTLE_ENDIAN) != 8:
-  raise AssertionError('Format "Q" is not a 64-bit number.')
-
-
-def PackTag(field_number, wire_type):
-  """Returns an unsigned 32-bit integer that encodes the field number and
-  wire type information in standard protocol message wire format.
-
-  Args:
-    field_number: Expected to be an integer in the range [1, 1 << 29)
-    wire_type: One of the WIRETYPE_* constants.
-  """
-  if not 0 <= wire_type <= _WIRETYPE_MAX:
-    raise message.EncodeError('Unknown wire type: %d' % wire_type)
-  return (field_number << TAG_TYPE_BITS) | wire_type
-
-
-def UnpackTag(tag):
-  """The inverse of PackTag().  Given an unsigned 32-bit number,
-  returns a (field_number, wire_type) tuple.
-  """
-  return (tag >> TAG_TYPE_BITS), (tag & TAG_TYPE_MASK)
-
-
-def ZigZagEncode(value):
-  """ZigZag Transform:  Encodes signed integers so that they can be
-  effectively used with varint encoding.  See wire_format.h for
-  more details.
-  """
-  if value >= 0:
-    return value << 1
-  return (value << 1) ^ (~0)
-
-
-def ZigZagDecode(value):
-  """Inverse of ZigZagEncode()."""
-  if not value & 0x1:
-    return value >> 1
-  return (value >> 1) ^ (~0)
-
-
-
-# The *ByteSize() functions below return the number of bytes required to
-# serialize "field number + type" information and then serialize the value.
-
-
-def Int32ByteSize(field_number, int32):
-  return Int64ByteSize(field_number, int32)
-
-
-def Int32ByteSizeNoTag(int32):
-  return _VarUInt64ByteSizeNoTag(0xffffffffffffffff & int32)
-
-
-def Int64ByteSize(field_number, int64):
-  # Have to convert to uint before calling UInt64ByteSize().
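# A quick arithmetic check of the tag and ZigZag helpers above (a sketch;
# pure Python, no proto runtime required, assuming the upstream definitions):
tag = PackTag(field_number=5, wire_type=WIRETYPE_VARINT)
assert UnpackTag(tag) == (5, WIRETYPE_VARINT)

# ZigZag interleaves negatives with positives: 0, -1, 1, -2, 2 -> 0, 1, 2, 3, 4
assert [ZigZagEncode(v) for v in (0, -1, 1, -2, 2)] == [0, 1, 2, 3, 4]
assert ZigZagDecode(ZigZagEncode(-42)) == -42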
- return UInt64ByteSize(field_number, 0xffffffffffffffff & int64) - - -def UInt32ByteSize(field_number, uint32): - return UInt64ByteSize(field_number, uint32) - - -def UInt64ByteSize(field_number, uint64): - return TagByteSize(field_number) + _VarUInt64ByteSizeNoTag(uint64) - - -def SInt32ByteSize(field_number, int32): - return UInt32ByteSize(field_number, ZigZagEncode(int32)) - - -def SInt64ByteSize(field_number, int64): - return UInt64ByteSize(field_number, ZigZagEncode(int64)) - - -def Fixed32ByteSize(field_number, fixed32): - return TagByteSize(field_number) + 4 - - -def Fixed64ByteSize(field_number, fixed64): - return TagByteSize(field_number) + 8 - - -def SFixed32ByteSize(field_number, sfixed32): - return TagByteSize(field_number) + 4 - - -def SFixed64ByteSize(field_number, sfixed64): - return TagByteSize(field_number) + 8 - - -def FloatByteSize(field_number, flt): - return TagByteSize(field_number) + 4 - - -def DoubleByteSize(field_number, double): - return TagByteSize(field_number) + 8 - - -def BoolByteSize(field_number, b): - return TagByteSize(field_number) + 1 - - -def EnumByteSize(field_number, enum): - return UInt32ByteSize(field_number, enum) - - -def StringByteSize(field_number, string): - return BytesByteSize(field_number, string.encode('utf-8')) - - -def BytesByteSize(field_number, b): - return (TagByteSize(field_number) - + _VarUInt64ByteSizeNoTag(len(b)) - + len(b)) - - -def GroupByteSize(field_number, message): - return (2 * TagByteSize(field_number) # START and END group. - + message.ByteSize()) - - -def MessageByteSize(field_number, message): - return (TagByteSize(field_number) - + _VarUInt64ByteSizeNoTag(message.ByteSize()) - + message.ByteSize()) - - -def MessageSetItemByteSize(field_number, msg): - # First compute the sizes of the tags. - # There are 2 tags for the beginning and ending of the repeated group, that - # is field number 1, one with field number 2 (type_id) and one with field - # number 3 (message). - total_size = (2 * TagByteSize(1) + TagByteSize(2) + TagByteSize(3)) - - # Add the number of bytes for type_id. - total_size += _VarUInt64ByteSizeNoTag(field_number) - - message_size = msg.ByteSize() - - # The number of bytes for encoding the length of the message. - total_size += _VarUInt64ByteSizeNoTag(message_size) - - # The size of the message. - total_size += message_size - return total_size - - -def TagByteSize(field_number): - """Returns the bytes required to serialize a tag with this field number.""" - # Just pass in type 0, since the type won't affect the tag+type size. - return _VarUInt64ByteSizeNoTag(PackTag(field_number, 0)) - - -# Private helper function for the *ByteSize() functions above. - -def _VarUInt64ByteSizeNoTag(uint64): - """Returns the number of bytes required to serialize a single varint - using boundary value comparisons. (unrolled loop optimization -WPierce) - uint64 must be unsigned. 
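# The size helpers above compose additively: a length-delimited field costs
# tag + length-varint + payload bytes. A spot check for field number 1,
# assuming the module-level helpers above:
assert TagByteSize(1) == 1                    # varint 0x08 -> one byte
assert BytesByteSize(1, b'abc') == 1 + 1 + 3  # tag + len varint + payload
assert StringByteSize(1, 'abc') == BytesByteSize(1, b'abc')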
- """ - if uint64 <= 0x7f: return 1 - if uint64 <= 0x3fff: return 2 - if uint64 <= 0x1fffff: return 3 - if uint64 <= 0xfffffff: return 4 - if uint64 <= 0x7ffffffff: return 5 - if uint64 <= 0x3ffffffffff: return 6 - if uint64 <= 0x1ffffffffffff: return 7 - if uint64 <= 0xffffffffffffff: return 8 - if uint64 <= 0x7fffffffffffffff: return 9 - if uint64 > UINT64_MAX: - raise message.EncodeError('Value out of range: %d' % uint64) - return 10 - - -NON_PACKABLE_TYPES = ( - descriptor.FieldDescriptor.TYPE_STRING, - descriptor.FieldDescriptor.TYPE_GROUP, - descriptor.FieldDescriptor.TYPE_MESSAGE, - descriptor.FieldDescriptor.TYPE_BYTES -) - - -def IsTypePackable(field_type): - """Return true iff packable = true is valid for fields of this type. - - Args: - field_type: a FieldDescriptor::Type value. - - Returns: - True iff fields of this type are packable. - """ - return field_type not in NON_PACKABLE_TYPES diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/json_format.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/json_format.py deleted file mode 100644 index 5024ed89d7..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/json_format.py +++ /dev/null @@ -1,912 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. -# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -"""Contains routines for printing protocol messages in JSON format. - -Simple usage example: - - # Create a proto object and serialize it to a json format string. - message = my_proto_pb2.MyMessage(foo='bar') - json_string = json_format.MessageToJson(message) - - # Parse a json format string to proto object. 
- message = json_format.Parse(json_string, my_proto_pb2.MyMessage()) -""" - -__author__ = 'jieluo@google.com (Jie Luo)' - - -import base64 -from collections import OrderedDict -import json -import math -from operator import methodcaller -import re -import sys - -from google.protobuf.internal import type_checkers -from google.protobuf import descriptor -from google.protobuf import symbol_database - - -_TIMESTAMPFOMAT = '%Y-%m-%dT%H:%M:%S' -_INT_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_INT32, - descriptor.FieldDescriptor.CPPTYPE_UINT32, - descriptor.FieldDescriptor.CPPTYPE_INT64, - descriptor.FieldDescriptor.CPPTYPE_UINT64]) -_INT64_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_INT64, - descriptor.FieldDescriptor.CPPTYPE_UINT64]) -_FLOAT_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_FLOAT, - descriptor.FieldDescriptor.CPPTYPE_DOUBLE]) -_INFINITY = 'Infinity' -_NEG_INFINITY = '-Infinity' -_NAN = 'NaN' - -_UNPAIRED_SURROGATE_PATTERN = re.compile( - u'[\ud800-\udbff](?![\udc00-\udfff])|(? self.max_recursion_depth: - raise ParseError('Message too deep. Max recursion depth is {0}'.format( - self.max_recursion_depth)) - message_descriptor = message.DESCRIPTOR - full_name = message_descriptor.full_name - if not path: - path = message_descriptor.name - if _IsWrapperMessage(message_descriptor): - self._ConvertWrapperMessage(value, message, path) - elif full_name in _WKTJSONMETHODS: - methodcaller(_WKTJSONMETHODS[full_name][1], value, message, path)(self) - else: - self._ConvertFieldValuePair(value, message, path) - self.recursion_depth -= 1 - - def _ConvertFieldValuePair(self, js, message, path): - """Convert field value pairs into regular message. - - Args: - js: A JSON object to convert the field value pairs. - message: A regular protocol message to record the data. - path: parent path to log parse error info. - - Raises: - ParseError: In case of problems converting. - """ - names = [] - message_descriptor = message.DESCRIPTOR - fields_by_json_name = dict((f.json_name, f) - for f in message_descriptor.fields) - for name in js: - try: - field = fields_by_json_name.get(name, None) - if not field: - field = message_descriptor.fields_by_name.get(name, None) - if not field and _VALID_EXTENSION_NAME.match(name): - if not message_descriptor.is_extendable: - raise ParseError( - 'Message type {0} does not have extensions at {1}'.format( - message_descriptor.full_name, path)) - identifier = name[1:-1] # strip [] brackets - # pylint: disable=protected-access - field = message.Extensions._FindExtensionByName(identifier) - # pylint: enable=protected-access - if not field: - # Try looking for extension by the message type name, dropping the - # field name following the final . separator in full_name. - identifier = '.'.join(identifier.split('.')[:-1]) - # pylint: disable=protected-access - field = message.Extensions._FindExtensionByName(identifier) - # pylint: enable=protected-access - if not field: - if self.ignore_unknown_fields: - continue - raise ParseError( - ('Message type "{0}" has no field named "{1}" at "{2}".\n' - ' Available Fields(except extensions): "{3}"').format( - message_descriptor.full_name, name, path, - [f.json_name for f in message_descriptor.fields])) - if name in names: - raise ParseError('Message type "{0}" should not have multiple ' - '"{1}" fields at "{2}".'.format( - message.DESCRIPTOR.full_name, name, path)) - names.append(name) - value = js[name] - # Check no other oneof field is parsed. 
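# How the unknown-field branch above behaves in practice. A sketch: my_pb2 is
# a hypothetical generated module; any compiled message type works here.
from google.protobuf import json_format

msg = my_pb2.Thing()  # hypothetical message with a 'name' field
json_format.Parse('{"name": "a", "bogus": 1}', msg,
                  ignore_unknown_fields=True)  # "bogus" is silently skipped
try:
    json_format.Parse('{"bogus": 1}', msg)     # strict mode
except json_format.ParseError as exc:
    print(exc)  # names the unknown field and lists the available ones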
- if field.containing_oneof is not None and value is not None: - oneof_name = field.containing_oneof.name - if oneof_name in names: - raise ParseError('Message type "{0}" should not have multiple ' - '"{1}" oneof fields at "{2}".'.format( - message.DESCRIPTOR.full_name, oneof_name, - path)) - names.append(oneof_name) - - if value is None: - if (field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE - and field.message_type.full_name == 'google.protobuf.Value'): - sub_message = getattr(message, field.name) - sub_message.null_value = 0 - elif (field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM - and field.enum_type.full_name == 'google.protobuf.NullValue'): - setattr(message, field.name, 0) - else: - message.ClearField(field.name) - continue - - # Parse field value. - if _IsMapEntry(field): - message.ClearField(field.name) - self._ConvertMapFieldValue(value, message, field, - '{0}.{1}'.format(path, name)) - elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED: - message.ClearField(field.name) - if not isinstance(value, list): - raise ParseError('repeated field {0} must be in [] which is ' - '{1} at {2}'.format(name, value, path)) - if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: - # Repeated message field. - for index, item in enumerate(value): - sub_message = getattr(message, field.name).add() - # None is a null_value in Value. - if (item is None and - sub_message.DESCRIPTOR.full_name != 'google.protobuf.Value'): - raise ParseError('null is not allowed to be used as an element' - ' in a repeated field at {0}.{1}[{2}]'.format( - path, name, index)) - self.ConvertMessage(item, sub_message, - '{0}.{1}[{2}]'.format(path, name, index)) - else: - # Repeated scalar field. - for index, item in enumerate(value): - if item is None: - raise ParseError('null is not allowed to be used as an element' - ' in a repeated field at {0}.{1}[{2}]'.format( - path, name, index)) - getattr(message, field.name).append( - _ConvertScalarFieldValue( - item, field, '{0}.{1}[{2}]'.format(path, name, index))) - elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: - if field.is_extension: - sub_message = message.Extensions[field] - else: - sub_message = getattr(message, field.name) - sub_message.SetInParent() - self.ConvertMessage(value, sub_message, '{0}.{1}'.format(path, name)) - else: - if field.is_extension: - message.Extensions[field] = _ConvertScalarFieldValue( - value, field, '{0}.{1}'.format(path, name)) - else: - setattr( - message, field.name, - _ConvertScalarFieldValue(value, field, - '{0}.{1}'.format(path, name))) - except ParseError as e: - if field and field.containing_oneof is None: - raise ParseError('Failed to parse {0} field: {1}.'.format(name, e)) - else: - raise ParseError(str(e)) - except ValueError as e: - raise ParseError('Failed to parse {0} field: {1}.'.format(name, e)) - except TypeError as e: - raise ParseError('Failed to parse {0} field: {1}.'.format(name, e)) - - def _ConvertAnyMessage(self, value, message, path): - """Convert a JSON representation into Any message.""" - if isinstance(value, dict) and not value: - return - try: - type_url = value['@type'] - except KeyError: - raise ParseError( - '@type is missing when parsing any message at {0}'.format(path)) - - try: - sub_message = _CreateMessageFromTypeUrl(type_url, self.descriptor_pool) - except TypeError as e: - raise ParseError('{0} at {1}'.format(e, path)) - message_descriptor = sub_message.DESCRIPTOR - full_name = message_descriptor.full_name - if 
_IsWrapperMessage(message_descriptor): - self._ConvertWrapperMessage(value['value'], sub_message, - '{0}.value'.format(path)) - elif full_name in _WKTJSONMETHODS: - methodcaller(_WKTJSONMETHODS[full_name][1], value['value'], sub_message, - '{0}.value'.format(path))( - self) - else: - del value['@type'] - self._ConvertFieldValuePair(value, sub_message, path) - value['@type'] = type_url - # Sets Any message - message.value = sub_message.SerializeToString() - message.type_url = type_url - - def _ConvertGenericMessage(self, value, message, path): - """Convert a JSON representation into message with FromJsonString.""" - # Duration, Timestamp, FieldMask have a FromJsonString method to do the - # conversion. Users can also call the method directly. - try: - message.FromJsonString(value) - except ValueError as e: - raise ParseError('{0} at {1}'.format(e, path)) - - def _ConvertValueMessage(self, value, message, path): - """Convert a JSON representation into Value message.""" - if isinstance(value, dict): - self._ConvertStructMessage(value, message.struct_value, path) - elif isinstance(value, list): - self._ConvertListValueMessage(value, message.list_value, path) - elif value is None: - message.null_value = 0 - elif isinstance(value, bool): - message.bool_value = value - elif isinstance(value, str): - message.string_value = value - elif isinstance(value, _INT_OR_FLOAT): - message.number_value = value - else: - raise ParseError('Value {0} has unexpected type {1} at {2}'.format( - value, type(value), path)) - - def _ConvertListValueMessage(self, value, message, path): - """Convert a JSON representation into ListValue message.""" - if not isinstance(value, list): - raise ParseError('ListValue must be in [] which is {0} at {1}'.format( - value, path)) - message.ClearField('values') - for index, item in enumerate(value): - self._ConvertValueMessage(item, message.values.add(), - '{0}[{1}]'.format(path, index)) - - def _ConvertStructMessage(self, value, message, path): - """Convert a JSON representation into Struct message.""" - if not isinstance(value, dict): - raise ParseError('Struct must be in a dict which is {0} at {1}'.format( - value, path)) - # Clear will mark the struct as modified so it will be created even if - # there are no values. - message.Clear() - for key in value: - self._ConvertValueMessage(value[key], message.fields[key], - '{0}.{1}'.format(path, key)) - return - - def _ConvertWrapperMessage(self, value, message, path): - """Convert a JSON representation into Wrapper message.""" - field = message.DESCRIPTOR.fields_by_name['value'] - setattr( - message, 'value', - _ConvertScalarFieldValue(value, field, path='{0}.value'.format(path))) - - def _ConvertMapFieldValue(self, value, message, field, path): - """Convert map field value for a message map field. - - Args: - value: A JSON object to convert the map field value. - message: A protocol message to record the converted data. - field: The descriptor of the map field to be converted. - path: parent path to log parse error info. - - Raises: - ParseError: In case of convert problems. 
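# The Value/Struct converters above dispatch on the JSON type; a short round
# trip through the well-known Value message illustrates it:
from google.protobuf import json_format
from google.protobuf import struct_pb2

v = json_format.Parse('{"a": [1, true, null]}', struct_pb2.Value())
assert v.struct_value['a'][1] is True
print(json_format.MessageToJson(v))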
- """ - if not isinstance(value, dict): - raise ParseError( - 'Map field {0} must be in a dict which is {1} at {2}'.format( - field.name, value, path)) - key_field = field.message_type.fields_by_name['key'] - value_field = field.message_type.fields_by_name['value'] - for key in value: - key_value = _ConvertScalarFieldValue(key, key_field, - '{0}.key'.format(path), True) - if value_field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: - self.ConvertMessage(value[key], - getattr(message, field.name)[key_value], - '{0}[{1}]'.format(path, key_value)) - else: - getattr(message, field.name)[key_value] = _ConvertScalarFieldValue( - value[key], value_field, path='{0}[{1}]'.format(path, key_value)) - - -def _ConvertScalarFieldValue(value, field, path, require_str=False): - """Convert a single scalar field value. - - Args: - value: A scalar value to convert the scalar field value. - field: The descriptor of the field to convert. - path: parent path to log parse error info. - require_str: If True, the field value must be a str. - - Returns: - The converted scalar field value - - Raises: - ParseError: In case of convert problems. - """ - try: - if field.cpp_type in _INT_TYPES: - return _ConvertInteger(value) - elif field.cpp_type in _FLOAT_TYPES: - return _ConvertFloat(value, field) - elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL: - return _ConvertBool(value, require_str) - elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING: - if field.type == descriptor.FieldDescriptor.TYPE_BYTES: - if isinstance(value, str): - encoded = value.encode('utf-8') - else: - encoded = value - # Add extra padding '=' - padded_value = encoded + b'=' * (4 - len(encoded) % 4) - return base64.urlsafe_b64decode(padded_value) - else: - # Checking for unpaired surrogates appears to be unreliable, - # depending on the specific Python version, so we check manually. - if _UNPAIRED_SURROGATE_PATTERN.search(value): - raise ParseError('Unpaired surrogate') - return value - elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM: - # Convert an enum value. - enum_value = field.enum_type.values_by_name.get(value, None) - if enum_value is None: - try: - number = int(value) - enum_value = field.enum_type.values_by_number.get(number, None) - except ValueError: - raise ParseError('Invalid enum value {0} for enum type {1}'.format( - value, field.enum_type.full_name)) - if enum_value is None: - if field.file.syntax == 'proto3': - # Proto3 accepts unknown enums. - return number - raise ParseError('Invalid enum value {0} for enum type {1}'.format( - value, field.enum_type.full_name)) - return enum_value.number - except ParseError as e: - raise ParseError('{0} at {1}'.format(e, path)) - - -def _ConvertInteger(value): - """Convert an integer. - - Args: - value: A scalar value to convert. - - Returns: - The integer value. - - Raises: - ParseError: If an integer couldn't be consumed. 
- """ - if isinstance(value, float) and not value.is_integer(): - raise ParseError('Couldn\'t parse integer: {0}'.format(value)) - - if isinstance(value, str) and value.find(' ') != -1: - raise ParseError('Couldn\'t parse integer: "{0}"'.format(value)) - - if isinstance(value, bool): - raise ParseError('Bool value {0} is not acceptable for ' - 'integer field'.format(value)) - - return int(value) - - -def _ConvertFloat(value, field): - """Convert an floating point number.""" - if isinstance(value, float): - if math.isnan(value): - raise ParseError('Couldn\'t parse NaN, use quoted "NaN" instead') - if math.isinf(value): - if value > 0: - raise ParseError('Couldn\'t parse Infinity or value too large, ' - 'use quoted "Infinity" instead') - else: - raise ParseError('Couldn\'t parse -Infinity or value too small, ' - 'use quoted "-Infinity" instead') - if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_FLOAT: - # pylint: disable=protected-access - if value > type_checkers._FLOAT_MAX: - raise ParseError('Float value too large') - # pylint: disable=protected-access - if value < type_checkers._FLOAT_MIN: - raise ParseError('Float value too small') - if value == 'nan': - raise ParseError('Couldn\'t parse float "nan", use "NaN" instead') - try: - # Assume Python compatible syntax. - return float(value) - except ValueError: - # Check alternative spellings. - if value == _NEG_INFINITY: - return float('-inf') - elif value == _INFINITY: - return float('inf') - elif value == _NAN: - return float('nan') - else: - raise ParseError('Couldn\'t parse float: {0}'.format(value)) - - -def _ConvertBool(value, require_str): - """Convert a boolean value. - - Args: - value: A scalar value to convert. - require_str: If True, value must be a str. - - Returns: - The bool parsed. - - Raises: - ParseError: If a boolean value couldn't be consumed. - """ - if require_str: - if value == 'true': - return True - elif value == 'false': - return False - else: - raise ParseError('Expected "true" or "false", not {0}'.format(value)) - - if not isinstance(value, bool): - raise ParseError('Expected true or false without quotes') - return value - -_WKTJSONMETHODS = { - 'google.protobuf.Any': ['_AnyMessageToJsonObject', - '_ConvertAnyMessage'], - 'google.protobuf.Duration': ['_GenericMessageToJsonObject', - '_ConvertGenericMessage'], - 'google.protobuf.FieldMask': ['_GenericMessageToJsonObject', - '_ConvertGenericMessage'], - 'google.protobuf.ListValue': ['_ListValueMessageToJsonObject', - '_ConvertListValueMessage'], - 'google.protobuf.Struct': ['_StructMessageToJsonObject', - '_ConvertStructMessage'], - 'google.protobuf.Timestamp': ['_GenericMessageToJsonObject', - '_ConvertGenericMessage'], - 'google.protobuf.Value': ['_ValueMessageToJsonObject', - '_ConvertValueMessage'] -} diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/message.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/message.py deleted file mode 100644 index 76c6802f70..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/message.py +++ /dev/null @@ -1,424 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. -# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. 
-# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -# TODO(robinson): We should just make these methods all "pure-virtual" and move -# all implementation out, into reflection.py for now. - - -"""Contains an abstract base class for protocol messages.""" - -__author__ = 'robinson@google.com (Will Robinson)' - -class Error(Exception): - """Base error type for this module.""" - pass - - -class DecodeError(Error): - """Exception raised when deserializing messages.""" - pass - - -class EncodeError(Error): - """Exception raised when serializing messages.""" - pass - - -class Message(object): - - """Abstract base class for protocol messages. - - Protocol message classes are almost always generated by the protocol - compiler. These generated types subclass Message and implement the methods - shown below. - """ - - # TODO(robinson): Link to an HTML document here. - - # TODO(robinson): Document that instances of this class will also - # have an Extensions attribute with __getitem__ and __setitem__. - # Again, not sure how to best convey this. - - # TODO(robinson): Document that the class must also have a static - # RegisterExtension(extension_field) method. - # Not sure how to best express at this point. - - # TODO(robinson): Document these fields and methods. - - __slots__ = [] - - #: The :class:`google.protobuf.descriptor.Descriptor` for this message type. - DESCRIPTOR = None - - def __deepcopy__(self, memo=None): - clone = type(self)() - clone.MergeFrom(self) - return clone - - def __eq__(self, other_msg): - """Recursively compares two messages by value and structure.""" - raise NotImplementedError - - def __ne__(self, other_msg): - # Can't just say self != other_msg, since that would infinitely recurse. :) - return not self == other_msg - - def __hash__(self): - raise TypeError('unhashable object') - - def __str__(self): - """Outputs a human-readable representation of the message.""" - raise NotImplementedError - - def __unicode__(self): - """Outputs a human-readable representation of the message.""" - raise NotImplementedError - - def MergeFrom(self, other_msg): - """Merges the contents of the specified message into current message. - - This method merges the contents of the specified message into the current - message. Singular fields that are set in the specified message overwrite - the corresponding fields in the current message. Repeated fields are - appended. 
Singular sub-messages and groups are recursively merged. - - Args: - other_msg (Message): A message to merge into the current message. - """ - raise NotImplementedError - - def CopyFrom(self, other_msg): - """Copies the content of the specified message into the current message. - - The method clears the current message and then merges the specified - message using MergeFrom. - - Args: - other_msg (Message): A message to copy into the current one. - """ - if self is other_msg: - return - self.Clear() - self.MergeFrom(other_msg) - - def Clear(self): - """Clears all data that was set in the message.""" - raise NotImplementedError - - def SetInParent(self): - """Mark this as present in the parent. - - This normally happens automatically when you assign a field of a - sub-message, but sometimes you want to make the sub-message - present while keeping it empty. If you find yourself using this, - you may want to reconsider your design. - """ - raise NotImplementedError - - def IsInitialized(self): - """Checks if the message is initialized. - - Returns: - bool: The method returns True if the message is initialized (i.e. all of - its required fields are set). - """ - raise NotImplementedError - - # TODO(robinson): MergeFromString() should probably return None and be - # implemented in terms of a helper that returns the # of bytes read. Our - # deserialization routines would use the helper when recursively - # deserializing, but the end user would almost always just want the no-return - # MergeFromString(). - - def MergeFromString(self, serialized): - """Merges serialized protocol buffer data into this message. - - When we find a field in `serialized` that is already present - in this message: - - - If it's a "repeated" field, we append to the end of our list. - - Else, if it's a scalar, we overwrite our field. - - Else, (it's a nonrepeated composite), we recursively merge - into the existing composite. - - Args: - serialized (bytes): Any object that allows us to call - ``memoryview(serialized)`` to access a string of bytes using the - buffer interface. - - Returns: - int: The number of bytes read from `serialized`. - For non-group messages, this will always be `len(serialized)`, - but for messages which are actually groups, this will - generally be less than `len(serialized)`, since we must - stop when we reach an ``END_GROUP`` tag. Note that if - we *do* stop because of an ``END_GROUP`` tag, the number - of bytes returned does not include the bytes - for the ``END_GROUP`` tag information. - - Raises: - DecodeError: if the input cannot be parsed. - """ - # TODO(robinson): Document handling of unknown fields. - # TODO(robinson): When we switch to a helper, this will return None. - raise NotImplementedError - - def ParseFromString(self, serialized): - """Parse serialized protocol buffer data into this message. - - Like :func:`MergeFromString()`, except we clear the object first. - - Raises: - message.DecodeError if the input cannot be parsed. - """ - self.Clear() - return self.MergeFromString(serialized) - - def SerializeToString(self, **kwargs): - """Serializes the protocol message to a binary string. - - Keyword Args: - deterministic (bool): If true, requests deterministic serialization - of the protobuf, with predictable ordering of map keys. - - Returns: - A binary string representation of the message if all of the required - fields in the message are set (i.e. the message is initialized). - - Raises: - EncodeError: if the message isn't initialized (see :func:`IsInitialized`). 
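# The merge rules documented above, demonstrated on a message with a repeated
# field (FieldMask.paths):
from google.protobuf import field_mask_pb2

dst = field_mask_pb2.FieldMask(paths=['a'])
src = field_mask_pb2.FieldMask(paths=['b'])
dst.MergeFrom(src)   # repeated fields are appended
assert list(dst.paths) == ['a', 'b']
dst.CopyFrom(src)    # CopyFrom == Clear() + MergeFrom()
assert list(dst.paths) == ['b']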
- """ - raise NotImplementedError - - def SerializePartialToString(self, **kwargs): - """Serializes the protocol message to a binary string. - - This method is similar to SerializeToString but doesn't check if the - message is initialized. - - Keyword Args: - deterministic (bool): If true, requests deterministic serialization - of the protobuf, with predictable ordering of map keys. - - Returns: - bytes: A serialized representation of the partial message. - """ - raise NotImplementedError - - # TODO(robinson): Decide whether we like these better - # than auto-generated has_foo() and clear_foo() methods - # on the instances themselves. This way is less consistent - # with C++, but it makes reflection-type access easier and - # reduces the number of magically autogenerated things. - # - # TODO(robinson): Be sure to document (and test) exactly - # which field names are accepted here. Are we case-sensitive? - # What do we do with fields that share names with Python keywords - # like 'lambda' and 'yield'? - # - # nnorwitz says: - # """ - # Typically (in python), an underscore is appended to names that are - # keywords. So they would become lambda_ or yield_. - # """ - def ListFields(self): - """Returns a list of (FieldDescriptor, value) tuples for present fields. - - A message field is non-empty if HasField() would return true. A singular - primitive field is non-empty if HasField() would return true in proto2 or it - is non zero in proto3. A repeated field is non-empty if it contains at least - one element. The fields are ordered by field number. - - Returns: - list[tuple(FieldDescriptor, value)]: field descriptors and values - for all fields in the message which are not empty. The values vary by - field type. - """ - raise NotImplementedError - - def HasField(self, field_name): - """Checks if a certain field is set for the message. - - For a oneof group, checks if any field inside is set. Note that if the - field_name is not defined in the message descriptor, :exc:`ValueError` will - be raised. - - Args: - field_name (str): The name of the field to check for presence. - - Returns: - bool: Whether a value has been set for the named field. - - Raises: - ValueError: if the `field_name` is not a member of this message. - """ - raise NotImplementedError - - def ClearField(self, field_name): - """Clears the contents of a given field. - - Inside a oneof group, clears the field set. If the name neither refers to a - defined field or oneof group, :exc:`ValueError` is raised. - - Args: - field_name (str): The name of the field to check for presence. - - Raises: - ValueError: if the `field_name` is not a member of this message. - """ - raise NotImplementedError - - def WhichOneof(self, oneof_group): - """Returns the name of the field that is set inside a oneof group. - - If no field is set, returns None. - - Args: - oneof_group (str): the name of the oneof group to check. - - Returns: - str or None: The name of the group that is set, or None. - - Raises: - ValueError: no group with the given name exists - """ - raise NotImplementedError - - def HasExtension(self, extension_handle): - """Checks if a certain extension is present for this message. - - Extensions are retrieved using the :attr:`Extensions` mapping (if present). - - Args: - extension_handle: The handle for the extension to check. - - Returns: - bool: Whether the extension is present for this message. - - Raises: - KeyError: if the extension is repeated. 
Similar to repeated fields, - there is no separate notion of presence: a "not present" repeated - extension is an empty list. - """ - raise NotImplementedError - - def ClearExtension(self, extension_handle): - """Clears the contents of a given extension. - - Args: - extension_handle: The handle for the extension to clear. - """ - raise NotImplementedError - - def UnknownFields(self): - """Returns the UnknownFieldSet. - - Returns: - UnknownFieldSet: The unknown fields stored in this message. - """ - raise NotImplementedError - - def DiscardUnknownFields(self): - """Clears all fields in the :class:`UnknownFieldSet`. - - This operation is recursive for nested message. - """ - raise NotImplementedError - - def ByteSize(self): - """Returns the serialized size of this message. - - Recursively calls ByteSize() on all contained messages. - - Returns: - int: The number of bytes required to serialize this message. - """ - raise NotImplementedError - - @classmethod - def FromString(cls, s): - raise NotImplementedError - - @staticmethod - def RegisterExtension(extension_handle): - raise NotImplementedError - - def _SetListener(self, message_listener): - """Internal method used by the protocol message implementation. - Clients should not call this directly. - - Sets a listener that this message will call on certain state transitions. - - The purpose of this method is to register back-edges from children to - parents at runtime, for the purpose of setting "has" bits and - byte-size-dirty bits in the parent and ancestor objects whenever a child or - descendant object is modified. - - If the client wants to disconnect this Message from the object tree, she - explicitly sets callback to None. - - If message_listener is None, unregisters any existing listener. Otherwise, - message_listener must implement the MessageListener interface in - internal/message_listener.py, and we discard any listener registered - via a previous _SetListener() call. - """ - raise NotImplementedError - - def __getstate__(self): - """Support the pickle protocol.""" - return dict(serialized=self.SerializePartialToString()) - - def __setstate__(self, state): - """Support the pickle protocol.""" - self.__init__() - serialized = state['serialized'] - # On Python 3, using encoding='latin1' is required for unpickling - # protos pickled by Python 2. - if not isinstance(serialized, bytes): - serialized = serialized.encode('latin1') - self.ParseFromString(serialized) - - def __reduce__(self): - message_descriptor = self.DESCRIPTOR - if message_descriptor.containing_type is None: - return type(self), (), self.__getstate__() - # the message type must be nested. - # Python does not pickle nested classes; use the symbol_database on the - # receiving end. - container = message_descriptor - return (_InternalConstructMessage, (container.full_name,), - self.__getstate__()) - - -def _InternalConstructMessage(full_name): - """Constructs a nested message.""" - from google.protobuf import symbol_database # pylint:disable=g-import-not-at-top - - return symbol_database.Default().GetSymbol(full_name)() diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/message_factory.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/message_factory.py deleted file mode 100644 index 3656fa6874..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/message_factory.py +++ /dev/null @@ -1,185 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. 
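# The __getstate__/__setstate__ pair defined above makes any message picklable
# through its serialized bytes; a quick round trip with a well-known type:
import pickle
from google.protobuf import struct_pb2

v = struct_pb2.Value(string_value='hello')
assert pickle.loads(pickle.dumps(v)).string_value == 'hello'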
-# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -"""Provides a factory class for generating dynamic messages. - -The easiest way to use this class is if you have access to the FileDescriptor -protos containing the messages you want to create you can just do the following: - -message_classes = message_factory.GetMessages(iterable_of_file_descriptors) -my_proto_instance = message_classes['some.proto.package.MessageName']() -""" - -__author__ = 'matthewtoia@google.com (Matt Toia)' - -from google.protobuf.internal import api_implementation -from google.protobuf import descriptor_pool -from google.protobuf import message - -if api_implementation.Type() == 'cpp': - from google.protobuf.pyext import cpp_message as message_impl -else: - from google.protobuf.internal import python_message as message_impl - - -# The type of all Message classes. -_GENERATED_PROTOCOL_MESSAGE_TYPE = message_impl.GeneratedProtocolMessageType - - -class MessageFactory(object): - """Factory for creating Proto2 messages from descriptors in a pool.""" - - def __init__(self, pool=None): - """Initializes a new factory.""" - self.pool = pool or descriptor_pool.DescriptorPool() - - # local cache of all classes built from protobuf descriptors - self._classes = {} - - def GetPrototype(self, descriptor): - """Obtains a proto2 message class based on the passed in descriptor. - - Passing a descriptor with a fully qualified name matching a previous - invocation will cause the same class to be returned. - - Args: - descriptor: The descriptor to build from. - - Returns: - A class describing the passed in descriptor. - """ - if descriptor not in self._classes: - result_class = self.CreatePrototype(descriptor) - # The assignment to _classes is redundant for the base implementation, but - # might avoid confusion in cases where CreatePrototype gets overridden and - # does not call the base implementation. 
- self._classes[descriptor] = result_class - return result_class - return self._classes[descriptor] - - def CreatePrototype(self, descriptor): - """Builds a proto2 message class based on the passed in descriptor. - - Don't call this function directly, it always creates a new class. Call - GetPrototype() instead. This method is meant to be overridden in subblasses - to perform additional operations on the newly constructed class. - - Args: - descriptor: The descriptor to build from. - - Returns: - A class describing the passed in descriptor. - """ - descriptor_name = descriptor.name - result_class = _GENERATED_PROTOCOL_MESSAGE_TYPE( - descriptor_name, - (message.Message,), - { - 'DESCRIPTOR': descriptor, - # If module not set, it wrongly points to message_factory module. - '__module__': None, - }) - result_class._FACTORY = self # pylint: disable=protected-access - # Assign in _classes before doing recursive calls to avoid infinite - # recursion. - self._classes[descriptor] = result_class - for field in descriptor.fields: - if field.message_type: - self.GetPrototype(field.message_type) - for extension in result_class.DESCRIPTOR.extensions: - if extension.containing_type not in self._classes: - self.GetPrototype(extension.containing_type) - extended_class = self._classes[extension.containing_type] - extended_class.RegisterExtension(extension) - return result_class - - def GetMessages(self, files): - """Gets all the messages from a specified file. - - This will find and resolve dependencies, failing if the descriptor - pool cannot satisfy them. - - Args: - files: The file names to extract messages from. - - Returns: - A dictionary mapping proto names to the message classes. This will include - any dependent messages as well as any messages defined in the same file as - a specified message. - """ - result = {} - for file_name in files: - file_desc = self.pool.FindFileByName(file_name) - for desc in file_desc.message_types_by_name.values(): - result[desc.full_name] = self.GetPrototype(desc) - - # While the extension FieldDescriptors are created by the descriptor pool, - # the python classes created in the factory need them to be registered - # explicitly, which is done below. - # - # The call to RegisterExtension will specifically check if the - # extension was already registered on the object and either - # ignore the registration if the original was the same, or raise - # an error if they were different. - - for extension in file_desc.extensions_by_name.values(): - if extension.containing_type not in self._classes: - self.GetPrototype(extension.containing_type) - extended_class = self._classes[extension.containing_type] - extended_class.RegisterExtension(extension) - return result - - -_FACTORY = MessageFactory() - - -def GetMessages(file_protos): - """Builds a dictionary of all the messages available in a set of files. - - Args: - file_protos: Iterable of FileDescriptorProto to build messages out of. - - Returns: - A dictionary mapping proto names to the message classes. This will include - any dependent messages as well as any messages defined in the same file as - a specified message. - """ - # The cpp implementation of the protocol buffer library requires to add the - # message in topological order of the dependency graph. - file_by_name = {file_proto.name: file_proto for file_proto in file_protos} - def _AddFile(file_proto): - for dependency in file_proto.dependency: - if dependency in file_by_name: - # Remove from elements to be visited, in order to cut cycles. 
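# GetPrototype caches by descriptor, so repeated lookups hand back the same
# class object; a sketch against the default descriptor pool:
from google.protobuf import descriptor_pool
from google.protobuf import message_factory
from google.protobuf import struct_pb2  # registers Struct in the default pool

factory = message_factory.MessageFactory(descriptor_pool.Default())
desc = factory.pool.FindMessageTypeByName('google.protobuf.Struct')
assert factory.GetPrototype(desc) is factory.GetPrototype(desc)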
- _AddFile(file_by_name.pop(dependency)) - _FACTORY.pool.Add(file_proto) - while file_by_name: - _AddFile(file_by_name.popitem()[1]) - return _FACTORY.GetMessages([file_proto.name for file_proto in file_protos]) diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/proto_builder.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/proto_builder.py deleted file mode 100644 index a4667ce63e..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/proto_builder.py +++ /dev/null @@ -1,134 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. -# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -"""Dynamic Protobuf class creator.""" - -from collections import OrderedDict -import hashlib -import os - -from google.protobuf import descriptor_pb2 -from google.protobuf import descriptor -from google.protobuf import message_factory - - -def _GetMessageFromFactory(factory, full_name): - """Get a proto class from the MessageFactory by name. - - Args: - factory: a MessageFactory instance. - full_name: str, the fully qualified name of the proto type. - Returns: - A class, for the type identified by full_name. - Raises: - KeyError, if the proto is not found in the factory's descriptor pool. - """ - proto_descriptor = factory.pool.FindMessageTypeByName(full_name) - proto_cls = factory.GetPrototype(proto_descriptor) - return proto_cls - - -def MakeSimpleProtoClass(fields, full_name=None, pool=None): - """Create a Protobuf class whose fields are basic types. - - Note: this doesn't validate field names! - - Args: - fields: dict of {name: field_type} mappings for each field in the proto. If - this is an OrderedDict the order will be maintained, otherwise the - fields will be sorted by name. - full_name: optional str, the fully-qualified name of the proto type. - pool: optional DescriptorPool instance. - Returns: - a class, the new protobuf class with a FileDescriptor. 
- """ - factory = message_factory.MessageFactory(pool=pool) - - if full_name is not None: - try: - proto_cls = _GetMessageFromFactory(factory, full_name) - return proto_cls - except KeyError: - # The factory's DescriptorPool doesn't know about this class yet. - pass - - # Get a list of (name, field_type) tuples from the fields dict. If fields was - # an OrderedDict we keep the order, but otherwise we sort the field to ensure - # consistent ordering. - field_items = fields.items() - if not isinstance(fields, OrderedDict): - field_items = sorted(field_items) - - # Use a consistent file name that is unlikely to conflict with any imported - # proto files. - fields_hash = hashlib.sha1() - for f_name, f_type in field_items: - fields_hash.update(f_name.encode('utf-8')) - fields_hash.update(str(f_type).encode('utf-8')) - proto_file_name = fields_hash.hexdigest() + '.proto' - - # If the proto is anonymous, use the same hash to name it. - if full_name is None: - full_name = ('net.proto2.python.public.proto_builder.AnonymousProto_' + - fields_hash.hexdigest()) - try: - proto_cls = _GetMessageFromFactory(factory, full_name) - return proto_cls - except KeyError: - # The factory's DescriptorPool doesn't know about this class yet. - pass - - # This is the first time we see this proto: add a new descriptor to the pool. - factory.pool.Add( - _MakeFileDescriptorProto(proto_file_name, full_name, field_items)) - return _GetMessageFromFactory(factory, full_name) - - -def _MakeFileDescriptorProto(proto_file_name, full_name, field_items): - """Populate FileDescriptorProto for MessageFactory's DescriptorPool.""" - package, name = full_name.rsplit('.', 1) - file_proto = descriptor_pb2.FileDescriptorProto() - file_proto.name = os.path.join(package.replace('.', '/'), proto_file_name) - file_proto.package = package - desc_proto = file_proto.message_type.add() - desc_proto.name = name - for f_number, (f_name, f_type) in enumerate(field_items, 1): - field_proto = desc_proto.field.add() - field_proto.name = f_name - # # If the number falls in the reserved range, reassign it to the correct - # # number after the range. - if f_number >= descriptor.FieldDescriptor.FIRST_RESERVED_FIELD_NUMBER: - f_number += ( - descriptor.FieldDescriptor.LAST_RESERVED_FIELD_NUMBER - - descriptor.FieldDescriptor.FIRST_RESERVED_FIELD_NUMBER + 1) - field_proto.number = f_number - field_proto.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL - field_proto.type = f_type - return file_proto diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/pyext/__init__.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/pyext/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/pyext/cpp_message.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/pyext/cpp_message.py deleted file mode 100644 index fc8eb32d79..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/pyext/cpp_message.py +++ /dev/null @@ -1,65 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. -# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. 
-# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -"""Protocol message implementation hooks for C++ implementation. - -Contains helper functions used to create protocol message classes from -Descriptor objects at runtime backed by the protocol buffer C++ API. -""" - -__author__ = 'tibell@google.com (Johan Tibell)' - -from google.protobuf.pyext import _message - - -class GeneratedProtocolMessageType(_message.MessageMeta): - - """Metaclass for protocol message classes created at runtime from Descriptors. - - The protocol compiler currently uses this metaclass to create protocol - message classes at runtime. Clients can also manually create their own - classes at runtime, as in this example: - - mydescriptor = Descriptor(.....) - factory = symbol_database.Default() - factory.pool.AddDescriptor(mydescriptor) - MyProtoClass = factory.GetPrototype(mydescriptor) - myproto_instance = MyProtoClass() - myproto.foo_field = 23 - ... - - The above example will not work for nested types. If you wish to include them, - use reflection.MakeClass() instead of manually instantiating the class in - order to create the appropriate class structure. - """ - - # Must be consistent with the protocol-compiler code in - # proto2/compiler/internal/generator.*. - _DESCRIPTOR_KEY = 'DESCRIPTOR' diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/pyext/python_pb2.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/pyext/python_pb2.py deleted file mode 100644 index 2c6ecf4c98..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/pyext/python_pb2.py +++ /dev/null @@ -1,34 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/protobuf/pyext/python.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\"google/protobuf/pyext/python.proto\x12\x1fgoogle.protobuf.python.internal\"\xbc\x02\n\x0cTestAllTypes\x12\\\n\x17repeated_nested_message\x18\x01 \x03(\x0b\x32;.google.protobuf.python.internal.TestAllTypes.NestedMessage\x12\\\n\x17optional_nested_message\x18\x02 \x01(\x0b\x32;.google.protobuf.python.internal.TestAllTypes.NestedMessage\x12\x16\n\x0eoptional_int32\x18\x03 \x01(\x05\x1aX\n\rNestedMessage\x12\n\n\x02\x62\x62\x18\x01 \x01(\x05\x12;\n\x02\x63\x63\x18\x02 \x01(\x0b\x32/.google.protobuf.python.internal.ForeignMessage\"&\n\x0e\x46oreignMessage\x12\t\n\x01\x63\x18\x01 \x01(\x05\x12\t\n\x01\x64\x18\x02 \x03(\x05\"\x1d\n\x11TestAllExtensions*\x08\x08\x01\x10\x80\x80\x80\x80\x02:\x9a\x01\n!optional_nested_message_extension\x12\x32.google.protobuf.python.internal.TestAllExtensions\x18\x01 \x01(\x0b\x32;.google.protobuf.python.internal.TestAllTypes.NestedMessage:\x9a\x01\n!repeated_nested_message_extension\x12\x32.google.protobuf.python.internal.TestAllExtensions\x18\x02 \x03(\x0b\x32;.google.protobuf.python.internal.TestAllTypes.NestedMessageB\x02H\x01') - -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.pyext.python_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - TestAllExtensions.RegisterExtension(optional_nested_message_extension) - TestAllExtensions.RegisterExtension(repeated_nested_message_extension) - - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'H\001' - _TESTALLTYPES._serialized_start=72 - _TESTALLTYPES._serialized_end=388 - _TESTALLTYPES_NESTEDMESSAGE._serialized_start=300 - _TESTALLTYPES_NESTEDMESSAGE._serialized_end=388 - _FOREIGNMESSAGE._serialized_start=390 - _FOREIGNMESSAGE._serialized_end=428 - _TESTALLEXTENSIONS._serialized_start=430 - _TESTALLEXTENSIONS._serialized_end=459 -# @@protoc_insertion_point(module_scope) diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/reflection.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/reflection.py deleted file mode 100644 index 81e18859a8..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/reflection.py +++ /dev/null @@ -1,95 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. -# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. 
-# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -# This code is meant to work on Python 2.4 and above only. - -"""Contains a metaclass and helper functions used to create -protocol message classes from Descriptor objects at runtime. - -Recall that a metaclass is the "type" of a class. -(A class is to a metaclass what an instance is to a class.) - -In this case, we use the GeneratedProtocolMessageType metaclass -to inject all the useful functionality into the classes -output by the protocol compiler at compile-time. - -The upshot of all this is that the real implementation -details for ALL pure-Python protocol buffers are *here in -this file*. -""" - -__author__ = 'robinson@google.com (Will Robinson)' - - -from google.protobuf import message_factory -from google.protobuf import symbol_database - -# The type of all Message classes. -# Part of the public interface, but normally only used by message factories. -GeneratedProtocolMessageType = message_factory._GENERATED_PROTOCOL_MESSAGE_TYPE - -MESSAGE_CLASS_CACHE = {} - - -# Deprecated. Please NEVER use reflection.ParseMessage(). -def ParseMessage(descriptor, byte_str): - """Generate a new Message instance from this Descriptor and a byte string. - - DEPRECATED: ParseMessage is deprecated because it is using MakeClass(). - Please use MessageFactory.GetPrototype() instead. - - Args: - descriptor: Protobuf Descriptor object - byte_str: Serialized protocol buffer byte string - - Returns: - Newly created protobuf Message object. - """ - result_class = MakeClass(descriptor) - new_msg = result_class() - new_msg.ParseFromString(byte_str) - return new_msg - - -# Deprecated. Please NEVER use reflection.MakeClass(). -def MakeClass(descriptor): - """Construct a class object for a protobuf described by descriptor. - - DEPRECATED: use MessageFactory.GetPrototype() instead. - - Args: - descriptor: A descriptor.Descriptor object describing the protobuf. - Returns: - The Message class object described by the descriptor. - """ - # Original implementation leads to duplicate message classes, which won't play - # well with extensions. Message factory info is also missing. - # Redirect to message_factory. - return symbol_database.Default().GetPrototype(descriptor) diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/service.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/service.py deleted file mode 100644 index 5625246324..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/service.py +++ /dev/null @@ -1,228 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. 
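For callers migrating off the deprecated reflection.py helpers deleted above, a hedged sketch of the replacement their docstrings point to: build the class once through the default symbol database's GetPrototype() and parse with it (this uses the protobuf 3.x API as vendored here; the function name is illustrative):

from google.protobuf import symbol_database

def parse_message(descriptor, byte_str):
    # Equivalent of reflection.ParseMessage() without the deprecated wrapper.
    msg_cls = symbol_database.Default().GetPrototype(descriptor)
    msg = msg_cls()
    msg.ParseFromString(byte_str)
    return msg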
-# https://developers.google.com/protocol-buffers/
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-#     * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-#     * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-#     * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-"""DEPRECATED: Declares the RPC service interfaces.
-
-This module declares the abstract interfaces underlying proto2 RPC
-services. These are intended to be independent of any particular RPC
-implementation, so that proto2 services can be used on top of a variety
-of implementations. Starting with version 2.3.0, RPC implementations should
-not try to build on these, but should instead provide code generator plugins
-which generate code specific to the particular RPC implementation. This way
-the generated code can be more appropriate for the implementation in use
-and can avoid unnecessary layers of indirection.
-"""
-
-__author__ = 'petar@google.com (Petar Petrov)'
-
-
-class RpcException(Exception):
-  """Exception raised on failed blocking RPC method call."""
-  pass
-
-
-class Service(object):
-
-  """Abstract base interface for protocol-buffer-based RPC services.
-
-  Services themselves are abstract classes (implemented either by servers or as
-  stubs), but they subclass this base interface. The methods of this
-  interface can be used to call the methods of the service without knowing
-  its exact type at compile time (analogous to the Message interface).
-  """
-
-  def GetDescriptor(self):
-    """Retrieves this service's descriptor."""
-    raise NotImplementedError
-
-  def CallMethod(self, method_descriptor, rpc_controller,
-                 request, done):
-    """Calls a method of the service specified by method_descriptor.
-
-    If "done" is None then the call is blocking and the response
-    message will be returned directly. Otherwise the call is asynchronous
-    and "done" will later be called with the response value.
-
-    In the blocking case, RpcException will be raised on error.
-
-    Preconditions:
-
-    * method_descriptor.service == GetDescriptor
-    * request is of the exact same class as returned by
-      GetRequestClass(method).
-    * After the call has started, the request must not be modified.
-    * "rpc_controller" is of the correct type for the RPC implementation being
-      used by this Service. For stubs, the "correct type" depends on the
-      RpcChannel which the stub is using.
-
-    Postconditions:
-
-    * "done" will be called when the method is complete. This may be
-      before CallMethod() returns or it may be at some point in the future.
-    * If the RPC failed, the response value passed to "done" will be None.
-      Further details about the failure can be found by querying the
-      RpcController.
-    """
-    raise NotImplementedError
-
-  def GetRequestClass(self, method_descriptor):
-    """Returns the class of the request message for the specified method.
-
-    CallMethod() requires that the request is of a particular subclass of
-    Message. GetRequestClass() returns the class of this required type.
-
-    Example:
-      method = service.GetDescriptor().FindMethodByName("Foo")
-      request = service.GetRequestClass(method)()
-      request.ParseFromString(input)
-      service.CallMethod(method, request, callback)
-    """
-    raise NotImplementedError
-
-  def GetResponseClass(self, method_descriptor):
-    """Returns the class of the response message for the specified method.
-
-    This method isn't really needed, as the RpcChannel's CallMethod constructs
-    the response protocol message. It's provided anyway in case it is useful
-    for the caller to know the response type in advance.
-    """
-    raise NotImplementedError
-
-
-class RpcController(object):
-
-  """An RpcController mediates a single method call.
-
-  The primary purpose of the controller is to provide a way to manipulate
-  settings specific to the RPC implementation and to find out about RPC-level
-  errors. The methods provided by the RpcController interface are intended
-  to be a "least common denominator" set of features which we expect all
-  implementations to support. Specific implementations may provide more
-  advanced features (e.g. deadline propagation).
-  """
-
-  # Client-side methods below
-
-  def Reset(self):
-    """Resets the RpcController to its initial state.
-
-    After the RpcController has been reset, it may be reused in
-    a new call. Must not be called while an RPC is in progress.
-    """
-    raise NotImplementedError
-
-  def Failed(self):
-    """Returns true if the call failed.
-
-    After a call has finished, returns true if the call failed. The possible
-    reasons for failure depend on the RPC implementation. Failed() must not
-    be called before a call has finished. If Failed() returns true, the
-    contents of the response message are undefined.
-    """
-    raise NotImplementedError
-
-  def ErrorText(self):
-    """If Failed is true, returns a human-readable description of the error."""
-    raise NotImplementedError
-
-  def StartCancel(self):
-    """Initiate cancellation.
-
-    Advises the RPC system that the caller desires that the RPC call be
-    canceled. The RPC system may cancel it immediately, may wait awhile and
-    then cancel it, or may not even cancel the call at all. If the call is
-    canceled, the "done" callback will still be called and the RpcController
-    will indicate that the call failed at that time.
-    """
-    raise NotImplementedError
-
-  # Server-side methods below
-
-  def SetFailed(self, reason):
-    """Sets a failure reason.
-
-    Causes Failed() to return true on the client side. "reason" will be
-    incorporated into the message returned by ErrorText(). If you find
-    you need to return machine-readable information about failures, you
-    should incorporate it into your response protocol buffer and should
-    NOT call SetFailed().
-    """
-    raise NotImplementedError
-
-  def IsCanceled(self):
-    """Checks if the client canceled the RPC.
-
-    If true, indicates that the client canceled the RPC, so the server may
-    as well give up on replying to it. The server should still call the
-    final "done" callback.
-    """
-    raise NotImplementedError
-
-  def NotifyOnCancel(self, callback):
-    """Sets a callback to invoke on cancel.
-
-    Asks that the given callback be called when the RPC is canceled. The
-    callback will always be called exactly once. If the RPC completes without
-    being canceled, the callback will be called after completion. If the RPC
-    has already been canceled when NotifyOnCancel() is called, the callback
-    will be called immediately.
-
-    NotifyOnCancel() must be called no more than once per request.
-    """
-    raise NotImplementedError
-
-
-class RpcChannel(object):
-
-  """Abstract interface for an RPC channel.
-
-  An RpcChannel represents a communication line to a service which can be used
-  to call that service's methods. The service may be running on another
-  machine. Normally, you should not use an RpcChannel directly, but instead
-  construct a stub Service wrapping it. Example:
-
-    channel = rpcImpl.Channel("remotehost.example.com:1234")
-    controller = rpcImpl.Controller()
-    service = MyService_Stub(channel)
-    service.MyMethod(controller, request, callback)
-  """
-
-  def CallMethod(self, method_descriptor, rpc_controller,
-                 request, response_class, done):
-    """Calls the method identified by the descriptor.
-
-    Call the given method of the remote service. The signature of this
-    procedure looks the same as Service.CallMethod(), but the requirements
-    are less strict in one important way: the request object doesn't have to
-    be of any specific class as long as its descriptor is method.input_type.
-    """
-    raise NotImplementedError
diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/service_reflection.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/service_reflection.py
deleted file mode 100644
index f82ab7145a..0000000000
--- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/service_reflection.py
+++ /dev/null
@@ -1,295 +0,0 @@
-# Protocol Buffers - Google's data interchange format
-# Copyright 2008 Google Inc. All rights reserved.
-# https://developers.google.com/protocol-buffers/
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-#     * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-#     * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-#     * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -"""Contains metaclasses used to create protocol service and service stub -classes from ServiceDescriptor objects at runtime. - -The GeneratedServiceType and GeneratedServiceStubType metaclasses are used to -inject all useful functionality into the classes output by the protocol -compiler at compile-time. -""" - -__author__ = 'petar@google.com (Petar Petrov)' - - -class GeneratedServiceType(type): - - """Metaclass for service classes created at runtime from ServiceDescriptors. - - Implementations for all methods described in the Service class are added here - by this class. We also create properties to allow getting/setting all fields - in the protocol message. - - The protocol compiler currently uses this metaclass to create protocol service - classes at runtime. Clients can also manually create their own classes at - runtime, as in this example:: - - mydescriptor = ServiceDescriptor(.....) - class MyProtoService(service.Service): - __metaclass__ = GeneratedServiceType - DESCRIPTOR = mydescriptor - myservice_instance = MyProtoService() - # ... - """ - - _DESCRIPTOR_KEY = 'DESCRIPTOR' - - def __init__(cls, name, bases, dictionary): - """Creates a message service class. - - Args: - name: Name of the class (ignored, but required by the metaclass - protocol). - bases: Base classes of the class being constructed. - dictionary: The class dictionary of the class being constructed. - dictionary[_DESCRIPTOR_KEY] must contain a ServiceDescriptor object - describing this protocol service type. - """ - # Don't do anything if this class doesn't have a descriptor. This happens - # when a service class is subclassed. - if GeneratedServiceType._DESCRIPTOR_KEY not in dictionary: - return - - descriptor = dictionary[GeneratedServiceType._DESCRIPTOR_KEY] - service_builder = _ServiceBuilder(descriptor) - service_builder.BuildService(cls) - cls.DESCRIPTOR = descriptor - - -class GeneratedServiceStubType(GeneratedServiceType): - - """Metaclass for service stubs created at runtime from ServiceDescriptors. - - This class has similar responsibilities as GeneratedServiceType, except that - it creates the service stub classes. - """ - - _DESCRIPTOR_KEY = 'DESCRIPTOR' - - def __init__(cls, name, bases, dictionary): - """Creates a message service stub class. - - Args: - name: Name of the class (ignored, here). - bases: Base classes of the class being constructed. - dictionary: The class dictionary of the class being constructed. - dictionary[_DESCRIPTOR_KEY] must contain a ServiceDescriptor object - describing this protocol service type. - """ - super(GeneratedServiceStubType, cls).__init__(name, bases, dictionary) - # Don't do anything if this class doesn't have a descriptor. This happens - # when a service stub is subclassed. 
- if GeneratedServiceStubType._DESCRIPTOR_KEY not in dictionary: - return - - descriptor = dictionary[GeneratedServiceStubType._DESCRIPTOR_KEY] - service_stub_builder = _ServiceStubBuilder(descriptor) - service_stub_builder.BuildServiceStub(cls) - - -class _ServiceBuilder(object): - - """This class constructs a protocol service class using a service descriptor. - - Given a service descriptor, this class constructs a class that represents - the specified service descriptor. One service builder instance constructs - exactly one service class. That means all instances of that class share the - same builder. - """ - - def __init__(self, service_descriptor): - """Initializes an instance of the service class builder. - - Args: - service_descriptor: ServiceDescriptor to use when constructing the - service class. - """ - self.descriptor = service_descriptor - - def BuildService(builder, cls): - """Constructs the service class. - - Args: - cls: The class that will be constructed. - """ - - # CallMethod needs to operate with an instance of the Service class. This - # internal wrapper function exists only to be able to pass the service - # instance to the method that does the real CallMethod work. - # Making sure to use exact argument names from the abstract interface in - # service.py to match the type signature - def _WrapCallMethod(self, method_descriptor, rpc_controller, request, done): - return builder._CallMethod(self, method_descriptor, rpc_controller, - request, done) - - def _WrapGetRequestClass(self, method_descriptor): - return builder._GetRequestClass(method_descriptor) - - def _WrapGetResponseClass(self, method_descriptor): - return builder._GetResponseClass(method_descriptor) - - builder.cls = cls - cls.CallMethod = _WrapCallMethod - cls.GetDescriptor = staticmethod(lambda: builder.descriptor) - cls.GetDescriptor.__doc__ = 'Returns the service descriptor.' - cls.GetRequestClass = _WrapGetRequestClass - cls.GetResponseClass = _WrapGetResponseClass - for method in builder.descriptor.methods: - setattr(cls, method.name, builder._GenerateNonImplementedMethod(method)) - - def _CallMethod(self, srvc, method_descriptor, - rpc_controller, request, callback): - """Calls the method described by a given method descriptor. - - Args: - srvc: Instance of the service for which this method is called. - method_descriptor: Descriptor that represent the method to call. - rpc_controller: RPC controller to use for this method's execution. - request: Request protocol message. - callback: A callback to invoke after the method has completed. - """ - if method_descriptor.containing_service != self.descriptor: - raise RuntimeError( - 'CallMethod() given method descriptor for wrong service type.') - method = getattr(srvc, method_descriptor.name) - return method(rpc_controller, request, callback) - - def _GetRequestClass(self, method_descriptor): - """Returns the class of the request protocol message. - - Args: - method_descriptor: Descriptor of the method for which to return the - request protocol message class. - - Returns: - A class that represents the input protocol message of the specified - method. - """ - if method_descriptor.containing_service != self.descriptor: - raise RuntimeError( - 'GetRequestClass() given method descriptor for wrong service type.') - return method_descriptor.input_type._concrete_class - - def _GetResponseClass(self, method_descriptor): - """Returns the class of the response protocol message. 
-
-    Args:
-      method_descriptor: Descriptor of the method for which to return the
-        response protocol message class.
-
-    Returns:
-      A class that represents the output protocol message of the specified
-      method.
-    """
-    if method_descriptor.containing_service != self.descriptor:
-      raise RuntimeError(
-          'GetResponseClass() given method descriptor for wrong service type.')
-    return method_descriptor.output_type._concrete_class
-
-  def _GenerateNonImplementedMethod(self, method):
-    """Generates and returns a method that can be set for a service method.
-
-    Args:
-      method: Descriptor of the service method for which a method is to be
-        generated.
-
-    Returns:
-      A method that can be added to the service class.
-    """
-    return lambda inst, rpc_controller, request, callback: (
-        self._NonImplementedMethod(method.name, rpc_controller, callback))
-
-  def _NonImplementedMethod(self, method_name, rpc_controller, callback):
-    """The body of all methods in the generated service class.
-
-    Args:
-      method_name: Name of the method being executed.
-      rpc_controller: RPC controller used to execute this method.
-      callback: A callback which will be invoked when the method finishes.
-    """
-    rpc_controller.SetFailed('Method %s not implemented.' % method_name)
-    callback(None)
-
-
-class _ServiceStubBuilder(object):
-
-  """Constructs a protocol service stub class using a service descriptor.
-
-  Given a service descriptor, this class constructs a suitable stub class.
-  A stub is just a type-safe wrapper around an RpcChannel which emulates a
-  local implementation of the service.
-
-  One service stub builder instance constructs exactly one class. That means
-  all instances of that class share the same service stub builder.
-  """
-
-  def __init__(self, service_descriptor):
-    """Initializes an instance of the service stub class builder.
-
-    Args:
-      service_descriptor: ServiceDescriptor to use when constructing the
-        stub class.
-    """
-    self.descriptor = service_descriptor
-
-  def BuildServiceStub(self, cls):
-    """Constructs the stub class.
-
-    Args:
-      cls: The class that will be constructed.
-    """
-
-    def _ServiceStubInit(stub, rpc_channel):
-      stub.rpc_channel = rpc_channel
-    self.cls = cls
-    cls.__init__ = _ServiceStubInit
-    for method in self.descriptor.methods:
-      setattr(cls, method.name, self._GenerateStubMethod(method))
-
-  def _GenerateStubMethod(self, method):
-    return (lambda inst, rpc_controller, request, callback=None:
-        self._StubMethod(inst, method, rpc_controller, request, callback))
-
-  def _StubMethod(self, stub, method_descriptor,
-                  rpc_controller, request, callback):
-    """The body of all service methods in the generated stub class.
-
-    Args:
-      stub: Stub instance.
-      method_descriptor: Descriptor of the invoked method.
-      rpc_controller: Rpc controller to execute the method.
-      request: Request protocol message.
-      callback: A callback to execute when the method finishes.
-    Returns:
-      Response message (in case of blocking call).
-    """
-    return stub.rpc_channel.CallMethod(
-        method_descriptor, rpc_controller, request,
-        method_descriptor.output_type._concrete_class, callback)
diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/source_context_pb2.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/source_context_pb2.py
deleted file mode 100644
index 30cca2e06e..0000000000
--- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/source_context_pb2.py
+++ /dev/null
@@ -1,26 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by the protocol buffer compiler. DO NOT EDIT!
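The stub machinery deleted above boils down to a small pattern: each generated method forwards to the channel with its method identity and expected response class, via setattr. A self-contained toy version of that pattern, not the vendored API itself (FakeChannel, FakeResponse, and SearchServiceStub are hypothetical stand-ins):

class FakeResponse:
    def __init__(self, echo):
        self.echo = echo


class FakeChannel:
    # Toy RpcChannel: builds the response locally instead of doing I/O.
    def CallMethod(self, method_name, rpc_controller, request,
                   response_class, done):
        response = response_class(echo=method_name)
        if done is None:
            return response  # blocking mode, as Service.CallMethod documents
        done(response)       # asynchronous mode


class SearchServiceStub:
    # What _ServiceStubBuilder generates via setattr, written out by hand.
    def __init__(self, rpc_channel):
        self.rpc_channel = rpc_channel

    def Search(self, rpc_controller, request, callback=None):
        return self.rpc_channel.CallMethod(
            "Search", rpc_controller, request, FakeResponse, callback)


stub = SearchServiceStub(FakeChannel())
assert stub.Search(None, request=None).echo == "Search"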
-# source: google/protobuf/source_context.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n$google/protobuf/source_context.proto\x12\x0fgoogle.protobuf\"\"\n\rSourceContext\x12\x11\n\tfile_name\x18\x01 \x01(\tB\x8a\x01\n\x13\x63om.google.protobufB\x12SourceContextProtoP\x01Z6google.golang.org/protobuf/types/known/sourcecontextpb\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') - -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.source_context_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\022SourceContextProtoP\001Z6google.golang.org/protobuf/types/known/sourcecontextpb\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' - _SOURCECONTEXT._serialized_start=57 - _SOURCECONTEXT._serialized_end=91 -# @@protoc_insertion_point(module_scope) diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/struct_pb2.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/struct_pb2.py deleted file mode 100644 index 149728ca08..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/struct_pb2.py +++ /dev/null @@ -1,36 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/protobuf/struct.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1cgoogle/protobuf/struct.proto\x12\x0fgoogle.protobuf\"\x84\x01\n\x06Struct\x12\x33\n\x06\x66ields\x18\x01 \x03(\x0b\x32#.google.protobuf.Struct.FieldsEntry\x1a\x45\n\x0b\x46ieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12%\n\x05value\x18\x02 \x01(\x0b\x32\x16.google.protobuf.Value:\x02\x38\x01\"\xea\x01\n\x05Value\x12\x30\n\nnull_value\x18\x01 \x01(\x0e\x32\x1a.google.protobuf.NullValueH\x00\x12\x16\n\x0cnumber_value\x18\x02 \x01(\x01H\x00\x12\x16\n\x0cstring_value\x18\x03 \x01(\tH\x00\x12\x14\n\nbool_value\x18\x04 \x01(\x08H\x00\x12/\n\x0cstruct_value\x18\x05 \x01(\x0b\x32\x17.google.protobuf.StructH\x00\x12\x30\n\nlist_value\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.ListValueH\x00\x42\x06\n\x04kind\"3\n\tListValue\x12&\n\x06values\x18\x01 \x03(\x0b\x32\x16.google.protobuf.Value*\x1b\n\tNullValue\x12\x0e\n\nNULL_VALUE\x10\x00\x42\x7f\n\x13\x63om.google.protobufB\x0bStructProtoP\x01Z/google.golang.org/protobuf/types/known/structpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') - -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.struct_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = 
b'\n\023com.google.protobufB\013StructProtoP\001Z/google.golang.org/protobuf/types/known/structpb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' - _STRUCT_FIELDSENTRY._options = None - _STRUCT_FIELDSENTRY._serialized_options = b'8\001' - _NULLVALUE._serialized_start=474 - _NULLVALUE._serialized_end=501 - _STRUCT._serialized_start=50 - _STRUCT._serialized_end=182 - _STRUCT_FIELDSENTRY._serialized_start=113 - _STRUCT_FIELDSENTRY._serialized_end=182 - _VALUE._serialized_start=185 - _VALUE._serialized_end=419 - _LISTVALUE._serialized_start=421 - _LISTVALUE._serialized_end=472 -# @@protoc_insertion_point(module_scope) diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/symbol_database.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/symbol_database.py deleted file mode 100644 index fdcf8cf06c..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/symbol_database.py +++ /dev/null @@ -1,194 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. -# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -"""A database of Python protocol buffer generated symbols. - -SymbolDatabase is the MessageFactory for messages generated at compile time, -and makes it easy to create new instances of a registered type, given only the -type's protocol buffer symbol name. - -Example usage:: - - db = symbol_database.SymbolDatabase() - - # Register symbols of interest, from one or multiple files. 
-  db.RegisterFileDescriptor(my_proto_pb2.DESCRIPTOR)
-  db.RegisterMessage(my_proto_pb2.MyMessage)
-  db.RegisterEnumDescriptor(my_proto_pb2.MyEnum.DESCRIPTOR)
-
-  # The database can be used as a MessageFactory, to generate types based on
-  # their name:
-  types = db.GetMessages(['my_proto.proto'])
-  my_message_instance = types['MyMessage']()
-
-  # The database's underlying descriptor pool can be queried, so it's not
-  # necessary to know a type's filename to be able to generate it:
-  filename = db.pool.FindFileContainingSymbol('MyMessage')
-  my_message_instance = db.GetMessages([filename])['MyMessage']()
-
-  # This functionality is also provided directly via a convenience method:
-  my_message_instance = db.GetSymbol('MyMessage')()
-"""
-
-
-from google.protobuf.internal import api_implementation
-from google.protobuf import descriptor_pool
-from google.protobuf import message_factory
-
-
-class SymbolDatabase(message_factory.MessageFactory):
-  """A database of Python generated symbols."""
-
-  def RegisterMessage(self, message):
-    """Registers the given message type in the local database.
-
-    Calls to GetSymbol() and GetMessages() will return messages registered here.
-
-    Args:
-      message: A :class:`google.protobuf.message.Message` subclass (or
-        instance); its descriptor will be registered.
-
-    Returns:
-      The provided message.
-    """
-
-    desc = message.DESCRIPTOR
-    self._classes[desc] = message
-    self.RegisterMessageDescriptor(desc)
-    return message
-
-  def RegisterMessageDescriptor(self, message_descriptor):
-    """Registers the given message descriptor in the local database.
-
-    Args:
-      message_descriptor (Descriptor): the message descriptor to add.
-    """
-    if api_implementation.Type() == 'python':
-      # pylint: disable=protected-access
-      self.pool._AddDescriptor(message_descriptor)
-
-  def RegisterEnumDescriptor(self, enum_descriptor):
-    """Registers the given enum descriptor in the local database.
-
-    Args:
-      enum_descriptor (EnumDescriptor): The enum descriptor to register.
-
-    Returns:
-      EnumDescriptor: The provided descriptor.
-    """
-    if api_implementation.Type() == 'python':
-      # pylint: disable=protected-access
-      self.pool._AddEnumDescriptor(enum_descriptor)
-    return enum_descriptor
-
-  def RegisterServiceDescriptor(self, service_descriptor):
-    """Registers the given service descriptor in the local database.
-
-    Args:
-      service_descriptor (ServiceDescriptor): the service descriptor to
-        register.
-    """
-    if api_implementation.Type() == 'python':
-      # pylint: disable=protected-access
-      self.pool._AddServiceDescriptor(service_descriptor)
-
-  def RegisterFileDescriptor(self, file_descriptor):
-    """Registers the given file descriptor in the local database.
-
-    Args:
-      file_descriptor (FileDescriptor): The file descriptor to register.
-    """
-    if api_implementation.Type() == 'python':
-      # pylint: disable=protected-access
-      self.pool._InternalAddFileDescriptor(file_descriptor)
-
-  def GetSymbol(self, symbol):
-    """Tries to find a symbol in the local database.
-
-    Currently, this method only returns message.Message instances; however, it
-    may be extended in the future to support other symbol types.
-
-    Args:
-      symbol (str): a protocol buffer symbol.
-
-    Returns:
-      A Python class corresponding to the symbol.
-
-    Raises:
-      KeyError: if the symbol could not be found.
-    """
-
-    return self._classes[self.pool.FindMessageTypeByName(symbol)]
-
-  def GetMessages(self, files):
-    # TODO(amauryfa): Fix the differences with MessageFactory.
-    """Gets all registered messages from a specified file.
-
-    Only messages already created and registered will be returned (this is the
-    case for imported _pb2 modules). Unlike MessageFactory, this version also
-    returns nested messages that are already defined, but it does not register
-    any message extensions.
-
-    Args:
-      files (list[str]): The file names to extract messages from.
-
-    Returns:
-      A dictionary mapping proto names to the message classes.
-
-    Raises:
-      KeyError: if a file could not be found.
-    """
-
-    def _GetAllMessages(desc):
-      """Walks a message Descriptor and recursively yields all its nested
-      message descriptors."""
-      yield desc
-      for msg_desc in desc.nested_types:
-        for nested_desc in _GetAllMessages(msg_desc):
-          yield nested_desc
-
-    result = {}
-    for file_name in files:
-      file_desc = self.pool.FindFileByName(file_name)
-      for msg_desc in file_desc.message_types_by_name.values():
-        for desc in _GetAllMessages(msg_desc):
-          try:
-            result[desc.full_name] = self._classes[desc]
-          except KeyError:
-            # This descriptor has no registered class, skip it.
-            pass
-    return result
-
-
-_DEFAULT = SymbolDatabase(pool=descriptor_pool.Default())
-
-
-def Default():
-  """Returns the default SymbolDatabase."""
-  return _DEFAULT
diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/text_encoding.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/text_encoding.py
deleted file mode 100644
index 759cf11f62..0000000000
--- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/text_encoding.py
+++ /dev/null
@@ -1,110 +0,0 @@
-# Protocol Buffers - Google's data interchange format
-# Copyright 2008 Google Inc. All rights reserved.
-# https://developers.google.com/protocol-buffers/
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-#     * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-#     * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-#     * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-"""Encoding related utilities."""
-import re
-
-_cescape_chr_to_symbol_map = {}
-_cescape_chr_to_symbol_map[9] = r'\t'  # optional escape
-_cescape_chr_to_symbol_map[10] = r'\n'  # optional escape
-_cescape_chr_to_symbol_map[13] = r'\r'  # optional escape
-_cescape_chr_to_symbol_map[34] = r'\"'  # necessary escape
-_cescape_chr_to_symbol_map[39] = r"\'"  # optional escape
-_cescape_chr_to_symbol_map[92] = r'\\'  # necessary escape
-
-# Lookup table for unicode
-_cescape_unicode_to_str = [chr(i) for i in range(0, 256)]
-for byte, string in _cescape_chr_to_symbol_map.items():
-  _cescape_unicode_to_str[byte] = string
-
-# Lookup table for non-utf8, with necessary escapes at (o >= 127 or o < 32)
-_cescape_byte_to_str = ([r'\%03o' % i for i in range(0, 32)] +
-                        [chr(i) for i in range(32, 127)] +
-                        [r'\%03o' % i for i in range(127, 256)])
-for byte, string in _cescape_chr_to_symbol_map.items():
-  _cescape_byte_to_str[byte] = string
-del byte, string
-
-
-def CEscape(text, as_utf8):
-  # type: (...) -> str
-  """Escape a bytes string for use in a text protocol buffer.
-
-  Args:
-    text: A byte string to be escaped.
-    as_utf8: Specifies if result may contain non-ASCII characters.
-        In Python 3 this allows unescaped non-ASCII Unicode characters.
-        In Python 2 the return value will be valid UTF-8 rather than only ASCII.
-  Returns:
-    Escaped string (str).
-  """
-  # Python's text.encode() 'string_escape' or 'unicode_escape' codecs do not
-  # satisfy our needs; they encode unprintable characters using two-digit hex
-  # escapes whereas our C++ unescaping function allows hex escapes to be any
-  # length. So, "\0011".encode('string_escape') ends up being "\\x011", which
-  # will be decoded in C++ as a single-character string with char code 0x11.
-  text_is_unicode = isinstance(text, str)
-  if as_utf8 and text_is_unicode:
-    # We're already unicode, no processing beyond control char escapes.
-    return text.translate(_cescape_chr_to_symbol_map)
-  ord_ = ord if text_is_unicode else lambda x: x  # bytes iterate as ints.
-  if as_utf8:
-    return ''.join(_cescape_unicode_to_str[ord_(c)] for c in text)
-  return ''.join(_cescape_byte_to_str[ord_(c)] for c in text)
-
-
-_CUNESCAPE_HEX = re.compile(r'(\\+)x([0-9a-fA-F])(?![0-9a-fA-F])')
-
-
-def CUnescape(text):
-  # type: (str) -> bytes
-  """Unescape a text string with C-style escape sequences to UTF-8 bytes.
-
-  Args:
-    text: The data to parse in a str.
-  Returns:
-    A byte string.
-  """
-
-  def ReplaceHex(m):
-    # Only replace the match if the number of leading back slashes is odd. i.e.
-    # the slash itself is not escaped.
-    if len(m.group(1)) & 1:
-      return m.group(1) + 'x0' + m.group(2)
-    return m.group(0)
-
-  # This is required because the 'string_escape' encoding doesn't
-  # allow single-digit hex escapes (like '\xf').
-  result = _CUNESCAPE_HEX.sub(ReplaceHex, text)
-
-  return (result.encode('utf-8')  # Make it bytes to allow decode.
-          .decode('unicode_escape')
-          # Make it bytes again to return the proper type.
-          .encode('raw_unicode_escape'))
diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/text_format.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/text_format.py
deleted file mode 100644
index 412385c26f..0000000000
--- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/text_format.py
+++ /dev/null
@@ -1,1795 +0,0 @@
-# Protocol Buffers - Google's data interchange format
-# Copyright 2008 Google Inc. All rights reserved.
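A small usage sketch of the text_encoding pair deleted above (the byte values are illustrative): CEscape octal-escapes control bytes and escapes quotes, and CUnescape inverts it.

from google.protobuf import text_encoding

data = b'\x00\x01"hi"\n'
escaped = text_encoding.CEscape(data, as_utf8=False)
# escaped is now the str  \000\001\"hi\"\n  (backslash escapes, not raw bytes)
assert text_encoding.CUnescape(escaped) == data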
-# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -"""Contains routines for printing protocol messages in text format. - -Simple usage example:: - - # Create a proto object and serialize it to a text proto string. - message = my_proto_pb2.MyMessage(foo='bar') - text_proto = text_format.MessageToString(message) - - # Parse a text proto string. - message = text_format.Parse(text_proto, my_proto_pb2.MyMessage()) -""" - -__author__ = 'kenton@google.com (Kenton Varda)' - -# TODO(b/129989314) Import thread contention leads to test failures. 
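Before the implementation, a hedged round-trip sketch of the module's two entry points (msg can be any generated message instance; the '.17g' double format follows the guidance repeated in the docstrings below):

from google.protobuf import text_format

def text_round_trip(msg):
    # Serialize to text, then parse back into a fresh instance of the
    # same message type; '.17g' keeps double fields bit-exact.
    text = text_format.MessageToString(msg, double_format='.17g')
    clone = type(msg)()
    text_format.Parse(text, clone)
    return clone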
-import encodings.raw_unicode_escape # pylint: disable=unused-import -import encodings.unicode_escape # pylint: disable=unused-import -import io -import math -import re - -from google.protobuf.internal import decoder -from google.protobuf.internal import type_checkers -from google.protobuf import descriptor -from google.protobuf import text_encoding - -# pylint: disable=g-import-not-at-top -__all__ = ['MessageToString', 'Parse', 'PrintMessage', 'PrintField', - 'PrintFieldValue', 'Merge', 'MessageToBytes'] - -_INTEGER_CHECKERS = (type_checkers.Uint32ValueChecker(), - type_checkers.Int32ValueChecker(), - type_checkers.Uint64ValueChecker(), - type_checkers.Int64ValueChecker()) -_FLOAT_INFINITY = re.compile('-?inf(?:inity)?f?$', re.IGNORECASE) -_FLOAT_NAN = re.compile('nanf?$', re.IGNORECASE) -_QUOTES = frozenset(("'", '"')) -_ANY_FULL_TYPE_NAME = 'google.protobuf.Any' - - -class Error(Exception): - """Top-level module error for text_format.""" - - -class ParseError(Error): - """Thrown in case of text parsing or tokenizing error.""" - - def __init__(self, message=None, line=None, column=None): - if message is not None and line is not None: - loc = str(line) - if column is not None: - loc += ':{0}'.format(column) - message = '{0} : {1}'.format(loc, message) - if message is not None: - super(ParseError, self).__init__(message) - else: - super(ParseError, self).__init__() - self._line = line - self._column = column - - def GetLine(self): - return self._line - - def GetColumn(self): - return self._column - - -class TextWriter(object): - - def __init__(self, as_utf8): - self._writer = io.StringIO() - - def write(self, val): - return self._writer.write(val) - - def close(self): - return self._writer.close() - - def getvalue(self): - return self._writer.getvalue() - - -def MessageToString( - message, - as_utf8=False, - as_one_line=False, - use_short_repeated_primitives=False, - pointy_brackets=False, - use_index_order=False, - float_format=None, - double_format=None, - use_field_number=False, - descriptor_pool=None, - indent=0, - message_formatter=None, - print_unknown_fields=False, - force_colon=False): - # type: (...) -> str - """Convert protobuf message to text format. - - Double values can be formatted compactly with 15 digits of - precision (which is the most that IEEE 754 "double" can guarantee) - using double_format='.15g'. To ensure that converting to text and back to a - proto will result in an identical value, double_format='.17g' should be used. - - Args: - message: The protocol buffers message. - as_utf8: Return unescaped Unicode for non-ASCII characters. - In Python 3 actual Unicode characters may appear as is in strings. - In Python 2 the return value will be valid UTF-8 rather than only ASCII. - as_one_line: Don't introduce newlines between fields. - use_short_repeated_primitives: Use short repeated format for primitives. - pointy_brackets: If True, use angle brackets instead of curly braces for - nesting. - use_index_order: If True, fields of a proto message will be printed using - the order defined in source code instead of the field number, extensions - will be printed at the end of the message and their relative order is - determined by the extension number. By default, use the field number - order. - float_format (str): If set, use this to specify float field formatting - (per the "Format Specification Mini-Language"); otherwise, shortest float - that has same value in wire will be printed. Also affect double field - if double_format is not set but float_format is set. 
- double_format (str): If set, use this to specify double field formatting - (per the "Format Specification Mini-Language"); if it is not set but - float_format is set, use float_format. Otherwise, use ``str()`` - use_field_number: If True, print field numbers instead of names. - descriptor_pool (DescriptorPool): Descriptor pool used to resolve Any types. - indent (int): The initial indent level, in terms of spaces, for pretty - print. - message_formatter (function(message, indent, as_one_line) -> unicode|None): - Custom formatter for selected sub-messages (usually based on message - type). Use to pretty print parts of the protobuf for easier diffing. - print_unknown_fields: If True, unknown fields will be printed. - force_colon: If set, a colon will be added after the field name even if the - field is a proto message. - - Returns: - str: A string of the text formatted protocol buffer message. - """ - out = TextWriter(as_utf8) - printer = _Printer( - out, - indent, - as_utf8, - as_one_line, - use_short_repeated_primitives, - pointy_brackets, - use_index_order, - float_format, - double_format, - use_field_number, - descriptor_pool, - message_formatter, - print_unknown_fields=print_unknown_fields, - force_colon=force_colon) - printer.PrintMessage(message) - result = out.getvalue() - out.close() - if as_one_line: - return result.rstrip() - return result - - -def MessageToBytes(message, **kwargs): - # type: (...) -> bytes - """Convert protobuf message to encoded text format. See MessageToString.""" - text = MessageToString(message, **kwargs) - if isinstance(text, bytes): - return text - codec = 'utf-8' if kwargs.get('as_utf8') else 'ascii' - return text.encode(codec) - - -def _IsMapEntry(field): - return (field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and - field.message_type.has_options and - field.message_type.GetOptions().map_entry) - - -def PrintMessage(message, - out, - indent=0, - as_utf8=False, - as_one_line=False, - use_short_repeated_primitives=False, - pointy_brackets=False, - use_index_order=False, - float_format=None, - double_format=None, - use_field_number=False, - descriptor_pool=None, - message_formatter=None, - print_unknown_fields=False, - force_colon=False): - printer = _Printer( - out=out, indent=indent, as_utf8=as_utf8, - as_one_line=as_one_line, - use_short_repeated_primitives=use_short_repeated_primitives, - pointy_brackets=pointy_brackets, - use_index_order=use_index_order, - float_format=float_format, - double_format=double_format, - use_field_number=use_field_number, - descriptor_pool=descriptor_pool, - message_formatter=message_formatter, - print_unknown_fields=print_unknown_fields, - force_colon=force_colon) - printer.PrintMessage(message) - - -def PrintField(field, - value, - out, - indent=0, - as_utf8=False, - as_one_line=False, - use_short_repeated_primitives=False, - pointy_brackets=False, - use_index_order=False, - float_format=None, - double_format=None, - message_formatter=None, - print_unknown_fields=False, - force_colon=False): - """Print a single field name/value pair.""" - printer = _Printer(out, indent, as_utf8, as_one_line, - use_short_repeated_primitives, pointy_brackets, - use_index_order, float_format, double_format, - message_formatter=message_formatter, - print_unknown_fields=print_unknown_fields, - force_colon=force_colon) - printer.PrintField(field, value) - - -def PrintFieldValue(field, - value, - out, - indent=0, - as_utf8=False, - as_one_line=False, - use_short_repeated_primitives=False, - pointy_brackets=False, - use_index_order=False, 
-                    float_format=None,
-                    double_format=None,
-                    message_formatter=None,
-                    print_unknown_fields=False,
-                    force_colon=False):
-  """Print a single field value (not including name)."""
-  printer = _Printer(out, indent, as_utf8, as_one_line,
-                     use_short_repeated_primitives, pointy_brackets,
-                     use_index_order, float_format, double_format,
-                     message_formatter=message_formatter,
-                     print_unknown_fields=print_unknown_fields,
-                     force_colon=force_colon)
-  printer.PrintFieldValue(field, value)
-
-
-def _BuildMessageFromTypeName(type_name, descriptor_pool):
-  """Returns a protobuf message instance.
-
-  Args:
-    type_name: Fully-qualified protobuf message type name string.
-    descriptor_pool: DescriptorPool instance.
-
-  Returns:
-    A Message instance of type matching type_name, or None if a Descriptor
-    matching type_name wasn't found.
-  """
-  # pylint: disable=g-import-not-at-top
-  if descriptor_pool is None:
-    from google.protobuf import descriptor_pool as pool_mod
-    descriptor_pool = pool_mod.Default()
-  from google.protobuf import symbol_database
-  database = symbol_database.Default()
-  try:
-    message_descriptor = descriptor_pool.FindMessageTypeByName(type_name)
-  except KeyError:
-    return None
-  message_type = database.GetPrototype(message_descriptor)
-  return message_type()
-
-
-# These values must match WireType enum in google/protobuf/wire_format.h.
-WIRETYPE_LENGTH_DELIMITED = 2
-WIRETYPE_START_GROUP = 3
-
-
-class _Printer(object):
-  """Text format printer for protocol message."""
-
-  def __init__(
-      self,
-      out,
-      indent=0,
-      as_utf8=False,
-      as_one_line=False,
-      use_short_repeated_primitives=False,
-      pointy_brackets=False,
-      use_index_order=False,
-      float_format=None,
-      double_format=None,
-      use_field_number=False,
-      descriptor_pool=None,
-      message_formatter=None,
-      print_unknown_fields=False,
-      force_colon=False):
-    """Initialize the Printer.
-
-    Double values can be formatted compactly with 15 digits of precision
-    (which is the most that IEEE 754 "double" can guarantee) using
-    double_format='.15g'. To ensure that converting to text and back to a proto
-    will result in an identical value, double_format='.17g' should be used.
-
-    Args:
-      out: To record the text format result.
-      indent: The initial indent level for pretty print.
-      as_utf8: Return unescaped Unicode for non-ASCII characters.
-          In Python 3 actual Unicode characters may appear as is in strings.
-          In Python 2 the return value will be valid UTF-8 rather than ASCII.
-      as_one_line: Don't introduce newlines between fields.
-      use_short_repeated_primitives: Use short repeated format for primitives.
-      pointy_brackets: If True, use angle brackets instead of curly braces for
-        nesting.
-      use_index_order: If True, print fields of a proto message using the order
-        defined in source code instead of the field number. By default, use the
-        field number order.
-      float_format: If set, use this to specify float field formatting
-        (per the "Format Specification Mini-Language"); otherwise, the shortest
-        float that has the same value on the wire will be printed. This also
-        affects double fields if double_format is not set but float_format is.
-      double_format: If set, use this to specify double field formatting
-        (per the "Format Specification Mini-Language"); if it is not set but
-        float_format is set, use float_format. Otherwise, str() is used.
-      use_field_number: If True, print field numbers instead of names.
-      descriptor_pool: A DescriptorPool used to resolve Any types.
- message_formatter: A function(message, indent, as_one_line): unicode|None - to custom format selected sub-messages (usually based on message type). - Use to pretty print parts of the protobuf for easier diffing. - print_unknown_fields: If True, unknown fields will be printed. - force_colon: If set, a colon will be added after the field name even if - the field is a proto message. - """ - self.out = out - self.indent = indent - self.as_utf8 = as_utf8 - self.as_one_line = as_one_line - self.use_short_repeated_primitives = use_short_repeated_primitives - self.pointy_brackets = pointy_brackets - self.use_index_order = use_index_order - self.float_format = float_format - if double_format is not None: - self.double_format = double_format - else: - self.double_format = float_format - self.use_field_number = use_field_number - self.descriptor_pool = descriptor_pool - self.message_formatter = message_formatter - self.print_unknown_fields = print_unknown_fields - self.force_colon = force_colon - - def _TryPrintAsAnyMessage(self, message): - """Serializes if message is a google.protobuf.Any field.""" - if '/' not in message.type_url: - return False - packed_message = _BuildMessageFromTypeName(message.TypeName(), - self.descriptor_pool) - if packed_message: - packed_message.MergeFromString(message.value) - colon = ':' if self.force_colon else '' - self.out.write('%s[%s]%s ' % (self.indent * ' ', message.type_url, colon)) - self._PrintMessageFieldValue(packed_message) - self.out.write(' ' if self.as_one_line else '\n') - return True - else: - return False - - def _TryCustomFormatMessage(self, message): - formatted = self.message_formatter(message, self.indent, self.as_one_line) - if formatted is None: - return False - - out = self.out - out.write(' ' * self.indent) - out.write(formatted) - out.write(' ' if self.as_one_line else '\n') - return True - - def PrintMessage(self, message): - """Convert protobuf message to text format. - - Args: - message: The protocol buffers message. - """ - if self.message_formatter and self._TryCustomFormatMessage(message): - return - if (message.DESCRIPTOR.full_name == _ANY_FULL_TYPE_NAME and - self._TryPrintAsAnyMessage(message)): - return - fields = message.ListFields() - if self.use_index_order: - fields.sort( - key=lambda x: x[0].number if x[0].is_extension else x[0].index) - for field, value in fields: - if _IsMapEntry(field): - for key in sorted(value): - # This is slow for maps with submessage entries because it copies the - # entire tree. Unfortunately this would take significant refactoring - # of this file to work around. - # - # TODO(haberman): refactor and optimize if this becomes an issue. 
- entry_submsg = value.GetEntryClass()(key=key, value=value[key]) - self.PrintField(field, entry_submsg) - elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED: - if (self.use_short_repeated_primitives - and field.cpp_type != descriptor.FieldDescriptor.CPPTYPE_MESSAGE - and field.cpp_type != descriptor.FieldDescriptor.CPPTYPE_STRING): - self._PrintShortRepeatedPrimitivesValue(field, value) - else: - for element in value: - self.PrintField(field, element) - else: - self.PrintField(field, value) - - if self.print_unknown_fields: - self._PrintUnknownFields(message.UnknownFields()) - - def _PrintUnknownFields(self, unknown_fields): - """Print unknown fields.""" - out = self.out - for field in unknown_fields: - out.write(' ' * self.indent) - out.write(str(field.field_number)) - if field.wire_type == WIRETYPE_START_GROUP: - if self.as_one_line: - out.write(' { ') - else: - out.write(' {\n') - self.indent += 2 - - self._PrintUnknownFields(field.data) - - if self.as_one_line: - out.write('} ') - else: - self.indent -= 2 - out.write(' ' * self.indent + '}\n') - elif field.wire_type == WIRETYPE_LENGTH_DELIMITED: - try: - # If this field is parseable as a Message, it is probably - # an embedded message. - # pylint: disable=protected-access - (embedded_unknown_message, pos) = decoder._DecodeUnknownFieldSet( - memoryview(field.data), 0, len(field.data)) - except Exception: # pylint: disable=broad-except - pos = 0 - - if pos == len(field.data): - if self.as_one_line: - out.write(' { ') - else: - out.write(' {\n') - self.indent += 2 - - self._PrintUnknownFields(embedded_unknown_message) - - if self.as_one_line: - out.write('} ') - else: - self.indent -= 2 - out.write(' ' * self.indent + '}\n') - else: - # A string or bytes field. self.as_utf8 may not work. - out.write(': \"') - out.write(text_encoding.CEscape(field.data, False)) - out.write('\" ' if self.as_one_line else '\"\n') - else: - # varint, fixed32, fixed64 - out.write(': ') - out.write(str(field.data)) - out.write(' ' if self.as_one_line else '\n') - - def _PrintFieldName(self, field): - """Print field name.""" - out = self.out - out.write(' ' * self.indent) - if self.use_field_number: - out.write(str(field.number)) - else: - if field.is_extension: - out.write('[') - if (field.containing_type.GetOptions().message_set_wire_format and - field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and - field.label == descriptor.FieldDescriptor.LABEL_OPTIONAL): - out.write(field.message_type.full_name) - else: - out.write(field.full_name) - out.write(']') - elif field.type == descriptor.FieldDescriptor.TYPE_GROUP: - # For groups, use the capitalized name. - out.write(field.message_type.name) - else: - out.write(field.name) - - if (self.force_colon or - field.cpp_type != descriptor.FieldDescriptor.CPPTYPE_MESSAGE): - # The colon is optional in this case, but our cross-language golden files - # don't include it. Here, the colon is only included if force_colon is - # set to True - out.write(':') - - def PrintField(self, field, value): - """Print a single field name/value pair.""" - self._PrintFieldName(field) - self.out.write(' ') - self.PrintFieldValue(field, value) - self.out.write(' ' if self.as_one_line else '\n') - - def _PrintShortRepeatedPrimitivesValue(self, field, value): - """"Prints short repeated primitives value.""" - # Note: this is called only when value has at least one element. 
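# For illustration: with use_short_repeated_primitives=True a repeated
# int32 field prints as
#
#   foo: [1, 2, 3]
#
# instead of the default one-element-per-line form:
#
#   foo: 1
#   foo: 2
#   foo: 3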
- self._PrintFieldName(field) - self.out.write(' [') - for i in range(len(value) - 1): - self.PrintFieldValue(field, value[i]) - self.out.write(', ') - self.PrintFieldValue(field, value[-1]) - self.out.write(']') - self.out.write(' ' if self.as_one_line else '\n') - - def _PrintMessageFieldValue(self, value): - if self.pointy_brackets: - openb = '<' - closeb = '>' - else: - openb = '{' - closeb = '}' - - if self.as_one_line: - self.out.write('%s ' % openb) - self.PrintMessage(value) - self.out.write(closeb) - else: - self.out.write('%s\n' % openb) - self.indent += 2 - self.PrintMessage(value) - self.indent -= 2 - self.out.write(' ' * self.indent + closeb) - - def PrintFieldValue(self, field, value): - """Print a single field value (not including name). - - For repeated fields, the value should be a single element. - - Args: - field: The descriptor of the field to be printed. - value: The value of the field. - """ - out = self.out - if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: - self._PrintMessageFieldValue(value) - elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM: - enum_value = field.enum_type.values_by_number.get(value, None) - if enum_value is not None: - out.write(enum_value.name) - else: - out.write(str(value)) - elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING: - out.write('\"') - if isinstance(value, str) and not self.as_utf8: - out_value = value.encode('utf-8') - else: - out_value = value - if field.type == descriptor.FieldDescriptor.TYPE_BYTES: - # We always need to escape all binary data in TYPE_BYTES fields. - out_as_utf8 = False - else: - out_as_utf8 = self.as_utf8 - out.write(text_encoding.CEscape(out_value, out_as_utf8)) - out.write('\"') - elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL: - if value: - out.write('true') - else: - out.write('false') - elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_FLOAT: - if self.float_format is not None: - out.write('{1:{0}}'.format(self.float_format, value)) - else: - if math.isnan(value): - out.write(str(value)) - else: - out.write(str(type_checkers.ToShortestFloat(value))) - elif (field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_DOUBLE and - self.double_format is not None): - out.write('{1:{0}}'.format(self.double_format, value)) - else: - out.write(str(value)) - - -def Parse(text, - message, - allow_unknown_extension=False, - allow_field_number=False, - descriptor_pool=None, - allow_unknown_field=False): - """Parses a text representation of a protocol message into a message. - - NOTE: for historical reasons this function does not clear the input - message. This is different from what the binary msg.ParseFrom(...) does. - If text contains a field already set in message, the value is appended if the - field is repeated. Otherwise, an error is raised. - - Example:: - - a = MyProto() - a.repeated_field.append('test') - b = MyProto() - - # Repeated fields are combined - text_format.Parse(repr(a), b) - text_format.Parse(repr(a), b) # repeated_field contains ["test", "test"] - - # Non-repeated fields cannot be overwritten - a.singular_field = 1 - b.singular_field = 2 - text_format.Parse(repr(a), b) # ParseError - - # Binary version: - b.ParseFromString(a.SerializeToString()) # repeated_field is now "test" - - Caller is responsible for clearing the message as needed. - - Args: - text (str): Message text representation. - message (Message): A protocol buffer message to merge into. 
- allow_unknown_extension: if True, skip over missing extensions and keep - parsing - allow_field_number: if True, both field number and field name are allowed. - descriptor_pool (DescriptorPool): Descriptor pool used to resolve Any types. - allow_unknown_field: if True, skip over unknown field and keep - parsing. Avoid to use this option if possible. It may hide some - errors (e.g. spelling error on field name) - - Returns: - Message: The same message passed as argument. - - Raises: - ParseError: On text parsing problems. - """ - return ParseLines(text.split(b'\n' if isinstance(text, bytes) else u'\n'), - message, - allow_unknown_extension, - allow_field_number, - descriptor_pool=descriptor_pool, - allow_unknown_field=allow_unknown_field) - - -def Merge(text, - message, - allow_unknown_extension=False, - allow_field_number=False, - descriptor_pool=None, - allow_unknown_field=False): - """Parses a text representation of a protocol message into a message. - - Like Parse(), but allows repeated values for a non-repeated field, and uses - the last one. This means any non-repeated, top-level fields specified in text - replace those in the message. - - Args: - text (str): Message text representation. - message (Message): A protocol buffer message to merge into. - allow_unknown_extension: if True, skip over missing extensions and keep - parsing - allow_field_number: if True, both field number and field name are allowed. - descriptor_pool (DescriptorPool): Descriptor pool used to resolve Any types. - allow_unknown_field: if True, skip over unknown field and keep - parsing. Avoid to use this option if possible. It may hide some - errors (e.g. spelling error on field name) - - Returns: - Message: The same message passed as argument. - - Raises: - ParseError: On text parsing problems. - """ - return MergeLines( - text.split(b'\n' if isinstance(text, bytes) else u'\n'), - message, - allow_unknown_extension, - allow_field_number, - descriptor_pool=descriptor_pool, - allow_unknown_field=allow_unknown_field) - - -def ParseLines(lines, - message, - allow_unknown_extension=False, - allow_field_number=False, - descriptor_pool=None, - allow_unknown_field=False): - """Parses a text representation of a protocol message into a message. - - See Parse() for caveats. - - Args: - lines: An iterable of lines of a message's text representation. - message: A protocol buffer message to merge into. - allow_unknown_extension: if True, skip over missing extensions and keep - parsing - allow_field_number: if True, both field number and field name are allowed. - descriptor_pool: A DescriptorPool used to resolve Any types. - allow_unknown_field: if True, skip over unknown field and keep - parsing. Avoid to use this option if possible. It may hide some - errors (e.g. spelling error on field name) - - Returns: - The same message passed as argument. - - Raises: - ParseError: On text parsing problems. - """ - parser = _Parser(allow_unknown_extension, - allow_field_number, - descriptor_pool=descriptor_pool, - allow_unknown_field=allow_unknown_field) - return parser.ParseLines(lines, message) - - -def MergeLines(lines, - message, - allow_unknown_extension=False, - allow_field_number=False, - descriptor_pool=None, - allow_unknown_field=False): - """Parses a text representation of a protocol message into a message. - - See Merge() for more details. - - Args: - lines: An iterable of lines of a message's text representation. - message: A protocol buffer message to merge into. 
- allow_unknown_extension: if True, skip over missing extensions and keep - parsing - allow_field_number: if True, both field number and field name are allowed. - descriptor_pool: A DescriptorPool used to resolve Any types. - allow_unknown_field: if True, skip over unknown field and keep - parsing. Avoid to use this option if possible. It may hide some - errors (e.g. spelling error on field name) - - Returns: - The same message passed as argument. - - Raises: - ParseError: On text parsing problems. - """ - parser = _Parser(allow_unknown_extension, - allow_field_number, - descriptor_pool=descriptor_pool, - allow_unknown_field=allow_unknown_field) - return parser.MergeLines(lines, message) - - -class _Parser(object): - """Text format parser for protocol message.""" - - def __init__(self, - allow_unknown_extension=False, - allow_field_number=False, - descriptor_pool=None, - allow_unknown_field=False): - self.allow_unknown_extension = allow_unknown_extension - self.allow_field_number = allow_field_number - self.descriptor_pool = descriptor_pool - self.allow_unknown_field = allow_unknown_field - - def ParseLines(self, lines, message): - """Parses a text representation of a protocol message into a message.""" - self._allow_multiple_scalars = False - self._ParseOrMerge(lines, message) - return message - - def MergeLines(self, lines, message): - """Merges a text representation of a protocol message into a message.""" - self._allow_multiple_scalars = True - self._ParseOrMerge(lines, message) - return message - - def _ParseOrMerge(self, lines, message): - """Converts a text representation of a protocol message into a message. - - Args: - lines: Lines of a message's text representation. - message: A protocol buffer message to merge into. - - Raises: - ParseError: On text parsing problems. - """ - # Tokenize expects native str lines. - str_lines = ( - line if isinstance(line, str) else line.decode('utf-8') - for line in lines) - tokenizer = Tokenizer(str_lines) - while not tokenizer.AtEnd(): - self._MergeField(tokenizer, message) - - def _MergeField(self, tokenizer, message): - """Merges a single protocol message field into a message. - - Args: - tokenizer: A tokenizer to parse the field name and values. - message: A protocol message to record the data. - - Raises: - ParseError: In case of text parsing problems. - """ - message_descriptor = message.DESCRIPTOR - if (message_descriptor.full_name == _ANY_FULL_TYPE_NAME and - tokenizer.TryConsume('[')): - type_url_prefix, packed_type_name = self._ConsumeAnyTypeUrl(tokenizer) - tokenizer.Consume(']') - tokenizer.TryConsume(':') - if tokenizer.TryConsume('<'): - expanded_any_end_token = '>' - else: - tokenizer.Consume('{') - expanded_any_end_token = '}' - expanded_any_sub_message = _BuildMessageFromTypeName(packed_type_name, - self.descriptor_pool) - if not expanded_any_sub_message: - raise ParseError('Type %s not found in descriptor pool' % - packed_type_name) - while not tokenizer.TryConsume(expanded_any_end_token): - if tokenizer.AtEnd(): - raise tokenizer.ParseErrorPreviousToken('Expected "%s".' 
% - (expanded_any_end_token,)) - self._MergeField(tokenizer, expanded_any_sub_message) - deterministic = False - - message.Pack(expanded_any_sub_message, - type_url_prefix=type_url_prefix, - deterministic=deterministic) - return - - if tokenizer.TryConsume('['): - name = [tokenizer.ConsumeIdentifier()] - while tokenizer.TryConsume('.'): - name.append(tokenizer.ConsumeIdentifier()) - name = '.'.join(name) - - if not message_descriptor.is_extendable: - raise tokenizer.ParseErrorPreviousToken( - 'Message type "%s" does not have extensions.' % - message_descriptor.full_name) - # pylint: disable=protected-access - field = message.Extensions._FindExtensionByName(name) - # pylint: enable=protected-access - - - if not field: - if self.allow_unknown_extension: - field = None - else: - raise tokenizer.ParseErrorPreviousToken( - 'Extension "%s" not registered. ' - 'Did you import the _pb2 module which defines it? ' - 'If you are trying to place the extension in the MessageSet ' - 'field of another message that is in an Any or MessageSet field, ' - 'that message\'s _pb2 module must be imported as well' % name) - elif message_descriptor != field.containing_type: - raise tokenizer.ParseErrorPreviousToken( - 'Extension "%s" does not extend message type "%s".' % - (name, message_descriptor.full_name)) - - tokenizer.Consume(']') - - else: - name = tokenizer.ConsumeIdentifierOrNumber() - if self.allow_field_number and name.isdigit(): - number = ParseInteger(name, True, True) - field = message_descriptor.fields_by_number.get(number, None) - if not field and message_descriptor.is_extendable: - field = message.Extensions._FindExtensionByNumber(number) - else: - field = message_descriptor.fields_by_name.get(name, None) - - # Group names are expected to be capitalized as they appear in the - # .proto file, which actually matches their type names, not their field - # names. - if not field: - field = message_descriptor.fields_by_name.get(name.lower(), None) - if field and field.type != descriptor.FieldDescriptor.TYPE_GROUP: - field = None - - if (field and field.type == descriptor.FieldDescriptor.TYPE_GROUP and - field.message_type.name != name): - field = None - - if not field and not self.allow_unknown_field: - raise tokenizer.ParseErrorPreviousToken( - 'Message type "%s" has no field named "%s".' % - (message_descriptor.full_name, name)) - - if field: - if not self._allow_multiple_scalars and field.containing_oneof: - # Check if there's a different field set in this oneof. - # Note that we ignore the case if the same field was set before, and we - # apply _allow_multiple_scalars to non-scalar fields as well. - which_oneof = message.WhichOneof(field.containing_oneof.name) - if which_oneof is not None and which_oneof != field.name: - raise tokenizer.ParseErrorPreviousToken( - 'Field "%s" is specified along with field "%s", another member ' - 'of oneof "%s" for message type "%s".' % - (field.name, which_oneof, field.containing_oneof.name, - message_descriptor.full_name)) - - if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: - tokenizer.TryConsume(':') - merger = self._MergeMessageField - else: - tokenizer.Consume(':') - merger = self._MergeScalarField - - if (field.label == descriptor.FieldDescriptor.LABEL_REPEATED and - tokenizer.TryConsume('[')): - # Short repeated format, e.g. 
"foo: [1, 2, 3]" - if not tokenizer.TryConsume(']'): - while True: - merger(tokenizer, message, field) - if tokenizer.TryConsume(']'): - break - tokenizer.Consume(',') - - else: - merger(tokenizer, message, field) - - else: # Proto field is unknown. - assert (self.allow_unknown_extension or self.allow_unknown_field) - _SkipFieldContents(tokenizer) - - # For historical reasons, fields may optionally be separated by commas or - # semicolons. - if not tokenizer.TryConsume(','): - tokenizer.TryConsume(';') - - - def _ConsumeAnyTypeUrl(self, tokenizer): - """Consumes a google.protobuf.Any type URL and returns the type name.""" - # Consume "type.googleapis.com/". - prefix = [tokenizer.ConsumeIdentifier()] - tokenizer.Consume('.') - prefix.append(tokenizer.ConsumeIdentifier()) - tokenizer.Consume('.') - prefix.append(tokenizer.ConsumeIdentifier()) - tokenizer.Consume('/') - # Consume the fully-qualified type name. - name = [tokenizer.ConsumeIdentifier()] - while tokenizer.TryConsume('.'): - name.append(tokenizer.ConsumeIdentifier()) - return '.'.join(prefix), '.'.join(name) - - def _MergeMessageField(self, tokenizer, message, field): - """Merges a single scalar field into a message. - - Args: - tokenizer: A tokenizer to parse the field value. - message: The message of which field is a member. - field: The descriptor of the field to be merged. - - Raises: - ParseError: In case of text parsing problems. - """ - is_map_entry = _IsMapEntry(field) - - if tokenizer.TryConsume('<'): - end_token = '>' - else: - tokenizer.Consume('{') - end_token = '}' - - if field.label == descriptor.FieldDescriptor.LABEL_REPEATED: - if field.is_extension: - sub_message = message.Extensions[field].add() - elif is_map_entry: - sub_message = getattr(message, field.name).GetEntryClass()() - else: - sub_message = getattr(message, field.name).add() - else: - if field.is_extension: - if (not self._allow_multiple_scalars and - message.HasExtension(field)): - raise tokenizer.ParseErrorPreviousToken( - 'Message type "%s" should not have multiple "%s" extensions.' % - (message.DESCRIPTOR.full_name, field.full_name)) - sub_message = message.Extensions[field] - else: - # Also apply _allow_multiple_scalars to message field. - # TODO(jieluo): Change to _allow_singular_overwrites. - if (not self._allow_multiple_scalars and - message.HasField(field.name)): - raise tokenizer.ParseErrorPreviousToken( - 'Message type "%s" should not have multiple "%s" fields.' % - (message.DESCRIPTOR.full_name, field.name)) - sub_message = getattr(message, field.name) - sub_message.SetInParent() - - while not tokenizer.TryConsume(end_token): - if tokenizer.AtEnd(): - raise tokenizer.ParseErrorPreviousToken('Expected "%s".' % (end_token,)) - self._MergeField(tokenizer, sub_message) - - if is_map_entry: - value_cpptype = field.message_type.fields_by_name['value'].cpp_type - if value_cpptype == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: - value = getattr(message, field.name)[sub_message.key] - value.CopyFrom(sub_message.value) - else: - getattr(message, field.name)[sub_message.key] = sub_message.value - - @staticmethod - def _IsProto3Syntax(message): - message_descriptor = message.DESCRIPTOR - return (hasattr(message_descriptor, 'syntax') and - message_descriptor.syntax == 'proto3') - - def _MergeScalarField(self, tokenizer, message, field): - """Merges a single scalar field into a message. - - Args: - tokenizer: A tokenizer to parse the field value. - message: A protocol message to record the data. - field: The descriptor of the field to be merged. 
- - Raises: - ParseError: In case of text parsing problems. - RuntimeError: On runtime errors. - """ - _ = self.allow_unknown_extension - value = None - - if field.type in (descriptor.FieldDescriptor.TYPE_INT32, - descriptor.FieldDescriptor.TYPE_SINT32, - descriptor.FieldDescriptor.TYPE_SFIXED32): - value = _ConsumeInt32(tokenizer) - elif field.type in (descriptor.FieldDescriptor.TYPE_INT64, - descriptor.FieldDescriptor.TYPE_SINT64, - descriptor.FieldDescriptor.TYPE_SFIXED64): - value = _ConsumeInt64(tokenizer) - elif field.type in (descriptor.FieldDescriptor.TYPE_UINT32, - descriptor.FieldDescriptor.TYPE_FIXED32): - value = _ConsumeUint32(tokenizer) - elif field.type in (descriptor.FieldDescriptor.TYPE_UINT64, - descriptor.FieldDescriptor.TYPE_FIXED64): - value = _ConsumeUint64(tokenizer) - elif field.type in (descriptor.FieldDescriptor.TYPE_FLOAT, - descriptor.FieldDescriptor.TYPE_DOUBLE): - value = tokenizer.ConsumeFloat() - elif field.type == descriptor.FieldDescriptor.TYPE_BOOL: - value = tokenizer.ConsumeBool() - elif field.type == descriptor.FieldDescriptor.TYPE_STRING: - value = tokenizer.ConsumeString() - elif field.type == descriptor.FieldDescriptor.TYPE_BYTES: - value = tokenizer.ConsumeByteString() - elif field.type == descriptor.FieldDescriptor.TYPE_ENUM: - value = tokenizer.ConsumeEnum(field) - else: - raise RuntimeError('Unknown field type %d' % field.type) - - if field.label == descriptor.FieldDescriptor.LABEL_REPEATED: - if field.is_extension: - message.Extensions[field].append(value) - else: - getattr(message, field.name).append(value) - else: - if field.is_extension: - if (not self._allow_multiple_scalars and - not self._IsProto3Syntax(message) and - message.HasExtension(field)): - raise tokenizer.ParseErrorPreviousToken( - 'Message type "%s" should not have multiple "%s" extensions.' % - (message.DESCRIPTOR.full_name, field.full_name)) - else: - message.Extensions[field] = value - else: - duplicate_error = False - if not self._allow_multiple_scalars: - if self._IsProto3Syntax(message): - # Proto3 doesn't represent presence so we try best effort to check - # multiple scalars by compare to default values. - duplicate_error = bool(getattr(message, field.name)) - else: - duplicate_error = message.HasField(field.name) - - if duplicate_error: - raise tokenizer.ParseErrorPreviousToken( - 'Message type "%s" should not have multiple "%s" fields.' % - (message.DESCRIPTOR.full_name, field.name)) - else: - setattr(message, field.name, value) - - -def _SkipFieldContents(tokenizer): - """Skips over contents (value or message) of a field. - - Args: - tokenizer: A tokenizer to parse the field name and values. - """ - # Try to guess the type of this field. - # If this field is not a message, there should be a ":" between the - # field name and the field value and also the field value should not - # start with "{" or "<" which indicates the beginning of a message body. - # If there is no ":" or there is a "{" or "<" after ":", this field has - # to be a message or the input is ill-formed. - if tokenizer.TryConsume(':') and not tokenizer.LookingAt( - '{') and not tokenizer.LookingAt('<'): - _SkipFieldValue(tokenizer) - else: - _SkipFieldMessage(tokenizer) - - -def _SkipField(tokenizer): - """Skips over a complete field (name and value/message). - - Args: - tokenizer: A tokenizer to parse the field name and values. - """ - if tokenizer.TryConsume('['): - # Consume extension name. 
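# For illustration (hypothetical extension name): an extension field is
# written as a bracketed dotted identifier, e.g.
#
#   [my.pkg.my_extension]: 42
#
# so the code below consumes "my", then repeated "." + identifier pairs,
# then the closing "]".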
- tokenizer.ConsumeIdentifier() - while tokenizer.TryConsume('.'): - tokenizer.ConsumeIdentifier() - tokenizer.Consume(']') - else: - tokenizer.ConsumeIdentifierOrNumber() - - _SkipFieldContents(tokenizer) - - # For historical reasons, fields may optionally be separated by commas or - # semicolons. - if not tokenizer.TryConsume(','): - tokenizer.TryConsume(';') - - -def _SkipFieldMessage(tokenizer): - """Skips over a field message. - - Args: - tokenizer: A tokenizer to parse the field name and values. - """ - - if tokenizer.TryConsume('<'): - delimiter = '>' - else: - tokenizer.Consume('{') - delimiter = '}' - - while not tokenizer.LookingAt('>') and not tokenizer.LookingAt('}'): - _SkipField(tokenizer) - - tokenizer.Consume(delimiter) - - -def _SkipFieldValue(tokenizer): - """Skips over a field value. - - Args: - tokenizer: A tokenizer to parse the field name and values. - - Raises: - ParseError: In case an invalid field value is found. - """ - # String/bytes tokens can come in multiple adjacent string literals. - # If we can consume one, consume as many as we can. - if tokenizer.TryConsumeByteString(): - while tokenizer.TryConsumeByteString(): - pass - return - - if (not tokenizer.TryConsumeIdentifier() and - not _TryConsumeInt64(tokenizer) and not _TryConsumeUint64(tokenizer) and - not tokenizer.TryConsumeFloat()): - raise ParseError('Invalid field value: ' + tokenizer.token) - - -class Tokenizer(object): - """Protocol buffer text representation tokenizer. - - This class handles the lower level string parsing by splitting it into - meaningful tokens. - - It was directly ported from the Java protocol buffer API. - """ - - _WHITESPACE = re.compile(r'\s+') - _COMMENT = re.compile(r'(\s*#.*$)', re.MULTILINE) - _WHITESPACE_OR_COMMENT = re.compile(r'(\s|(#.*$))+', re.MULTILINE) - _TOKEN = re.compile('|'.join([ - r'[a-zA-Z_][0-9a-zA-Z_+-]*', # an identifier - r'([0-9+-]|(\.[0-9]))[0-9a-zA-Z_.+-]*', # a number - ] + [ # quoted str for each quote mark - # Avoid backtracking! https://stackoverflow.com/a/844267 - r'{qt}[^{qt}\n\\]*((\\.)+[^{qt}\n\\]*)*({qt}|\\?$)'.format(qt=mark) - for mark in _QUOTES - ])) - - _IDENTIFIER = re.compile(r'[^\d\W]\w*') - _IDENTIFIER_OR_NUMBER = re.compile(r'\w+') - - def __init__(self, lines, skip_comments=True): - self._position = 0 - self._line = -1 - self._column = 0 - self._token_start = None - self.token = '' - self._lines = iter(lines) - self._current_line = '' - self._previous_line = 0 - self._previous_column = 0 - self._more_lines = True - self._skip_comments = skip_comments - self._whitespace_pattern = (skip_comments and self._WHITESPACE_OR_COMMENT - or self._WHITESPACE) - self._SkipWhitespace() - self.NextToken() - - def LookingAt(self, token): - return self.token == token - - def AtEnd(self): - """Checks the end of the text was reached. - - Returns: - True iff the end was reached. - """ - return not self.token - - def _PopLine(self): - while len(self._current_line) <= self._column: - try: - self._current_line = next(self._lines) - except StopIteration: - self._current_line = '' - self._more_lines = False - return - else: - self._line += 1 - self._column = 0 - - def _SkipWhitespace(self): - while True: - self._PopLine() - match = self._whitespace_pattern.match(self._current_line, self._column) - if not match: - break - length = len(match.group(0)) - self._column += length - - def TryConsume(self, token): - """Tries to consume a given piece of text. - - Args: - token: Text to consume. - - Returns: - True iff the text was consumed. 
- """ - if self.token == token: - self.NextToken() - return True - return False - - def Consume(self, token): - """Consumes a piece of text. - - Args: - token: Text to consume. - - Raises: - ParseError: If the text couldn't be consumed. - """ - if not self.TryConsume(token): - raise self.ParseError('Expected "%s".' % token) - - def ConsumeComment(self): - result = self.token - if not self._COMMENT.match(result): - raise self.ParseError('Expected comment.') - self.NextToken() - return result - - def ConsumeCommentOrTrailingComment(self): - """Consumes a comment, returns a 2-tuple (trailing bool, comment str).""" - - # Tokenizer initializes _previous_line and _previous_column to 0. As the - # tokenizer starts, it looks like there is a previous token on the line. - just_started = self._line == 0 and self._column == 0 - - before_parsing = self._previous_line - comment = self.ConsumeComment() - - # A trailing comment is a comment on the same line than the previous token. - trailing = (self._previous_line == before_parsing - and not just_started) - - return trailing, comment - - def TryConsumeIdentifier(self): - try: - self.ConsumeIdentifier() - return True - except ParseError: - return False - - def ConsumeIdentifier(self): - """Consumes protocol message field identifier. - - Returns: - Identifier string. - - Raises: - ParseError: If an identifier couldn't be consumed. - """ - result = self.token - if not self._IDENTIFIER.match(result): - raise self.ParseError('Expected identifier.') - self.NextToken() - return result - - def TryConsumeIdentifierOrNumber(self): - try: - self.ConsumeIdentifierOrNumber() - return True - except ParseError: - return False - - def ConsumeIdentifierOrNumber(self): - """Consumes protocol message field identifier. - - Returns: - Identifier string. - - Raises: - ParseError: If an identifier couldn't be consumed. - """ - result = self.token - if not self._IDENTIFIER_OR_NUMBER.match(result): - raise self.ParseError('Expected identifier or number, got %s.' % result) - self.NextToken() - return result - - def TryConsumeInteger(self): - try: - self.ConsumeInteger() - return True - except ParseError: - return False - - def ConsumeInteger(self): - """Consumes an integer number. - - Returns: - The integer parsed. - - Raises: - ParseError: If an integer couldn't be consumed. - """ - try: - result = _ParseAbstractInteger(self.token) - except ValueError as e: - raise self.ParseError(str(e)) - self.NextToken() - return result - - def TryConsumeFloat(self): - try: - self.ConsumeFloat() - return True - except ParseError: - return False - - def ConsumeFloat(self): - """Consumes an floating point number. - - Returns: - The number parsed. - - Raises: - ParseError: If a floating point number couldn't be consumed. - """ - try: - result = ParseFloat(self.token) - except ValueError as e: - raise self.ParseError(str(e)) - self.NextToken() - return result - - def ConsumeBool(self): - """Consumes a boolean value. - - Returns: - The bool parsed. - - Raises: - ParseError: If a boolean value couldn't be consumed. - """ - try: - result = ParseBool(self.token) - except ValueError as e: - raise self.ParseError(str(e)) - self.NextToken() - return result - - def TryConsumeByteString(self): - try: - self.ConsumeByteString() - return True - except ParseError: - return False - - def ConsumeString(self): - """Consumes a string value. - - Returns: - The string parsed. - - Raises: - ParseError: If a string value couldn't be consumed. 
- """ - the_bytes = self.ConsumeByteString() - try: - return str(the_bytes, 'utf-8') - except UnicodeDecodeError as e: - raise self._StringParseError(e) - - def ConsumeByteString(self): - """Consumes a byte array value. - - Returns: - The array parsed (as a string). - - Raises: - ParseError: If a byte array value couldn't be consumed. - """ - the_list = [self._ConsumeSingleByteString()] - while self.token and self.token[0] in _QUOTES: - the_list.append(self._ConsumeSingleByteString()) - return b''.join(the_list) - - def _ConsumeSingleByteString(self): - """Consume one token of a string literal. - - String literals (whether bytes or text) can come in multiple adjacent - tokens which are automatically concatenated, like in C or Python. This - method only consumes one token. - - Returns: - The token parsed. - Raises: - ParseError: When the wrong format data is found. - """ - text = self.token - if len(text) < 1 or text[0] not in _QUOTES: - raise self.ParseError('Expected string but found: %r' % (text,)) - - if len(text) < 2 or text[-1] != text[0]: - raise self.ParseError('String missing ending quote: %r' % (text,)) - - try: - result = text_encoding.CUnescape(text[1:-1]) - except ValueError as e: - raise self.ParseError(str(e)) - self.NextToken() - return result - - def ConsumeEnum(self, field): - try: - result = ParseEnum(field, self.token) - except ValueError as e: - raise self.ParseError(str(e)) - self.NextToken() - return result - - def ParseErrorPreviousToken(self, message): - """Creates and *returns* a ParseError for the previously read token. - - Args: - message: A message to set for the exception. - - Returns: - A ParseError instance. - """ - return ParseError(message, self._previous_line + 1, - self._previous_column + 1) - - def ParseError(self, message): - """Creates and *returns* a ParseError for the current token.""" - return ParseError('\'' + self._current_line + '\': ' + message, - self._line + 1, self._column + 1) - - def _StringParseError(self, e): - return self.ParseError('Couldn\'t parse string: ' + str(e)) - - def NextToken(self): - """Reads the next meaningful token.""" - self._previous_line = self._line - self._previous_column = self._column - - self._column += len(self.token) - self._SkipWhitespace() - - if not self._more_lines: - self.token = '' - return - - match = self._TOKEN.match(self._current_line, self._column) - if not match and not self._skip_comments: - match = self._COMMENT.match(self._current_line, self._column) - if match: - token = match.group(0) - self.token = token - else: - self.token = self._current_line[self._column] - -# Aliased so it can still be accessed by current visibility violators. -# TODO(dbarnett): Migrate violators to textformat_tokenizer. -_Tokenizer = Tokenizer # pylint: disable=invalid-name - - -def _ConsumeInt32(tokenizer): - """Consumes a signed 32bit integer number from tokenizer. - - Args: - tokenizer: A tokenizer used to parse the number. - - Returns: - The integer parsed. - - Raises: - ParseError: If a signed 32bit integer couldn't be consumed. - """ - return _ConsumeInteger(tokenizer, is_signed=True, is_long=False) - - -def _ConsumeUint32(tokenizer): - """Consumes an unsigned 32bit integer number from tokenizer. - - Args: - tokenizer: A tokenizer used to parse the number. - - Returns: - The integer parsed. - - Raises: - ParseError: If an unsigned 32bit integer couldn't be consumed. 
- """ - return _ConsumeInteger(tokenizer, is_signed=False, is_long=False) - - -def _TryConsumeInt64(tokenizer): - try: - _ConsumeInt64(tokenizer) - return True - except ParseError: - return False - - -def _ConsumeInt64(tokenizer): - """Consumes a signed 32bit integer number from tokenizer. - - Args: - tokenizer: A tokenizer used to parse the number. - - Returns: - The integer parsed. - - Raises: - ParseError: If a signed 32bit integer couldn't be consumed. - """ - return _ConsumeInteger(tokenizer, is_signed=True, is_long=True) - - -def _TryConsumeUint64(tokenizer): - try: - _ConsumeUint64(tokenizer) - return True - except ParseError: - return False - - -def _ConsumeUint64(tokenizer): - """Consumes an unsigned 64bit integer number from tokenizer. - - Args: - tokenizer: A tokenizer used to parse the number. - - Returns: - The integer parsed. - - Raises: - ParseError: If an unsigned 64bit integer couldn't be consumed. - """ - return _ConsumeInteger(tokenizer, is_signed=False, is_long=True) - - -def _ConsumeInteger(tokenizer, is_signed=False, is_long=False): - """Consumes an integer number from tokenizer. - - Args: - tokenizer: A tokenizer used to parse the number. - is_signed: True if a signed integer must be parsed. - is_long: True if a long integer must be parsed. - - Returns: - The integer parsed. - - Raises: - ParseError: If an integer with given characteristics couldn't be consumed. - """ - try: - result = ParseInteger(tokenizer.token, is_signed=is_signed, is_long=is_long) - except ValueError as e: - raise tokenizer.ParseError(str(e)) - tokenizer.NextToken() - return result - - -def ParseInteger(text, is_signed=False, is_long=False): - """Parses an integer. - - Args: - text: The text to parse. - is_signed: True if a signed integer must be parsed. - is_long: True if a long integer must be parsed. - - Returns: - The integer value. - - Raises: - ValueError: Thrown Iff the text is not a valid integer. - """ - # Do the actual parsing. Exception handling is propagated to caller. - result = _ParseAbstractInteger(text) - - # Check if the integer is sane. Exceptions handled by callers. - checker = _INTEGER_CHECKERS[2 * int(is_long) + int(is_signed)] - checker.CheckValue(result) - return result - - -def _ParseAbstractInteger(text): - """Parses an integer without checking size/signedness. - - Args: - text: The text to parse. - - Returns: - The integer value. - - Raises: - ValueError: Thrown Iff the text is not a valid integer. - """ - # Do the actual parsing. Exception handling is propagated to caller. - orig_text = text - c_octal_match = re.match(r'(-?)0(\d+)$', text) - if c_octal_match: - # Python 3 no longer supports 0755 octal syntax without the 'o', so - # we always use the '0o' prefix for multi-digit numbers starting with 0. - text = c_octal_match.group(1) + '0o' + c_octal_match.group(2) - try: - return int(text, 0) - except ValueError: - raise ValueError('Couldn\'t parse integer: %s' % orig_text) - - -def ParseFloat(text): - """Parse a floating point number. - - Args: - text: Text to parse. - - Returns: - The number parsed. - - Raises: - ValueError: If a floating point number couldn't be parsed. - """ - try: - # Assume Python compatible syntax. - return float(text) - except ValueError: - # Check alternative spellings. 
- if _FLOAT_INFINITY.match(text): - if text[0] == '-': - return float('-inf') - else: - return float('inf') - elif _FLOAT_NAN.match(text): - return float('nan') - else: - # assume '1.0f' format - try: - return float(text.rstrip('f')) - except ValueError: - raise ValueError('Couldn\'t parse float: %s' % text) - - -def ParseBool(text): - """Parse a boolean value. - - Args: - text: Text to parse. - - Returns: - Boolean values parsed - - Raises: - ValueError: If text is not a valid boolean. - """ - if text in ('true', 't', '1', 'True'): - return True - elif text in ('false', 'f', '0', 'False'): - return False - else: - raise ValueError('Expected "true" or "false".') - - -def ParseEnum(field, value): - """Parse an enum value. - - The value can be specified by a number (the enum value), or by - a string literal (the enum name). - - Args: - field: Enum field descriptor. - value: String value. - - Returns: - Enum value number. - - Raises: - ValueError: If the enum value could not be parsed. - """ - enum_descriptor = field.enum_type - try: - number = int(value, 0) - except ValueError: - # Identifier. - enum_value = enum_descriptor.values_by_name.get(value, None) - if enum_value is None: - raise ValueError('Enum type "%s" has no value named %s.' % - (enum_descriptor.full_name, value)) - else: - # Numeric value. - if hasattr(field.file, 'syntax'): - # Attribute is checked for compatibility. - if field.file.syntax == 'proto3': - # Proto3 accept numeric unknown enums. - return number - enum_value = enum_descriptor.values_by_number.get(number, None) - if enum_value is None: - raise ValueError('Enum type "%s" has no value with number %d.' % - (enum_descriptor.full_name, number)) - return enum_value.number diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/timestamp_pb2.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/timestamp_pb2.py deleted file mode 100644 index 558d496941..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/timestamp_pb2.py +++ /dev/null @@ -1,26 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/protobuf/timestamp.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1fgoogle/protobuf/timestamp.proto\x12\x0fgoogle.protobuf\"+\n\tTimestamp\x12\x0f\n\x07seconds\x18\x01 \x01(\x03\x12\r\n\x05nanos\x18\x02 \x01(\x05\x42\x85\x01\n\x13\x63om.google.protobufB\x0eTimestampProtoP\x01Z2google.golang.org/protobuf/types/known/timestamppb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') - -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.timestamp_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\016TimestampProtoP\001Z2google.golang.org/protobuf/types/known/timestamppb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' - _TIMESTAMP._serialized_start=52 - _TIMESTAMP._serialized_end=95 -# @@protoc_insertion_point(module_scope) diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/type_pb2.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/type_pb2.py deleted file mode 100644 index 19903fb6b4..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/type_pb2.py +++ /dev/null @@ -1,42 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/protobuf/type.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 -from google.protobuf import source_context_pb2 as google_dot_protobuf_dot_source__context__pb2 - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1agoogle/protobuf/type.proto\x12\x0fgoogle.protobuf\x1a\x19google/protobuf/any.proto\x1a$google/protobuf/source_context.proto\"\xd7\x01\n\x04Type\x12\x0c\n\x04name\x18\x01 \x01(\t\x12&\n\x06\x66ields\x18\x02 \x03(\x0b\x32\x16.google.protobuf.Field\x12\x0e\n\x06oneofs\x18\x03 \x03(\t\x12(\n\x07options\x18\x04 \x03(\x0b\x32\x17.google.protobuf.Option\x12\x36\n\x0esource_context\x18\x05 \x01(\x0b\x32\x1e.google.protobuf.SourceContext\x12\'\n\x06syntax\x18\x06 \x01(\x0e\x32\x17.google.protobuf.Syntax\"\xd5\x05\n\x05\x46ield\x12)\n\x04kind\x18\x01 \x01(\x0e\x32\x1b.google.protobuf.Field.Kind\x12\x37\n\x0b\x63\x61rdinality\x18\x02 \x01(\x0e\x32\".google.protobuf.Field.Cardinality\x12\x0e\n\x06number\x18\x03 \x01(\x05\x12\x0c\n\x04name\x18\x04 \x01(\t\x12\x10\n\x08type_url\x18\x06 \x01(\t\x12\x13\n\x0boneof_index\x18\x07 \x01(\x05\x12\x0e\n\x06packed\x18\x08 \x01(\x08\x12(\n\x07options\x18\t \x03(\x0b\x32\x17.google.protobuf.Option\x12\x11\n\tjson_name\x18\n \x01(\t\x12\x15\n\rdefault_value\x18\x0b 
\x01(\t\"\xc8\x02\n\x04Kind\x12\x10\n\x0cTYPE_UNKNOWN\x10\x00\x12\x0f\n\x0bTYPE_DOUBLE\x10\x01\x12\x0e\n\nTYPE_FLOAT\x10\x02\x12\x0e\n\nTYPE_INT64\x10\x03\x12\x0f\n\x0bTYPE_UINT64\x10\x04\x12\x0e\n\nTYPE_INT32\x10\x05\x12\x10\n\x0cTYPE_FIXED64\x10\x06\x12\x10\n\x0cTYPE_FIXED32\x10\x07\x12\r\n\tTYPE_BOOL\x10\x08\x12\x0f\n\x0bTYPE_STRING\x10\t\x12\x0e\n\nTYPE_GROUP\x10\n\x12\x10\n\x0cTYPE_MESSAGE\x10\x0b\x12\x0e\n\nTYPE_BYTES\x10\x0c\x12\x0f\n\x0bTYPE_UINT32\x10\r\x12\r\n\tTYPE_ENUM\x10\x0e\x12\x11\n\rTYPE_SFIXED32\x10\x0f\x12\x11\n\rTYPE_SFIXED64\x10\x10\x12\x0f\n\x0bTYPE_SINT32\x10\x11\x12\x0f\n\x0bTYPE_SINT64\x10\x12\"t\n\x0b\x43\x61rdinality\x12\x17\n\x13\x43\x41RDINALITY_UNKNOWN\x10\x00\x12\x18\n\x14\x43\x41RDINALITY_OPTIONAL\x10\x01\x12\x18\n\x14\x43\x41RDINALITY_REQUIRED\x10\x02\x12\x18\n\x14\x43\x41RDINALITY_REPEATED\x10\x03\"\xce\x01\n\x04\x45num\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\tenumvalue\x18\x02 \x03(\x0b\x32\x1a.google.protobuf.EnumValue\x12(\n\x07options\x18\x03 \x03(\x0b\x32\x17.google.protobuf.Option\x12\x36\n\x0esource_context\x18\x04 \x01(\x0b\x32\x1e.google.protobuf.SourceContext\x12\'\n\x06syntax\x18\x05 \x01(\x0e\x32\x17.google.protobuf.Syntax\"S\n\tEnumValue\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x02 \x01(\x05\x12(\n\x07options\x18\x03 \x03(\x0b\x32\x17.google.protobuf.Option\";\n\x06Option\x12\x0c\n\x04name\x18\x01 \x01(\t\x12#\n\x05value\x18\x02 \x01(\x0b\x32\x14.google.protobuf.Any*.\n\x06Syntax\x12\x11\n\rSYNTAX_PROTO2\x10\x00\x12\x11\n\rSYNTAX_PROTO3\x10\x01\x42{\n\x13\x63om.google.protobufB\tTypeProtoP\x01Z-google.golang.org/protobuf/types/known/typepb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') - -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.type_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\tTypeProtoP\001Z-google.golang.org/protobuf/types/known/typepb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' - _SYNTAX._serialized_start=1413 - _SYNTAX._serialized_end=1459 - _TYPE._serialized_start=113 - _TYPE._serialized_end=328 - _FIELD._serialized_start=331 - _FIELD._serialized_end=1056 - _FIELD_KIND._serialized_start=610 - _FIELD_KIND._serialized_end=938 - _FIELD_CARDINALITY._serialized_start=940 - _FIELD_CARDINALITY._serialized_end=1056 - _ENUM._serialized_start=1059 - _ENUM._serialized_end=1265 - _ENUMVALUE._serialized_start=1267 - _ENUMVALUE._serialized_end=1350 - _OPTION._serialized_start=1352 - _OPTION._serialized_end=1411 -# @@protoc_insertion_point(module_scope) diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/util/__init__.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/util/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/util/json_format_pb2.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/util/json_format_pb2.py deleted file mode 100644 index 66a5836c82..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/util/json_format_pb2.py +++ /dev/null @@ -1,72 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/protobuf/util/json_format.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n&google/protobuf/util/json_format.proto\x12\x11protobuf_unittest\"\x89\x01\n\x13TestFlagsAndStrings\x12\t\n\x01\x41\x18\x01 \x02(\x05\x12K\n\rrepeatedgroup\x18\x02 \x03(\n24.protobuf_unittest.TestFlagsAndStrings.RepeatedGroup\x1a\x1a\n\rRepeatedGroup\x12\t\n\x01\x66\x18\x03 \x02(\t\"!\n\x14TestBase64ByteArrays\x12\t\n\x01\x61\x18\x01 \x02(\x0c\"G\n\x12TestJavaScriptJSON\x12\t\n\x01\x61\x18\x01 \x01(\x05\x12\r\n\x05\x66inal\x18\x02 \x01(\x02\x12\n\n\x02in\x18\x03 \x01(\t\x12\x0b\n\x03Var\x18\x04 \x01(\t\"Q\n\x18TestJavaScriptOrderJSON1\x12\t\n\x01\x64\x18\x01 \x01(\x05\x12\t\n\x01\x63\x18\x02 \x01(\x05\x12\t\n\x01x\x18\x03 \x01(\x08\x12\t\n\x01\x62\x18\x04 \x01(\x05\x12\t\n\x01\x61\x18\x05 \x01(\x05\"\x89\x01\n\x18TestJavaScriptOrderJSON2\x12\t\n\x01\x64\x18\x01 \x01(\x05\x12\t\n\x01\x63\x18\x02 \x01(\x05\x12\t\n\x01x\x18\x03 \x01(\x08\x12\t\n\x01\x62\x18\x04 \x01(\x05\x12\t\n\x01\x61\x18\x05 \x01(\x05\x12\x36\n\x01z\x18\x06 \x03(\x0b\x32+.protobuf_unittest.TestJavaScriptOrderJSON1\"$\n\x0cTestLargeInt\x12\t\n\x01\x61\x18\x01 \x02(\x03\x12\t\n\x01\x62\x18\x02 \x02(\x04\"\xa0\x01\n\x0bTestNumbers\x12\x30\n\x01\x61\x18\x01 \x01(\x0e\x32%.protobuf_unittest.TestNumbers.MyType\x12\t\n\x01\x62\x18\x02 \x01(\x05\x12\t\n\x01\x63\x18\x03 \x01(\x02\x12\t\n\x01\x64\x18\x04 \x01(\x08\x12\t\n\x01\x65\x18\x05 \x01(\x01\x12\t\n\x01\x66\x18\x06 \x01(\r\"(\n\x06MyType\x12\x06\n\x02OK\x10\x00\x12\x0b\n\x07WARNING\x10\x01\x12\t\n\x05\x45RROR\x10\x02\"T\n\rTestCamelCase\x12\x14\n\x0cnormal_field\x18\x01 \x01(\t\x12\x15\n\rCAPITAL_FIELD\x18\x02 \x01(\x05\x12\x16\n\x0e\x43\x61melCaseField\x18\x03 \x01(\x05\"|\n\x0bTestBoolMap\x12=\n\x08\x62ool_map\x18\x01 \x03(\x0b\x32+.protobuf_unittest.TestBoolMap.BoolMapEntry\x1a.\n\x0c\x42oolMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x08\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\"O\n\rTestRecursion\x12\r\n\x05value\x18\x01 \x01(\x05\x12/\n\x05\x63hild\x18\x02 \x01(\x0b\x32 .protobuf_unittest.TestRecursion\"\x86\x01\n\rTestStringMap\x12\x43\n\nstring_map\x18\x01 \x03(\x0b\x32/.protobuf_unittest.TestStringMap.StringMapEntry\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xc4\x01\n\x14TestStringSerializer\x12\x15\n\rscalar_string\x18\x01 \x01(\t\x12\x17\n\x0frepeated_string\x18\x02 \x03(\t\x12J\n\nstring_map\x18\x03 \x03(\x0b\x32\x36.protobuf_unittest.TestStringSerializer.StringMapEntry\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"$\n\x18TestMessageWithExtension*\x08\x08\x64\x10\x80\x80\x80\x80\x02\"z\n\rTestExtension\x12\r\n\x05value\x18\x01 \x01(\t2Z\n\x03\x65xt\x12+.protobuf_unittest.TestMessageWithExtension\x18\x64 \x01(\x0b\x32 .protobuf_unittest.TestExtension\"Q\n\x14TestDefaultEnumValue\x12\x39\n\nenum_value\x18\x01 \x01(\x0e\x32\x1c.protobuf_unittest.EnumValue:\x07\x44\x45\x46\x41ULT*2\n\tEnumValue\x12\x0c\n\x08PROTOCOL\x10\x00\x12\n\n\x06\x42UFFER\x10\x01\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x02') - -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) 
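# For illustration: generated *_pb2 modules are assembled in two passes.
# The call above creates Descriptor objects for every message and enum in
# DESCRIPTOR, and the call below builds the concrete Message classes
# (e.g. TestFlagsAndStrings) and binds them into this module's globals().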
-_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.util.json_format_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - TestMessageWithExtension.RegisterExtension(_TESTEXTENSION.extensions_by_name['ext']) - - DESCRIPTOR._options = None - _TESTBOOLMAP_BOOLMAPENTRY._options = None - _TESTBOOLMAP_BOOLMAPENTRY._serialized_options = b'8\001' - _TESTSTRINGMAP_STRINGMAPENTRY._options = None - _TESTSTRINGMAP_STRINGMAPENTRY._serialized_options = b'8\001' - _TESTSTRINGSERIALIZER_STRINGMAPENTRY._options = None - _TESTSTRINGSERIALIZER_STRINGMAPENTRY._serialized_options = b'8\001' - _ENUMVALUE._serialized_start=1607 - _ENUMVALUE._serialized_end=1657 - _TESTFLAGSANDSTRINGS._serialized_start=62 - _TESTFLAGSANDSTRINGS._serialized_end=199 - _TESTFLAGSANDSTRINGS_REPEATEDGROUP._serialized_start=173 - _TESTFLAGSANDSTRINGS_REPEATEDGROUP._serialized_end=199 - _TESTBASE64BYTEARRAYS._serialized_start=201 - _TESTBASE64BYTEARRAYS._serialized_end=234 - _TESTJAVASCRIPTJSON._serialized_start=236 - _TESTJAVASCRIPTJSON._serialized_end=307 - _TESTJAVASCRIPTORDERJSON1._serialized_start=309 - _TESTJAVASCRIPTORDERJSON1._serialized_end=390 - _TESTJAVASCRIPTORDERJSON2._serialized_start=393 - _TESTJAVASCRIPTORDERJSON2._serialized_end=530 - _TESTLARGEINT._serialized_start=532 - _TESTLARGEINT._serialized_end=568 - _TESTNUMBERS._serialized_start=571 - _TESTNUMBERS._serialized_end=731 - _TESTNUMBERS_MYTYPE._serialized_start=691 - _TESTNUMBERS_MYTYPE._serialized_end=731 - _TESTCAMELCASE._serialized_start=733 - _TESTCAMELCASE._serialized_end=817 - _TESTBOOLMAP._serialized_start=819 - _TESTBOOLMAP._serialized_end=943 - _TESTBOOLMAP_BOOLMAPENTRY._serialized_start=897 - _TESTBOOLMAP_BOOLMAPENTRY._serialized_end=943 - _TESTRECURSION._serialized_start=945 - _TESTRECURSION._serialized_end=1024 - _TESTSTRINGMAP._serialized_start=1027 - _TESTSTRINGMAP._serialized_end=1161 - _TESTSTRINGMAP_STRINGMAPENTRY._serialized_start=1113 - _TESTSTRINGMAP_STRINGMAPENTRY._serialized_end=1161 - _TESTSTRINGSERIALIZER._serialized_start=1164 - _TESTSTRINGSERIALIZER._serialized_end=1360 - _TESTSTRINGSERIALIZER_STRINGMAPENTRY._serialized_start=1113 - _TESTSTRINGSERIALIZER_STRINGMAPENTRY._serialized_end=1161 - _TESTMESSAGEWITHEXTENSION._serialized_start=1362 - _TESTMESSAGEWITHEXTENSION._serialized_end=1398 - _TESTEXTENSION._serialized_start=1400 - _TESTEXTENSION._serialized_end=1522 - _TESTDEFAULTENUMVALUE._serialized_start=1524 - _TESTDEFAULTENUMVALUE._serialized_end=1605 -# @@protoc_insertion_point(module_scope) diff --git a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/util/json_format_proto3_pb2.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/util/json_format_proto3_pb2.py deleted file mode 100644 index 5498deafa9..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/util/json_format_proto3_pb2.py +++ /dev/null @@ -1,129 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/protobuf/util/json_format_proto3.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 -from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 -from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 -from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 -from google.protobuf import unittest_pb2 as google_dot_protobuf_dot_unittest__pb2 - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n-google/protobuf/util/json_format_proto3.proto\x12\x06proto3\x1a\x19google/protobuf/any.proto\x1a\x1egoogle/protobuf/duration.proto\x1a google/protobuf/field_mask.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x1egoogle/protobuf/unittest.proto\"\x1c\n\x0bMessageType\x12\r\n\x05value\x18\x01 \x01(\x05\"\x94\x05\n\x0bTestMessage\x12\x12\n\nbool_value\x18\x01 \x01(\x08\x12\x13\n\x0bint32_value\x18\x02 \x01(\x05\x12\x13\n\x0bint64_value\x18\x03 \x01(\x03\x12\x14\n\x0cuint32_value\x18\x04 \x01(\r\x12\x14\n\x0cuint64_value\x18\x05 \x01(\x04\x12\x13\n\x0b\x66loat_value\x18\x06 \x01(\x02\x12\x14\n\x0c\x64ouble_value\x18\x07 \x01(\x01\x12\x14\n\x0cstring_value\x18\x08 \x01(\t\x12\x13\n\x0b\x62ytes_value\x18\t \x01(\x0c\x12$\n\nenum_value\x18\n \x01(\x0e\x32\x10.proto3.EnumType\x12*\n\rmessage_value\x18\x0b \x01(\x0b\x32\x13.proto3.MessageType\x12\x1b\n\x13repeated_bool_value\x18\x15 \x03(\x08\x12\x1c\n\x14repeated_int32_value\x18\x16 \x03(\x05\x12\x1c\n\x14repeated_int64_value\x18\x17 \x03(\x03\x12\x1d\n\x15repeated_uint32_value\x18\x18 \x03(\r\x12\x1d\n\x15repeated_uint64_value\x18\x19 \x03(\x04\x12\x1c\n\x14repeated_float_value\x18\x1a \x03(\x02\x12\x1d\n\x15repeated_double_value\x18\x1b \x03(\x01\x12\x1d\n\x15repeated_string_value\x18\x1c \x03(\t\x12\x1c\n\x14repeated_bytes_value\x18\x1d \x03(\x0c\x12-\n\x13repeated_enum_value\x18\x1e \x03(\x0e\x32\x10.proto3.EnumType\x12\x33\n\x16repeated_message_value\x18\x1f \x03(\x0b\x32\x13.proto3.MessageType\"\x8c\x02\n\tTestOneof\x12\x1b\n\x11oneof_int32_value\x18\x01 \x01(\x05H\x00\x12\x1c\n\x12oneof_string_value\x18\x02 \x01(\tH\x00\x12\x1b\n\x11oneof_bytes_value\x18\x03 \x01(\x0cH\x00\x12,\n\x10oneof_enum_value\x18\x04 \x01(\x0e\x32\x10.proto3.EnumTypeH\x00\x12\x32\n\x13oneof_message_value\x18\x05 \x01(\x0b\x32\x13.proto3.MessageTypeH\x00\x12\x36\n\x10oneof_null_value\x18\x06 \x01(\x0e\x32\x1a.google.protobuf.NullValueH\x00\x42\r\n\x0boneof_value\"\xe1\x04\n\x07TestMap\x12.\n\x08\x62ool_map\x18\x01 \x03(\x0b\x32\x1c.proto3.TestMap.BoolMapEntry\x12\x30\n\tint32_map\x18\x02 \x03(\x0b\x32\x1d.proto3.TestMap.Int32MapEntry\x12\x30\n\tint64_map\x18\x03 \x03(\x0b\x32\x1d.proto3.TestMap.Int64MapEntry\x12\x32\n\nuint32_map\x18\x04 \x03(\x0b\x32\x1e.proto3.TestMap.Uint32MapEntry\x12\x32\n\nuint64_map\x18\x05 \x03(\x0b\x32\x1e.proto3.TestMap.Uint64MapEntry\x12\x32\n\nstring_map\x18\x06 
\x03(\x0b\x32\x1e.proto3.TestMap.StringMapEntry\x1a.\n\x0c\x42oolMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x08\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a/\n\rInt32MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a/\n\rInt64MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x03\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eUint32MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\r\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eUint64MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x04\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\"\x85\x06\n\rTestNestedMap\x12\x34\n\x08\x62ool_map\x18\x01 \x03(\x0b\x32\".proto3.TestNestedMap.BoolMapEntry\x12\x36\n\tint32_map\x18\x02 \x03(\x0b\x32#.proto3.TestNestedMap.Int32MapEntry\x12\x36\n\tint64_map\x18\x03 \x03(\x0b\x32#.proto3.TestNestedMap.Int64MapEntry\x12\x38\n\nuint32_map\x18\x04 \x03(\x0b\x32$.proto3.TestNestedMap.Uint32MapEntry\x12\x38\n\nuint64_map\x18\x05 \x03(\x0b\x32$.proto3.TestNestedMap.Uint64MapEntry\x12\x38\n\nstring_map\x18\x06 \x03(\x0b\x32$.proto3.TestNestedMap.StringMapEntry\x12\x32\n\x07map_map\x18\x07 \x03(\x0b\x32!.proto3.TestNestedMap.MapMapEntry\x1a.\n\x0c\x42oolMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x08\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a/\n\rInt32MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a/\n\rInt64MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x03\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eUint32MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\r\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eUint64MapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x04\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x1a\x44\n\x0bMapMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12$\n\x05value\x18\x02 \x01(\x0b\x32\x15.proto3.TestNestedMap:\x02\x38\x01\"{\n\rTestStringMap\x12\x38\n\nstring_map\x18\x01 \x03(\x0b\x32$.proto3.TestStringMap.StringMapEntry\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xee\x07\n\x0bTestWrapper\x12.\n\nbool_value\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x30\n\x0bint32_value\x18\x02 \x01(\x0b\x32\x1b.google.protobuf.Int32Value\x12\x30\n\x0bint64_value\x18\x03 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12\x32\n\x0cuint32_value\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.UInt32Value\x12\x32\n\x0cuint64_value\x18\x05 \x01(\x0b\x32\x1c.google.protobuf.UInt64Value\x12\x30\n\x0b\x66loat_value\x18\x06 \x01(\x0b\x32\x1b.google.protobuf.FloatValue\x12\x32\n\x0c\x64ouble_value\x18\x07 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12\x32\n\x0cstring_value\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x30\n\x0b\x62ytes_value\x18\t \x01(\x0b\x32\x1b.google.protobuf.BytesValue\x12\x37\n\x13repeated_bool_value\x18\x0b \x03(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x39\n\x14repeated_int32_value\x18\x0c \x03(\x0b\x32\x1b.google.protobuf.Int32Value\x12\x39\n\x14repeated_int64_value\x18\r \x03(\x0b\x32\x1b.google.protobuf.Int64Value\x12;\n\x15repeated_uint32_value\x18\x0e \x03(\x0b\x32\x1c.google.protobuf.UInt32Value\x12;\n\x15repeated_uint64_value\x18\x0f \x03(\x0b\x32\x1c.google.protobuf.UInt64Value\x12\x39\n\x14repeated_float_value\x18\x10 
\x03(\x0b\x32\x1b.google.protobuf.FloatValue\x12;\n\x15repeated_double_value\x18\x11 \x03(\x0b\x32\x1c.google.protobuf.DoubleValue\x12;\n\x15repeated_string_value\x18\x12 \x03(\x0b\x32\x1c.google.protobuf.StringValue\x12\x39\n\x14repeated_bytes_value\x18\x13 \x03(\x0b\x32\x1b.google.protobuf.BytesValue\"n\n\rTestTimestamp\x12)\n\x05value\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x32\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x1a.google.protobuf.Timestamp\"k\n\x0cTestDuration\x12(\n\x05value\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x31\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x19.google.protobuf.Duration\":\n\rTestFieldMask\x12)\n\x05value\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\"e\n\nTestStruct\x12&\n\x05value\x18\x01 \x01(\x0b\x32\x17.google.protobuf.Struct\x12/\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x17.google.protobuf.Struct\"\\\n\x07TestAny\x12#\n\x05value\x18\x01 \x01(\x0b\x32\x14.google.protobuf.Any\x12,\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x14.google.protobuf.Any\"b\n\tTestValue\x12%\n\x05value\x18\x01 \x01(\x0b\x32\x16.google.protobuf.Value\x12.\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x16.google.protobuf.Value\"n\n\rTestListValue\x12)\n\x05value\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.ListValue\x12\x32\n\x0erepeated_value\x18\x02 \x03(\x0b\x32\x1a.google.protobuf.ListValue\"\x89\x01\n\rTestBoolValue\x12\x12\n\nbool_value\x18\x01 \x01(\x08\x12\x34\n\x08\x62ool_map\x18\x02 \x03(\x0b\x32\".proto3.TestBoolValue.BoolMapEntry\x1a.\n\x0c\x42oolMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x08\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\"+\n\x12TestCustomJsonName\x12\x15\n\x05value\x18\x01 \x01(\x05R\x06@value\"J\n\x0eTestExtensions\x12\x38\n\nextensions\x18\x01 \x01(\x0b\x32$.protobuf_unittest.TestAllExtensions\"\x84\x01\n\rTestEnumValue\x12%\n\x0b\x65num_value1\x18\x01 \x01(\x0e\x32\x10.proto3.EnumType\x12%\n\x0b\x65num_value2\x18\x02 \x01(\x0e\x32\x10.proto3.EnumType\x12%\n\x0b\x65num_value3\x18\x03 \x01(\x0e\x32\x10.proto3.EnumType*\x1c\n\x08\x45numType\x12\x07\n\x03\x46OO\x10\x00\x12\x07\n\x03\x42\x41R\x10\x01\x42,\n\x18\x63om.google.protobuf.utilB\x10JsonFormatProto3b\x06proto3') - -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.util.json_format_proto3_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'\n\030com.google.protobuf.utilB\020JsonFormatProto3' - _TESTMAP_BOOLMAPENTRY._options = None - _TESTMAP_BOOLMAPENTRY._serialized_options = b'8\001' - _TESTMAP_INT32MAPENTRY._options = None - _TESTMAP_INT32MAPENTRY._serialized_options = b'8\001' - _TESTMAP_INT64MAPENTRY._options = None - _TESTMAP_INT64MAPENTRY._serialized_options = b'8\001' - _TESTMAP_UINT32MAPENTRY._options = None - _TESTMAP_UINT32MAPENTRY._serialized_options = b'8\001' - _TESTMAP_UINT64MAPENTRY._options = None - _TESTMAP_UINT64MAPENTRY._serialized_options = b'8\001' - _TESTMAP_STRINGMAPENTRY._options = None - _TESTMAP_STRINGMAPENTRY._serialized_options = b'8\001' - _TESTNESTEDMAP_BOOLMAPENTRY._options = None - _TESTNESTEDMAP_BOOLMAPENTRY._serialized_options = b'8\001' - _TESTNESTEDMAP_INT32MAPENTRY._options = None - _TESTNESTEDMAP_INT32MAPENTRY._serialized_options = b'8\001' - _TESTNESTEDMAP_INT64MAPENTRY._options = None - _TESTNESTEDMAP_INT64MAPENTRY._serialized_options = b'8\001' - _TESTNESTEDMAP_UINT32MAPENTRY._options = None - _TESTNESTEDMAP_UINT32MAPENTRY._serialized_options = b'8\001' - 
_TESTNESTEDMAP_UINT64MAPENTRY._options = None - _TESTNESTEDMAP_UINT64MAPENTRY._serialized_options = b'8\001' - _TESTNESTEDMAP_STRINGMAPENTRY._options = None - _TESTNESTEDMAP_STRINGMAPENTRY._serialized_options = b'8\001' - _TESTNESTEDMAP_MAPMAPENTRY._options = None - _TESTNESTEDMAP_MAPMAPENTRY._serialized_options = b'8\001' - _TESTSTRINGMAP_STRINGMAPENTRY._options = None - _TESTSTRINGMAP_STRINGMAPENTRY._serialized_options = b'8\001' - _TESTBOOLVALUE_BOOLMAPENTRY._options = None - _TESTBOOLVALUE_BOOLMAPENTRY._serialized_options = b'8\001' - _ENUMTYPE._serialized_start=4849 - _ENUMTYPE._serialized_end=4877 - _MESSAGETYPE._serialized_start=277 - _MESSAGETYPE._serialized_end=305 - _TESTMESSAGE._serialized_start=308 - _TESTMESSAGE._serialized_end=968 - _TESTONEOF._serialized_start=971 - _TESTONEOF._serialized_end=1239 - _TESTMAP._serialized_start=1242 - _TESTMAP._serialized_end=1851 - _TESTMAP_BOOLMAPENTRY._serialized_start=1557 - _TESTMAP_BOOLMAPENTRY._serialized_end=1603 - _TESTMAP_INT32MAPENTRY._serialized_start=1605 - _TESTMAP_INT32MAPENTRY._serialized_end=1652 - _TESTMAP_INT64MAPENTRY._serialized_start=1654 - _TESTMAP_INT64MAPENTRY._serialized_end=1701 - _TESTMAP_UINT32MAPENTRY._serialized_start=1703 - _TESTMAP_UINT32MAPENTRY._serialized_end=1751 - _TESTMAP_UINT64MAPENTRY._serialized_start=1753 - _TESTMAP_UINT64MAPENTRY._serialized_end=1801 - _TESTMAP_STRINGMAPENTRY._serialized_start=1803 - _TESTMAP_STRINGMAPENTRY._serialized_end=1851 - _TESTNESTEDMAP._serialized_start=1854 - _TESTNESTEDMAP._serialized_end=2627 - _TESTNESTEDMAP_BOOLMAPENTRY._serialized_start=1557 - _TESTNESTEDMAP_BOOLMAPENTRY._serialized_end=1603 - _TESTNESTEDMAP_INT32MAPENTRY._serialized_start=1605 - _TESTNESTEDMAP_INT32MAPENTRY._serialized_end=1652 - _TESTNESTEDMAP_INT64MAPENTRY._serialized_start=1654 - _TESTNESTEDMAP_INT64MAPENTRY._serialized_end=1701 - _TESTNESTEDMAP_UINT32MAPENTRY._serialized_start=1703 - _TESTNESTEDMAP_UINT32MAPENTRY._serialized_end=1751 - _TESTNESTEDMAP_UINT64MAPENTRY._serialized_start=1753 - _TESTNESTEDMAP_UINT64MAPENTRY._serialized_end=1801 - _TESTNESTEDMAP_STRINGMAPENTRY._serialized_start=1803 - _TESTNESTEDMAP_STRINGMAPENTRY._serialized_end=1851 - _TESTNESTEDMAP_MAPMAPENTRY._serialized_start=2559 - _TESTNESTEDMAP_MAPMAPENTRY._serialized_end=2627 - _TESTSTRINGMAP._serialized_start=2629 - _TESTSTRINGMAP._serialized_end=2752 - _TESTSTRINGMAP_STRINGMAPENTRY._serialized_start=2704 - _TESTSTRINGMAP_STRINGMAPENTRY._serialized_end=2752 - _TESTWRAPPER._serialized_start=2755 - _TESTWRAPPER._serialized_end=3761 - _TESTTIMESTAMP._serialized_start=3763 - _TESTTIMESTAMP._serialized_end=3873 - _TESTDURATION._serialized_start=3875 - _TESTDURATION._serialized_end=3982 - _TESTFIELDMASK._serialized_start=3984 - _TESTFIELDMASK._serialized_end=4042 - _TESTSTRUCT._serialized_start=4044 - _TESTSTRUCT._serialized_end=4145 - _TESTANY._serialized_start=4147 - _TESTANY._serialized_end=4239 - _TESTVALUE._serialized_start=4241 - _TESTVALUE._serialized_end=4339 - _TESTLISTVALUE._serialized_start=4341 - _TESTLISTVALUE._serialized_end=4451 - _TESTBOOLVALUE._serialized_start=4454 - _TESTBOOLVALUE._serialized_end=4591 - _TESTBOOLVALUE_BOOLMAPENTRY._serialized_start=1557 - _TESTBOOLVALUE_BOOLMAPENTRY._serialized_end=1603 - _TESTCUSTOMJSONNAME._serialized_start=4593 - _TESTCUSTOMJSONNAME._serialized_end=4636 - _TESTEXTENSIONS._serialized_start=4638 - _TESTEXTENSIONS._serialized_end=4712 - _TESTENUMVALUE._serialized_start=4715 - _TESTENUMVALUE._serialized_end=4847 -# @@protoc_insertion_point(module_scope) diff --git 
a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/wrappers_pb2.py b/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/wrappers_pb2.py deleted file mode 100644 index e49eb4c15d..0000000000 --- a/server_addon/nuke/client/ayon_nuke/vendor/google/protobuf/wrappers_pb2.py +++ /dev/null @@ -1,42 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/protobuf/wrappers.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1egoogle/protobuf/wrappers.proto\x12\x0fgoogle.protobuf\"\x1c\n\x0b\x44oubleValue\x12\r\n\x05value\x18\x01 \x01(\x01\"\x1b\n\nFloatValue\x12\r\n\x05value\x18\x01 \x01(\x02\"\x1b\n\nInt64Value\x12\r\n\x05value\x18\x01 \x01(\x03\"\x1c\n\x0bUInt64Value\x12\r\n\x05value\x18\x01 \x01(\x04\"\x1b\n\nInt32Value\x12\r\n\x05value\x18\x01 \x01(\x05\"\x1c\n\x0bUInt32Value\x12\r\n\x05value\x18\x01 \x01(\r\"\x1a\n\tBoolValue\x12\r\n\x05value\x18\x01 \x01(\x08\"\x1c\n\x0bStringValue\x12\r\n\x05value\x18\x01 \x01(\t\"\x1b\n\nBytesValue\x12\r\n\x05value\x18\x01 \x01(\x0c\x42\x83\x01\n\x13\x63om.google.protobufB\rWrappersProtoP\x01Z1google.golang.org/protobuf/types/known/wrapperspb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') - -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.wrappers_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\rWrappersProtoP\001Z1google.golang.org/protobuf/types/known/wrapperspb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' - _DOUBLEVALUE._serialized_start=51 - _DOUBLEVALUE._serialized_end=79 - _FLOATVALUE._serialized_start=81 - _FLOATVALUE._serialized_end=108 - _INT64VALUE._serialized_start=110 - _INT64VALUE._serialized_end=137 - _UINT64VALUE._serialized_start=139 - _UINT64VALUE._serialized_end=167 - _INT32VALUE._serialized_start=169 - _INT32VALUE._serialized_end=196 - _UINT32VALUE._serialized_start=198 - _UINT32VALUE._serialized_end=226 - _BOOLVALUE._serialized_start=228 - _BOOLVALUE._serialized_end=254 - _STRINGVALUE._serialized_start=256 - _STRINGVALUE._serialized_end=284 - _BYTESVALUE._serialized_start=286 - _BYTESVALUE._serialized_end=313 -# @@protoc_insertion_point(module_scope) diff --git a/server_addon/nuke/client/ayon_nuke/version.py b/server_addon/nuke/client/ayon_nuke/version.py deleted file mode 100644 index 2262afb410..0000000000 --- a/server_addon/nuke/client/ayon_nuke/version.py +++ /dev/null @@ -1,3 +0,0 @@ -# -*- coding: utf-8 -*- -"""Package declaring AYON addon 'nuke' version.""" -__version__ = "0.2.3" diff --git a/server_addon/nuke/package.py b/server_addon/nuke/package.py deleted file mode 100644 index 7347d21b35..0000000000 --- a/server_addon/nuke/package.py +++ /dev/null @@ -1,10 +0,0 @@ -name = "nuke" -title = "Nuke" -version = "0.2.3" - -client_dir = "ayon_nuke" - -ayon_required_addons = { - "core": ">0.3.2", -} -ayon_compatible_addons = {} diff --git a/server_addon/nuke/server/__init__.py b/server_addon/nuke/server/__init__.py deleted file mode 
100644
index 0806ea8e87..0000000000
--- a/server_addon/nuke/server/__init__.py
+++ /dev/null
@@ -1,27 +0,0 @@
-from typing import Type, Any
-
-from ayon_server.addons import BaseServerAddon
-
-from .settings import (
-    NukeSettings,
-    DEFAULT_VALUES,
-    convert_settings_overrides
-)
-
-
-class NukeAddon(BaseServerAddon):
-    settings_model: Type[NukeSettings] = NukeSettings
-
-    async def get_default_settings(self):
-        settings_model_cls = self.get_settings_model()
-        return settings_model_cls(**DEFAULT_VALUES)
-
-    async def convert_settings_overrides(
-        self,
-        source_version: str,
-        overrides: dict[str, Any],
-    ) -> dict[str, Any]:
-        convert_settings_overrides(source_version, overrides)
-        # Use super conversion
-        return await super().convert_settings_overrides(
-            source_version, overrides)
diff --git a/server_addon/nuke/server/settings/__init__.py b/server_addon/nuke/server/settings/__init__.py
deleted file mode 100644
index da79b947f7..0000000000
--- a/server_addon/nuke/server/settings/__init__.py
+++ /dev/null
@@ -1,13 +0,0 @@
-from .main import (
-    NukeSettings,
-    DEFAULT_VALUES,
-)
-from .conversion import convert_settings_overrides
-
-
-__all__ = (
-    "NukeSettings",
-    "DEFAULT_VALUES",
-
-    "convert_settings_overrides",
-)
diff --git a/server_addon/nuke/server/settings/common.py b/server_addon/nuke/server/settings/common.py
deleted file mode 100644
index 2ddbc3ca26..0000000000
--- a/server_addon/nuke/server/settings/common.py
+++ /dev/null
@@ -1,195 +0,0 @@
-import json
-from ayon_server.exceptions import BadRequestException
-from ayon_server.settings import BaseSettingsModel, SettingsField
-from ayon_server.types import (
-    ColorRGBA_float,
-    ColorRGB_uint8
-)
-
-
-def validate_json_dict(value):
-    if not value.strip():
-        return "{}"
-    try:
-        converted_value = json.loads(value)
-        success = isinstance(converted_value, dict)
-    except json.JSONDecodeError:
-        success = False
-
-    if not success:
-        raise BadRequestException(
-            "Value can't be parsed as a JSON object"
-        )
-    return value
-
-
-class Vector2d(BaseSettingsModel):
-    _layout = "compact"
-
-    x: float = SettingsField(1.0, title="X")
-    y: float = SettingsField(1.0, title="Y")
-
-
-class Vector3d(BaseSettingsModel):
-    _layout = "compact"
-
-    x: float = SettingsField(1.0, title="X")
-    y: float = SettingsField(1.0, title="Y")
-    z: float = SettingsField(1.0, title="Z")
-
-
-class Box(BaseSettingsModel):
-    _layout = "compact"
-
-    x: float = SettingsField(1.0, title="X")
-    y: float = SettingsField(1.0, title="Y")
-    r: float = SettingsField(1.0, title="R")
-    t: float = SettingsField(1.0, title="T")
-
-
-def formatable_knob_type_enum():
-    return [
-        {"value": "text", "label": "Text"},
-        {"value": "number", "label": "Number"},
-        {"value": "decimal_number", "label": "Decimal number"},
-        {"value": "2d_vector", "label": "2D vector"},
-        # "3D vector"
-    ]
-
-
-class Formatable(BaseSettingsModel):
-    _layout = "compact"
-
-    template: str = SettingsField(
-        "",
-        placeholder="""{{key}} or {{key}};{{key}}""",
-        title="Template"
-    )
-    to_type: str = SettingsField(
-        "Text",
-        title="To Knob type",
-        enum_resolver=formatable_knob_type_enum,
-    )
-
-
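For reference, a quick sketch of how the validate_json_dict helper above behaves; the input values are illustrative only:

    # Empty or whitespace-only input is normalized to an empty JSON object.
    validate_json_dict("")                              # -> "{}"
    # A JSON object passes through unchanged.
    validate_json_dict('{"render": {"review": true}}')  # -> same string back
    # Valid JSON that is not an object is rejected.
    validate_json_dict('["a", "list"]')                 # raises BadRequestException
    # Unparsable input is rejected the same way.
    validate_json_dict("not json")                      # raises BadRequestException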
{"value": "color", "label": "Color"}, - {"value": "box", "label": "Box"}, - {"value": "expression", "label": "Expression"} -] - - -class KnobModel(BaseSettingsModel): - _layout = "expanded" - - type: str = SettingsField( - title="Type", - description="Switch between different knob types", - enum_resolver=lambda: knob_types_enum, - conditionalEnum=True - ) - - name: str = SettingsField( - title="Name", - placeholder="Name" - ) - text: str = SettingsField("", title="Value") - color_gui: ColorRGB_uint8 = SettingsField( - (0, 0, 255), - title="RGB Uint8", - ) - boolean: bool = SettingsField(False, title="Value") - number: int = SettingsField(0, title="Value") - decimal_number: float = SettingsField(0.0, title="Value") - vector_2d: Vector2d = SettingsField( - default_factory=Vector2d, - title="Value" - ) - vector_3d: Vector3d = SettingsField( - default_factory=Vector3d, - title="Value" - ) - color: ColorRGBA_float = SettingsField( - (0.0, 0.0, 1.0, 1.0), - title="RGBA Float" - ) - box: Box = SettingsField( - default_factory=Box, - title="Value" - ) - formatable: Formatable = SettingsField( - default_factory=Formatable, - title="Formatable" - ) - expression: str = SettingsField( - "", - title="Expression" - ) - - -colorspace_types_enum = [ - {"value": "colorspace", "label": "Use Colorspace"}, - {"value": "display_view", "label": "Use Display & View"}, -] - - -class DisplayAndViewProfileModel(BaseSettingsModel): - _layout = "expanded" - - display: str = SettingsField( - "", - title="Display", - description="What display to use", - ) - - view: str = SettingsField( - "", - title="View", - description=( - "What view to use. Anatomy context tokens can " - "be used to dynamically set the value." - ), - ) - - -class ColorspaceConfigurationModel(BaseSettingsModel): - _isGroup: bool = True - - enabled: bool = SettingsField( - False, - title="Enabled", - description=( - "Enable baking target (colorspace or display/view)." - ), - ) - - type: str = SettingsField( - "colorspace", - title="Target baking type", - description="Switch between different knob types", - enum_resolver=lambda: colorspace_types_enum, - conditionalEnum=True, - ) - - colorspace: str = SettingsField( - "", - title="Colorspace", - description=( - "What colorspace name to use. Anatomy context tokens can " - "be used to dynamically set the value." - ), - ) - - display_view: DisplayAndViewProfileModel = SettingsField( - title="Display & View", - description="What display & view to use", - default_factory=DisplayAndViewProfileModel, - ) diff --git a/server_addon/nuke/server/settings/conversion.py b/server_addon/nuke/server/settings/conversion.py deleted file mode 100644 index 2e9e07e354..0000000000 --- a/server_addon/nuke/server/settings/conversion.py +++ /dev/null @@ -1,143 +0,0 @@ -import re -from typing import Any - - -def _get_viewer_config_from_string(input_string): - """Convert string to display and viewer string - - Args: - input_string (str): string with viewer - - Raises: - IndexError: if more then one slash in input string - IndexError: if missing closing bracket - - Returns: - tuple[str]: display, viewer - """ - display = None - viewer = input_string - # check if () or / or \ in name - if "/" in viewer: - split = viewer.split("/") - - # rise if more then one column - if len(split) > 2: - raise IndexError( - "Viewer Input string is not correct. " - f"More then two `/` slashes! 
{input_string}" - ) - - viewer = split[1] - display = split[0] - elif "(" in viewer: - pattern = r"([\w\d\s\.\-]+).*[(](.*)[)]" - result_ = re.findall(pattern, viewer) - try: - result_ = result_.pop() - display = str(result_[1]).rstrip() - viewer = str(result_[0]).rstrip() - except IndexError as e: - raise IndexError( - "Viewer Input string is not correct. " - f"Missing bracket! {input_string}" - ) from e - - return (display, viewer) - - -def _convert_imageio_baking_0_2_3(overrides): - if "baking" not in overrides: - return - - baking_view_process = overrides["baking"].get("viewerProcess") - - if baking_view_process is None: - return - - display, view = _get_viewer_config_from_string(baking_view_process) - - overrides["baking_target"] = { - "enabled": True, - "type": "display_view", - "display_view": { - "display": display, - "view": view, - }, - } - - -def _convert_viewers_0_2_3(overrides): - if "viewer" not in overrides: - return - - viewer = overrides["viewer"] - - if "viewerProcess" in viewer: - viewer_process = viewer["viewerProcess"] - display, view = _get_viewer_config_from_string(viewer_process) - viewer.update({ - "display": display, - "view": view, - }) - if "output_transform" in viewer: - output_transform = viewer["output_transform"] - display, view = _get_viewer_config_from_string(output_transform) - overrides["monitor"] = { - "display": display, - "view": view, - } - - -def _convert_imageio_configs_0_2_3(overrides): - """Image IO settings had changed. - - 0.2.2. is the latest version using the old way. - """ - if "imageio" not in overrides: - return - - imageio_overrides = overrides["imageio"] - - _convert_imageio_baking_0_2_3(imageio_overrides) - _convert_viewers_0_2_3(imageio_overrides) - - -def _convert_extract_intermediate_files_0_2_3(publish_overrides): - """Extract intermediate files settings had changed. - - 0.2.2. is the latest version using the old way. 
- """ - # override can be either `display/view` or `view (display)` - if "ExtractReviewIntermediates" in publish_overrides: - extract_review_intermediates = publish_overrides[ - "ExtractReviewIntermediates"] - - for output in extract_review_intermediates.get("outputs", []): - if viewer_process_override := output.get("viewer_process_override"): - display, view = _get_viewer_config_from_string( - viewer_process_override) - - output["colorspace_override"] = { - "enabled": True, - "type": "display_view", - "display_view": { - "display": display, - "view": view, - }, - } - - -def _convert_publish_plugins(overrides): - if "publish" not in overrides: - return - _convert_extract_intermediate_files_0_2_3(overrides["publish"]) - - -def convert_settings_overrides( - source_version: str, - overrides: dict[str, Any], -) -> dict[str, Any]: - _convert_imageio_configs_0_2_3(overrides) - _convert_publish_plugins(overrides) - return overrides diff --git a/server_addon/nuke/server/settings/create_plugins.py b/server_addon/nuke/server/settings/create_plugins.py deleted file mode 100644 index e4a0f9c938..0000000000 --- a/server_addon/nuke/server/settings/create_plugins.py +++ /dev/null @@ -1,225 +0,0 @@ -from pydantic import validator -from ayon_server.settings import ( - BaseSettingsModel, - SettingsField, - ensure_unique_names -) -from .common import KnobModel - - -def instance_attributes_enum(): - """Return create write instance attributes.""" - return [ - {"value": "reviewable", "label": "Reviewable"}, - {"value": "farm_rendering", "label": "Farm rendering"}, - {"value": "use_range_limit", "label": "Use range limit"}, - { - "value": "render_on_farm", - "label": "Render On Farm" - } - ] - - -class PrenodeModel(BaseSettingsModel): - name: str = SettingsField( - title="Node name" - ) - - nodeclass: str = SettingsField( - "", - title="Node class" - ) - dependent: str = SettingsField( - "", - title="Incoming dependency" - ) - - knobs: list[KnobModel] = SettingsField( - default_factory=list, - title="Knobs", - ) - - @validator("knobs") - def ensure_unique_names(cls, value): - """Ensure name fields within the lists have unique names.""" - ensure_unique_names(value) - return value - - -class CreateWriteRenderModel(BaseSettingsModel): - temp_rendering_path_template: str = SettingsField( - title="Temporary rendering path template" - ) - default_variants: list[str] = SettingsField( - title="Default variants", - default_factory=list - ) - instance_attributes: list[str] = SettingsField( - default_factory=list, - enum_resolver=instance_attributes_enum, - title="Instance attributes" - ) - exposed_knobs: list[str] = SettingsField( - title="Write Node Exposed Knobs", - default_factory=list - ) - prenodes: list[PrenodeModel] = SettingsField( - default_factory=list, - title="Preceding nodes", - ) - - @validator("prenodes") - def ensure_unique_names(cls, value): - """Ensure name fields within the lists have unique names.""" - ensure_unique_names(value) - return value - - -class CreateWritePrerenderModel(BaseSettingsModel): - temp_rendering_path_template: str = SettingsField( - title="Temporary rendering path template" - ) - default_variants: list[str] = SettingsField( - title="Default variants", - default_factory=list - ) - instance_attributes: list[str] = SettingsField( - default_factory=list, - enum_resolver=instance_attributes_enum, - title="Instance attributes" - ) - exposed_knobs: list[str] = SettingsField( - title="Write Node Exposed Knobs", - default_factory=list - ) - prenodes: list[PrenodeModel] = SettingsField( - 
-        default_factory=list,
-        title="Preceding nodes",
-    )
-
-    @validator("prenodes")
-    def ensure_unique_names(cls, value):
-        """Ensure name fields within the lists have unique names."""
-        ensure_unique_names(value)
-        return value
-
-
-class CreateWriteImageModel(BaseSettingsModel):
-    temp_rendering_path_template: str = SettingsField(
-        title="Temporary rendering path template"
-    )
-    default_variants: list[str] = SettingsField(
-        title="Default variants",
-        default_factory=list
-    )
-    instance_attributes: list[str] = SettingsField(
-        default_factory=list,
-        enum_resolver=instance_attributes_enum,
-        title="Instance attributes"
-    )
-    exposed_knobs: list[str] = SettingsField(
-        title="Write Node Exposed Knobs",
-        default_factory=list
-    )
-    prenodes: list[PrenodeModel] = SettingsField(
-        default_factory=list,
-        title="Preceding nodes",
-    )
-
-    @validator("prenodes")
-    def ensure_unique_names(cls, value):
-        """Ensure name fields within the lists have unique names."""
-        ensure_unique_names(value)
-        return value
-
-
-class CreatorPluginsSettings(BaseSettingsModel):
-    CreateWriteRender: CreateWriteRenderModel = SettingsField(
-        default_factory=CreateWriteRenderModel,
-        title="Create Write Render"
-    )
-    CreateWritePrerender: CreateWritePrerenderModel = SettingsField(
-        default_factory=CreateWritePrerenderModel,
-        title="Create Write Prerender"
-    )
-    CreateWriteImage: CreateWriteImageModel = SettingsField(
-        default_factory=CreateWriteImageModel,
-        title="Create Write Image"
-    )
-
-
-DEFAULT_CREATE_SETTINGS = {
-    "CreateWriteRender": {
-        "temp_rendering_path_template": "{work}/renders/nuke/{product[name]}/{product[name]}.{frame}.{ext}",
-        "default_variants": [
-            "Main",
-            "Mask"
-        ],
-        "instance_attributes": [
-            "reviewable",
-            "farm_rendering"
-        ],
-        "exposed_knobs": [],
-        "prenodes": [
-            {
-                "name": "Reformat01",
-                "nodeclass": "Reformat",
-                "dependent": "",
-                "knobs": [
-                    {
-                        "type": "text",
-                        "name": "resize",
-                        "text": "none"
-                    },
-                    {
-                        "type": "boolean",
-                        "name": "black_outside",
-                        "boolean": True
-                    }
-                ]
-            }
-        ]
-    },
-    "CreateWritePrerender": {
-        "temp_rendering_path_template": "{work}/renders/nuke/{product[name]}/{product[name]}.{frame}.{ext}",
-        "default_variants": [
-            "Key01",
-            "Bg01",
-            "Fg01",
-            "Branch01",
-            "Part01"
-        ],
-        "instance_attributes": [
-            "farm_rendering",
-            "use_range_limit"
-        ],
-        "exposed_knobs": [],
-        "prenodes": []
-    },
-    "CreateWriteImage": {
-        "temp_rendering_path_template": "{work}/renders/nuke/{product[name]}/{product[name]}.{ext}",
-        "default_variants": [
-            "StillFrame",
-            "MPFrame",
-            "LayoutFrame"
-        ],
-        "instance_attributes": [
-            "use_range_limit"
-        ],
-        "exposed_knobs": [],
-        "prenodes": [
-            {
-                "name": "FrameHold01",
-                "nodeclass": "FrameHold",
-                "dependent": "",
-                "knobs": [
-                    {
-                        "type": "expression",
-                        "name": "first_frame",
-                        "expression": "parent.first"
-                    }
-                ]
-            }
-        ]
-    }
-}
diff --git a/server_addon/nuke/server/settings/dirmap.py b/server_addon/nuke/server/settings/dirmap.py
deleted file mode 100644
index 3e1bac0739..0000000000
--- a/server_addon/nuke/server/settings/dirmap.py
+++ /dev/null
@@ -1,33 +0,0 @@
-from ayon_server.settings import BaseSettingsModel, SettingsField
-
-
-class DirmapPathsSubmodel(BaseSettingsModel):
-    _layout = "compact"
-    source_path: list[str] = SettingsField(
-        default_factory=list,
-        title="Source Paths"
-    )
-    destination_path: list[str] = SettingsField(
-        default_factory=list,
-        title="Destination Paths"
-    )
-
-
-class DirmapSettings(BaseSettingsModel):
-    """Nuke directory mapping project settings."""
-    _isGroup: bool = True
-
-
enabled: bool = SettingsField(title="enabled") - paths: DirmapPathsSubmodel = SettingsField( - default_factory=DirmapPathsSubmodel, - title="Dirmap Paths" - ) - - -DEFAULT_DIRMAP_SETTINGS = { - "enabled": False, - "paths": { - "source_path": [], - "destination_path": [] - } -} diff --git a/server_addon/nuke/server/settings/general.py b/server_addon/nuke/server/settings/general.py deleted file mode 100644 index d54c725dc1..0000000000 --- a/server_addon/nuke/server/settings/general.py +++ /dev/null @@ -1,41 +0,0 @@ -from ayon_server.settings import BaseSettingsModel, SettingsField - - -class MenuShortcut(BaseSettingsModel): - """Nuke general project settings.""" - - create: str = SettingsField( - title="Create..." - ) - publish: str = SettingsField( - title="Publish..." - ) - load: str = SettingsField( - title="Load..." - ) - manage: str = SettingsField( - title="Manage..." - ) - build_workfile: str = SettingsField( - title="Build Workfile..." - ) - - -class GeneralSettings(BaseSettingsModel): - """Nuke general project settings.""" - - menu: MenuShortcut = SettingsField( - default_factory=MenuShortcut, - title="Menu Shortcuts", - ) - - -DEFAULT_GENERAL_SETTINGS = { - "menu": { - "create": "ctrl+alt+c", - "publish": "ctrl+alt+p", - "load": "ctrl+alt+l", - "manage": "ctrl+alt+m", - "build_workfile": "ctrl+alt+b" - } -} diff --git a/server_addon/nuke/server/settings/gizmo.py b/server_addon/nuke/server/settings/gizmo.py deleted file mode 100644 index ddb56f891c..0000000000 --- a/server_addon/nuke/server/settings/gizmo.py +++ /dev/null @@ -1,79 +0,0 @@ -from ayon_server.settings import ( - BaseSettingsModel, - SettingsField, - MultiplatformPathModel, - MultiplatformPathListModel, -) - - -class SubGizmoItem(BaseSettingsModel): - title: str = SettingsField( - title="Label" - ) - sourcetype: str = SettingsField( - title="Type of usage" - ) - command: str = SettingsField( - title="Python command" - ) - icon: str = SettingsField( - title="Icon Path" - ) - shortcut: str = SettingsField( - title="Hotkey" - ) - - -class GizmoDefinitionItem(BaseSettingsModel): - gizmo_toolbar_path: str = SettingsField( - title="Gizmo Menu" - ) - sub_gizmo_list: list[SubGizmoItem] = SettingsField( - default_factory=list, title="Sub Gizmo List") - - -class GizmoItem(BaseSettingsModel): - """Nuke gizmo item """ - - toolbar_menu_name: str = SettingsField( - title="Toolbar Menu Name" - ) - gizmo_source_dir: MultiplatformPathListModel = SettingsField( - default_factory=MultiplatformPathListModel, - title="Gizmo Directory Path" - ) - toolbar_icon_path: MultiplatformPathModel = SettingsField( - default_factory=MultiplatformPathModel, - title="Toolbar Icon Path" - ) - gizmo_definition: list[GizmoDefinitionItem] = SettingsField( - default_factory=list, title="Gizmo Definition") - - -DEFAULT_GIZMO_ITEM = { - "toolbar_menu_name": "OpenPype Gizmo", - "gizmo_source_dir": { - "windows": [], - "darwin": [], - "linux": [] - }, - "toolbar_icon_path": { - "windows": "", - "darwin": "", - "linux": "" - }, - "gizmo_definition": [ - { - "gizmo_toolbar_path": "/path/to/menu", - "sub_gizmo_list": [ - { - "sourcetype": "python", - "title": "Gizmo Note", - "command": "nuke.nodes.StickyNote(label='You can create your own toolbar menu in the Nuke GizmoMenu of OpenPype')", - "icon": "", - "shortcut": "" - } - ] - } - ] -} diff --git a/server_addon/nuke/server/settings/imageio.py b/server_addon/nuke/server/settings/imageio.py deleted file mode 100644 index a34cb4ab05..0000000000 --- a/server_addon/nuke/server/settings/imageio.py +++ /dev/null @@ 
-1,354 +0,0 @@ -from typing import Literal -from pydantic import validator -from ayon_server.settings import ( - BaseSettingsModel, - SettingsField, - ensure_unique_names, -) - -from .common import ( - KnobModel, - ColorspaceConfigurationModel, -) - - -class NodesModel(BaseSettingsModel): - _layout = "expanded" - plugins: list[str] = SettingsField( - default_factory=list, - title="Used in plugins" - ) - nuke_node_class: str = SettingsField( - title="Nuke Node Class", - ) - - -class RequiredNodesModel(NodesModel): - knobs: list[KnobModel] = SettingsField( - default_factory=list, - title="Knobs", - ) - - @validator("knobs") - def ensure_unique_names(cls, value): - """Ensure name fields within the lists have unique names.""" - ensure_unique_names(value) - return value - - -class OverrideNodesModel(NodesModel): - subsets: list[str] = SettingsField( - default_factory=list, - title="Subsets" - ) - - knobs: list[KnobModel] = SettingsField( - default_factory=list, - title="Knobs", - ) - - @validator("knobs") - def ensure_unique_names(cls, value): - """Ensure name fields within the lists have unique names.""" - ensure_unique_names(value) - return value - - -class NodesSetting(BaseSettingsModel): - _isGroup: bool = True - - required_nodes: list[RequiredNodesModel] = SettingsField( - title="Plugin required", - default_factory=list - ) - override_nodes: list[OverrideNodesModel] = SettingsField( - title="Plugin's node overrides", - default_factory=list - ) - - -def ocio_configs_switcher_enum(): - return [ - {"value": "nuke-default", "label": "nuke-default"}, - {"value": "spi-vfx", "label": "spi-vfx (11)"}, - {"value": "spi-anim", "label": "spi-anim (11)"}, - {"value": "aces_0.1.1", "label": "aces_0.1.1 (11)"}, - {"value": "aces_0.7.1", "label": "aces_0.7.1 (11)"}, - {"value": "aces_1.0.1", "label": "aces_1.0.1 (11)"}, - {"value": "aces_1.0.3", "label": "aces_1.0.3 (11, 12)"}, - {"value": "aces_1.1", "label": "aces_1.1 (12, 13)"}, - {"value": "aces_1.2", "label": "aces_1.2 (13, 14)"}, - {"value": "studio-config-v1.0.0_aces-v1.3_ocio-v2.1", - "label": "studio-config-v1.0.0_aces-v1.3_ocio-v2.1 (14)"}, - {"value": "cg-config-v1.0.0_aces-v1.3_ocio-v2.1", - "label": "cg-config-v1.0.0_aces-v1.3_ocio-v2.1 (14)"}, - ] - - -class WorkfileColorspaceSettings(BaseSettingsModel): - """Nuke workfile colorspace preset. 
""" - - _isGroup: bool = True - - color_management: Literal["Nuke", "OCIO"] = SettingsField( - title="Color Management Workflow" - ) - - native_ocio_config: str = SettingsField( - title="Native OpenColorIO Config", - description="Switch between native OCIO configs", - enum_resolver=ocio_configs_switcher_enum, - conditionalEnum=True - ) - - working_space: str = SettingsField( - title="Working Space" - ) - monitor_lut: str = SettingsField( - title="Thumbnails" - ) - monitor_out_lut: str = SettingsField( - title="Monitor Out" - ) - int_8_lut: str = SettingsField( - title="8-bit Files" - ) - int_16_lut: str = SettingsField( - title="16-bit Files" - ) - log_lut: str = SettingsField( - title="Log Files" - ) - float_lut: str = SettingsField( - title="Float Files" - ) - - -class ReadColorspaceRulesItems(BaseSettingsModel): - _layout = "expanded" - - regex: str = SettingsField("", title="Regex expression") - colorspace: str = SettingsField("", title="Colorspace") - - -class RegexInputsModel(BaseSettingsModel): - _isGroup: bool = True - - inputs: list[ReadColorspaceRulesItems] = SettingsField( - default_factory=list, - title="Inputs" - ) - - -class ViewProcessModel(BaseSettingsModel): - _isGroup: bool = True - - display: str = SettingsField( - "", - title="Display", - description="What display to use", - ) - view: str = SettingsField( - "", - title="View", - description=( - "What view to use. Anatomy context tokens can " - "be used to dynamically set the value." - ), - ) - - -class MonitorProcessModel(BaseSettingsModel): - _isGroup: bool = True - - display: str = SettingsField( - "", - title="Display", - description="What display to use", - ) - view: str = SettingsField( - "", - title="View", - description=( - "What view to use. Anatomy context tokens can " - "be used to dynamically set the value." - ), - ) - - -class ImageIOConfigModel(BaseSettingsModel): - """[DEPRECATED] Addon OCIO config settings. Please set the OCIO config - path in the Core addon profiles here - (ayon+settings://core/imageio/ocio_config_profiles). - """ - - override_global_config: bool = SettingsField( - False, - title="Override global OCIO config", - description=( - "DEPRECATED functionality. Please set the OCIO config path in the " - "Core addon profiles here (ayon+settings://core/imageio/" - "ocio_config_profiles)." - ), - ) - filepath: list[str] = SettingsField( - default_factory=list, - title="Config path", - description=( - "DEPRECATED functionality. Please set the OCIO config path in the " - "Core addon profiles here (ayon+settings://core/imageio/" - "ocio_config_profiles)." - ), - ) - - -class ImageIOFileRuleModel(BaseSettingsModel): - name: str = SettingsField("", title="Rule name") - pattern: str = SettingsField("", title="Regex pattern") - colorspace: str = SettingsField("", title="Colorspace name") - ext: str = SettingsField("", title="File extension") - - -class ImageIOFileRulesModel(BaseSettingsModel): - _isGroup: bool = True - - activate_host_rules: bool = SettingsField(False) - rules: list[ImageIOFileRuleModel] = SettingsField( - default_factory=list, - title="Rules" - ) - - @validator("rules") - def validate_unique_outputs(cls, value): - ensure_unique_names(value) - return value - - -class ImageIOSettings(BaseSettingsModel): - """Nuke color management project settings. 
""" - - activate_host_color_management: bool = SettingsField( - True, title="Enable Color Management") - ocio_config: ImageIOConfigModel = SettingsField( - default_factory=ImageIOConfigModel, - title="OCIO config" - ) - file_rules: ImageIOFileRulesModel = SettingsField( - default_factory=ImageIOFileRulesModel, - title="File Rules" - ) - viewer: ViewProcessModel = SettingsField( - default_factory=ViewProcessModel, - title="Viewer", - description="""Viewer profile is used during - Creation of new viewer node at knob viewerProcess""" - ) - monitor: MonitorProcessModel = SettingsField( - default_factory=MonitorProcessModel, - title="Monitor OUT" - ) - baking_target: ColorspaceConfigurationModel = SettingsField( - default_factory=ColorspaceConfigurationModel, - title="Baking Target Colorspace" - ) - - workfile: WorkfileColorspaceSettings = SettingsField( - default_factory=WorkfileColorspaceSettings, - title="Workfile" - ) - - nodes: NodesSetting = SettingsField( - default_factory=NodesSetting, - title="Nodes" - ) - """# TODO: enhance settings with host api: - - [ ] no need for `inputs` middle part. It can stay - directly on `regex_inputs` - """ - regex_inputs: RegexInputsModel = SettingsField( - default_factory=RegexInputsModel, - title="Assign colorspace to read nodes via rules" - ) - - -DEFAULT_IMAGEIO_SETTINGS = { - "viewer": {"display": "ACES", "view": "sRGB"}, - "monitor": {"display": "ACES", "view": "Rec.709"}, - "baking_target": { - "enabled": True, - "type": "colorspace", - "colorspace": "Output - Rec.709", - }, - "workfile": { - "color_management": "OCIO", - "native_ocio_config": "aces_1.2", - "working_space": "role_scene_linear", - "monitor_lut": "ACES/sRGB", - "monitor_out_lut": "ACES/sRGB", - "int_8_lut": "role_matte_paint", - "int_16_lut": "role_texture_paint", - "log_lut": "role_compositing_log", - "float_lut": "role_scene_linear", - }, - "nodes": { - "required_nodes": [ - { - "plugins": ["CreateWriteRender"], - "nuke_node_class": "Write", - "knobs": [ - {"type": "text", "name": "file_type", "text": "exr"}, - {"type": "text", "name": "datatype", "text": "16 bit half"}, - {"type": "text", "name": "compression", "text": "Zip (1 scanline)"}, - {"type": "boolean", "name": "autocrop", "boolean": True}, - { - "type": "color_gui", - "name": "tile_color", - "color_gui": [186, 35, 35], - }, - {"type": "text", "name": "channels", "text": "rgb"}, - {"type": "text", "name": "colorspace", "text": "scene_linear"}, - {"type": "boolean", "name": "create_directories", "boolean": True}, - ], - }, - { - "plugins": ["CreateWritePrerender"], - "nuke_node_class": "Write", - "knobs": [ - {"type": "text", "name": "file_type", "text": "exr"}, - {"type": "text", "name": "datatype", "text": "16 bit half"}, - {"type": "text", "name": "compression", "text": "Zip (1 scanline)"}, - {"type": "boolean", "name": "autocrop", "boolean": True}, - { - "type": "color_gui", - "name": "tile_color", - "color_gui": [171, 171, 10], - }, - {"type": "text", "name": "channels", "text": "rgb"}, - {"type": "text", "name": "colorspace", "text": "scene_linear"}, - {"type": "boolean", "name": "create_directories", "boolean": True}, - ], - }, - { - "plugins": ["CreateWriteImage"], - "nuke_node_class": "Write", - "knobs": [ - {"type": "text", "name": "file_type", "text": "tiff"}, - {"type": "text", "name": "datatype", "text": "16 bit"}, - {"type": "text", "name": "compression", "text": "Deflate"}, - { - "type": "color_gui", - "name": "tile_color", - "color_gui": [56, 162, 7], - }, - {"type": "text", "name": "channels", "text": 
"rgb"}, - {"type": "text", "name": "colorspace", "text": "texture_paint"}, - {"type": "boolean", "name": "create_directories", "boolean": True}, - ], - }, - ], - "override_nodes": [], - }, - "regex_inputs": { - "inputs": [{"regex": "(beauty).*(?=.exr)", "colorspace": "linear"}] - }, -} diff --git a/server_addon/nuke/server/settings/loader_plugins.py b/server_addon/nuke/server/settings/loader_plugins.py deleted file mode 100644 index 22cb469e8d..0000000000 --- a/server_addon/nuke/server/settings/loader_plugins.py +++ /dev/null @@ -1,74 +0,0 @@ -from ayon_server.settings import BaseSettingsModel, SettingsField - - -class LoadImageModel(BaseSettingsModel): - enabled: bool = SettingsField( - title="Enabled" - ) - representations_include: list[str] = SettingsField( - default_factory=list, - title="Include representations" - ) - - node_name_template: str = SettingsField( - title="Read node name template" - ) - - -class LoadClipOptionsModel(BaseSettingsModel): - start_at_workfile: bool = SettingsField( - title="Start at workfile's start frame" - ) - add_retime: bool = SettingsField( - title="Add retime" - ) - deep_exr: bool = SettingsField( - title="Deep Exr Read Node" - ) - -class LoadClipModel(BaseSettingsModel): - enabled: bool = SettingsField( - title="Enabled" - ) - representations_include: list[str] = SettingsField( - default_factory=list, - title="Include representations" - ) - - node_name_template: str = SettingsField( - title="Read node name template" - ) - options_defaults: LoadClipOptionsModel = SettingsField( - default_factory=LoadClipOptionsModel, - title="Loader option defaults" - ) - - -class LoaderPluginsModel(BaseSettingsModel): - LoadImage: LoadImageModel = SettingsField( - default_factory=LoadImageModel, - title="Load Image" - ) - LoadClip: LoadClipModel = SettingsField( - default_factory=LoadClipModel, - title="Load Clip" - ) - - -DEFAULT_LOADER_PLUGINS_SETTINGS = { - "LoadImage": { - "enabled": True, - "representations_include": [], - "node_name_template": "{class_name}_{ext}" - }, - "LoadClip": { - "enabled": True, - "representations_include": [], - "node_name_template": "{class_name}_{ext}", - "options_defaults": { - "start_at_workfile": True, - "add_retime": True, - "deep_exr": False - } - } -} diff --git a/server_addon/nuke/server/settings/main.py b/server_addon/nuke/server/settings/main.py deleted file mode 100644 index 1fd347cc21..0000000000 --- a/server_addon/nuke/server/settings/main.py +++ /dev/null @@ -1,112 +0,0 @@ -from ayon_server.settings import ( - BaseSettingsModel, - SettingsField, -) - -from .general import ( - GeneralSettings, - DEFAULT_GENERAL_SETTINGS -) -from .imageio import ( - ImageIOSettings, - DEFAULT_IMAGEIO_SETTINGS -) -from .dirmap import ( - DirmapSettings, - DEFAULT_DIRMAP_SETTINGS -) -from .scriptsmenu import ( - ScriptsmenuSettings, - DEFAULT_SCRIPTSMENU_SETTINGS -) -from .gizmo import ( - GizmoItem, - DEFAULT_GIZMO_ITEM -) -from .create_plugins import ( - CreatorPluginsSettings, - DEFAULT_CREATE_SETTINGS -) -from .publish_plugins import ( - PublishPluginsModel, - DEFAULT_PUBLISH_PLUGIN_SETTINGS -) -from .loader_plugins import ( - LoaderPluginsModel, - DEFAULT_LOADER_PLUGINS_SETTINGS -) -from .workfile_builder import ( - WorkfileBuilderModel, - DEFAULT_WORKFILE_BUILDER_SETTINGS -) -from .templated_workfile_build import ( - TemplatedWorkfileBuildModel -) - - -class NukeSettings(BaseSettingsModel): - """Nuke addon settings.""" - - general: GeneralSettings = SettingsField( - default_factory=GeneralSettings, - title="General", - ) - - imageio: 
ImageIOSettings = SettingsField( - default_factory=ImageIOSettings, - title="Color Management (imageio)", - ) - - dirmap: DirmapSettings = SettingsField( - default_factory=DirmapSettings, - title="Nuke Directory Mapping", - ) - - scriptsmenu: ScriptsmenuSettings = SettingsField( - default_factory=ScriptsmenuSettings, - title="Scripts Menu Definition", - ) - - gizmo: list[GizmoItem] = SettingsField( - default_factory=list, title="Gizmo Menu") - - create: CreatorPluginsSettings = SettingsField( - default_factory=CreatorPluginsSettings, - title="Creator Plugins", - ) - - publish: PublishPluginsModel = SettingsField( - default_factory=PublishPluginsModel, - title="Publish Plugins", - ) - - load: LoaderPluginsModel = SettingsField( - default_factory=LoaderPluginsModel, - title="Loader Plugins", - ) - - workfile_builder: WorkfileBuilderModel = SettingsField( - default_factory=WorkfileBuilderModel, - title="Workfile Builder", - ) - - templated_workfile_build: TemplatedWorkfileBuildModel = SettingsField( - title="Templated Workfile Build", - default_factory=TemplatedWorkfileBuildModel - ) - - -DEFAULT_VALUES = { - "general": DEFAULT_GENERAL_SETTINGS, - "imageio": DEFAULT_IMAGEIO_SETTINGS, - "dirmap": DEFAULT_DIRMAP_SETTINGS, - "scriptsmenu": DEFAULT_SCRIPTSMENU_SETTINGS, - "gizmo": [DEFAULT_GIZMO_ITEM], - "create": DEFAULT_CREATE_SETTINGS, - "publish": DEFAULT_PUBLISH_PLUGIN_SETTINGS, - "load": DEFAULT_LOADER_PLUGINS_SETTINGS, - "workfile_builder": DEFAULT_WORKFILE_BUILDER_SETTINGS, - "templated_workfile_build": { - "profiles": [] - } -} diff --git a/server_addon/nuke/server/settings/publish_plugins.py b/server_addon/nuke/server/settings/publish_plugins.py deleted file mode 100644 index c52c9e9c84..0000000000 --- a/server_addon/nuke/server/settings/publish_plugins.py +++ /dev/null @@ -1,412 +0,0 @@ -from pydantic import validator -from ayon_server.settings import ( - BaseSettingsModel, - SettingsField, - ensure_unique_names, - task_types_enum -) -from .common import ( - KnobModel, - ColorspaceConfigurationModel, - validate_json_dict, -) - - -def nuke_render_publish_types_enum(): - """Return all nuke render families available in creators.""" - return [ - {"value": "render", "label": "Render"}, - {"value": "prerender", "label": "Prerender"}, - {"value": "image", "label": "Image"} - ] - - -def nuke_product_types_enum(): - """Return all nuke families available in creators.""" - return [ - {"value": "nukenodes", "label": "Nukenodes"}, - {"value": "model", "label": "Model"}, - {"value": "camera", "label": "Camera"}, - {"value": "gizmo", "label": "Gizmo"}, - {"value": "source", "label": "Source"} - ] + nuke_render_publish_types_enum() - - -class NodeModel(BaseSettingsModel): - name: str = SettingsField( - title="Node name" - ) - nodeclass: str = SettingsField( - "", - title="Node class" - ) - dependent: str = SettingsField( - "", - title="Incoming dependency" - ) - knobs: list[KnobModel] = SettingsField( - default_factory=list, - title="Knobs", - ) - - @validator("knobs") - def ensure_unique_names(cls, value): - """Ensure name fields within the lists have unique names.""" - ensure_unique_names(value) - return value - - -class CollectInstanceDataModel(BaseSettingsModel): - sync_workfile_version_on_product_types: list[str] = SettingsField( - default_factory=list, - enum_resolver=nuke_product_types_enum, - title="Product types" - ) - - -class OptionalPluginModel(BaseSettingsModel): - enabled: bool = SettingsField(True) - optional: bool = SettingsField(title="Optional") - active: bool = 
SettingsField(title="Active")
-
-
-class ValidateKnobsModel(BaseSettingsModel):
-    enabled: bool = SettingsField(title="Enabled")
-    knobs: str = SettingsField(
-        "{}",
-        title="Knobs",
-        widget="textarea",
-    )
-
-    @validator("knobs")
-    def validate_json(cls, value):
-        return validate_json_dict(value)
-
-
-class ExtractReviewDataModel(BaseSettingsModel):
-    enabled: bool = SettingsField(title="Enabled")
-
-
-class ExtractReviewDataLutModel(BaseSettingsModel):
-    enabled: bool = SettingsField(title="Enabled")
-
-
-class BakingStreamFilterModel(BaseSettingsModel):
-    task_types: list[str] = SettingsField(
-        default_factory=list,
-        title="Task types",
-        enum_resolver=task_types_enum
-    )
-    product_types: list[str] = SettingsField(
-        default_factory=list,
-        enum_resolver=nuke_render_publish_types_enum,
-        title="Product types"
-    )
-    product_names: list[str] = SettingsField(
-        default_factory=list, title="Product names")
-
-
-class ReformatNodesRepositionNodes(BaseSettingsModel):
-    node_class: str = SettingsField(title="Node class")
-    knobs: list[KnobModel] = SettingsField(
-        default_factory=list,
-        title="Node knobs")
-
-
-class ReformatNodesConfigModel(BaseSettingsModel):
-    """Only reposition nodes supported.
-
-    You can add multiple reformat nodes and set their knobs.
-    Order of reformat nodes is important. First reformat node will
-    be applied first and last reformat node will be applied last.
-    """
-    enabled: bool = SettingsField(False)
-    reposition_nodes: list[ReformatNodesRepositionNodes] = SettingsField(
-        default_factory=list,
-        title="Reposition knobs"
-    )
-
-
-class IntermediateOutputModel(BaseSettingsModel):
-    name: str = SettingsField(title="Output name")
-    publish: bool = SettingsField(title="Publish")
-    filter: BakingStreamFilterModel = SettingsField(
-        title="Filter", default_factory=BakingStreamFilterModel)
-    read_raw: bool = SettingsField(
-        False,
-        title="Input read node RAW switch"
-    )
-    bake_viewer_process: bool = SettingsField(
-        True,
-        title="Bake viewer process",
-        section="Baking target",
-    )
-    colorspace_override: ColorspaceConfigurationModel = SettingsField(
-        title="Target baking colorspace override",
-        description="Override Baking target with colorspace or display/view",
-        default_factory=ColorspaceConfigurationModel
-    )
-    bake_viewer_input_process: bool = SettingsField(
-        True,
-        title="Bake viewer input process node (LUT)",
-        section="Baking additional",
-    )
-    reformat_nodes_config: ReformatNodesConfigModel = SettingsField(
-        default_factory=ReformatNodesConfigModel,
-        title="Reformat Nodes")
-    extension: str = SettingsField(
-        "mov",
-        title="File extension"
-    )
-    add_custom_tags: list[str] = SettingsField(
-        title="Custom tags", default_factory=list)
-
-
-class ExtractReviewIntermediatesModel(BaseSettingsModel):
-    enabled: bool = SettingsField(title="Enabled")
-    viewer_lut_raw: bool = SettingsField(title="Viewer lut raw")
-    outputs: list[IntermediateOutputModel] = SettingsField(
-        default_factory=list,
-        title="Baking streams"
-    )
-
-
-class FSubmissionNoteModel(BaseSettingsModel):
-    enabled: bool = SettingsField(title="enabled")
-    template: str = SettingsField(title="Template")
-
-
-class FSubmistingForModel(BaseSettingsModel):
-    enabled: bool = SettingsField(title="enabled")
-    template: str = SettingsField(title="Template")
-
-
-class FVFXScopeOfWorkModel(BaseSettingsModel):
-    enabled: bool = SettingsField(title="enabled")
-    template: str = SettingsField(title="Template")
-
-
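The baking-stream filter above follows the usual AYON profile-matching convention, where an empty list matches everything. A minimal sketch of that matching logic (illustrative only; the real matching lives in the Nuke publish plugins, not in this settings module):

    def baking_filter_matches(filter_data, task_type, product_type, product_name):
        """Return True when an instance matches a baking stream filter."""
        def _matches(values, value):
            # An empty filter list means "match anything".
            return not values or value in values

        return (
            _matches(filter_data["task_types"], task_type)
            and _matches(filter_data["product_types"], product_type)
            and _matches(filter_data["product_names"], product_name)
        )

    # The default output leaves all filter lists empty, so every instance matches:
    baking_filter_matches(
        {"task_types": [], "product_types": [], "product_names": []},
        "Compositing", "render", "renderMain",
    )  # -> True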
-class ExtractSlateFrameParamModel(BaseSettingsModel):
-    f_submission_note: FSubmissionNoteModel = SettingsField(
-        title="f_submission_note",
-        default_factory=FSubmissionNoteModel
-    )
-    f_submitting_for: FSubmistingForModel = SettingsField(
-        title="f_submitting_for",
-        default_factory=FSubmistingForModel
-    )
-    f_vfx_scope_of_work: FVFXScopeOfWorkModel = SettingsField(
-        title="f_vfx_scope_of_work",
-        default_factory=FVFXScopeOfWorkModel
-    )
-
-
-class ExtractSlateFrameModel(BaseSettingsModel):
-    viewer_lut_raw: bool = SettingsField(title="Viewer lut raw")
-    key_value_mapping: ExtractSlateFrameParamModel = SettingsField(
-        title="Key value mapping",
-        default_factory=ExtractSlateFrameParamModel
-    )
-
-
-class IncrementScriptVersionModel(BaseSettingsModel):
-    enabled: bool = SettingsField(title="Enabled")
-    optional: bool = SettingsField(title="Optional")
-    active: bool = SettingsField(title="Active")
-
-
-class PublishPluginsModel(BaseSettingsModel):
-    CollectInstanceData: CollectInstanceDataModel = SettingsField(
-        title="Collect Instance Version",
-        default_factory=CollectInstanceDataModel,
-        section="Collectors"
-    )
-    ValidateCorrectAssetContext: OptionalPluginModel = SettingsField(
-        title="Validate Correct Folder Name",
-        default_factory=OptionalPluginModel,
-        section="Validators"
-    )
-    ValidateKnobs: ValidateKnobsModel = SettingsField(
-        title="Validate Knobs",
-        default_factory=ValidateKnobsModel
-    )
-    ValidateOutputResolution: OptionalPluginModel = SettingsField(
-        title="Validate Output Resolution",
-        default_factory=OptionalPluginModel
-    )
-    ValidateGizmo: OptionalPluginModel = SettingsField(
-        title="Validate Gizmo",
-        default_factory=OptionalPluginModel
-    )
-    ValidateBackdrop: OptionalPluginModel = SettingsField(
-        title="Validate Backdrop",
-        default_factory=OptionalPluginModel
-    )
-    ValidateScriptAttributes: OptionalPluginModel = SettingsField(
-        title="Validate workfile attributes",
-        default_factory=OptionalPluginModel
-    )
-    ExtractReviewData: ExtractReviewDataModel = SettingsField(
-        title="Extract Review Data",
-        default_factory=ExtractReviewDataModel
-    )
-    ExtractReviewDataLut: ExtractReviewDataLutModel = SettingsField(
-        title="Extract Review Data Lut",
-        default_factory=ExtractReviewDataLutModel
-    )
-    ExtractReviewIntermediates: ExtractReviewIntermediatesModel = (
-        SettingsField(
-            title="Extract Review Intermediates",
-            default_factory=ExtractReviewIntermediatesModel
-        )
-    )
-    ExtractSlateFrame: ExtractSlateFrameModel = SettingsField(
-        title="Extract Slate Frame",
-        default_factory=ExtractSlateFrameModel
-    )
-    IncrementScriptVersion: IncrementScriptVersionModel = SettingsField(
-        title="Increment Workfile Version",
-        default_factory=IncrementScriptVersionModel,
-        section="Integrators"
-    )
-
-
-DEFAULT_PUBLISH_PLUGIN_SETTINGS = {
-    "CollectInstanceData": {
-        "sync_workfile_version_on_product_types": [
-            "nukenodes",
-            "camera",
-            "gizmo",
-            "source",
-            "render",
-            "write"
-        ]
-    },
-    "ValidateCorrectAssetContext": {
-        "enabled": True,
-        "optional": True,
-        "active": True
-    },
-    "ValidateKnobs": {
-        "enabled": False,
-        "knobs": "\n".join([
-            '{',
-            '  "render": {',
-            '    "review": true',
-            '  }',
-            '}'
-        ])
-    },
-    "ValidateOutputResolution": {
-        "enabled": True,
-        "optional": True,
-        "active": True
-    },
-    "ValidateGizmo": {
-        "enabled": True,
-        "optional": True,
-        "active": True
-    },
-    "ValidateBackdrop": {
-        "enabled": True,
-        "optional": True,
-        "active": True
-    },
-    "ValidateScriptAttributes": {
-        "enabled": True,
-        "optional": True,
-        "active": True
-    },
-    "ExtractReviewData": {
-        "enabled": False
-    },
"ExtractReviewDataLut": { - "enabled": False - }, - "ExtractReviewIntermediates": { - "enabled": True, - "viewer_lut_raw": False, - "outputs": [ - { - "name": "baking", - "publish": False, - "filter": { - "task_types": [], - "product_types": [], - "product_names": [] - }, - "read_raw": False, - "colorspace_override": { - "enabled": False, - "type": "colorspace", - "colorspace": "", - "display_view": { - "display": "", - "view": "" - } - }, - "bake_viewer_process": True, - "bake_viewer_input_process": True, - "reformat_nodes_config": { - "enabled": False, - "reposition_nodes": [ - { - "node_class": "Reformat", - "knobs": [ - { - "type": "text", - "name": "type", - "text": "to format" - }, - { - "type": "text", - "name": "format", - "text": "HD_1080" - }, - { - "type": "text", - "name": "filter", - "text": "Lanczos6" - }, - { - "type": "boolean", - "name": "black_outside", - "boolean": True - }, - { - "type": "boolean", - "name": "pbb", - "boolean": False - } - ] - } - ] - }, - "extension": "mov", - "add_custom_tags": [] - } - ] - }, - "ExtractSlateFrame": { - "viewer_lut_raw": False, - "key_value_mapping": { - "f_submission_note": { - "enabled": True, - "template": "{comment}" - }, - "f_submitting_for": { - "enabled": True, - "template": "{intent[value]}" - }, - "f_vfx_scope_of_work": { - "enabled": False, - "template": "" - } - } - }, - "IncrementScriptVersion": { - "enabled": True, - "optional": True, - "active": True - } -} diff --git a/server_addon/nuke/server/settings/scriptsmenu.py b/server_addon/nuke/server/settings/scriptsmenu.py deleted file mode 100644 index 7ffd6841d5..0000000000 --- a/server_addon/nuke/server/settings/scriptsmenu.py +++ /dev/null @@ -1,52 +0,0 @@ -from ayon_server.settings import BaseSettingsModel, SettingsField - - -class ScriptsmenuSubmodel(BaseSettingsModel): - """Item Definition""" - _isGroup = True - - type: str = SettingsField(title="Type") - command: str = SettingsField(title="Command") - sourcetype: str = SettingsField(title="Source Type") - title: str = SettingsField(title="Title") - tooltip: str = SettingsField(title="Tooltip") - - -class ScriptsmenuSettings(BaseSettingsModel): - """Nuke script menu project settings.""" - _isGroup = True - - name: str = SettingsField(title="Menu Name") - definition: list[ScriptsmenuSubmodel] = SettingsField( - default_factory=list, - title="Definition", - description="Scriptmenu Items Definition" - ) - - -DEFAULT_SCRIPTSMENU_SETTINGS = { - "name": "Custom Tools", - "definition": [ - { - "type": "action", - "sourcetype": "python", - "title": "Ayon Nuke Docs", - "command": "import webbrowser;webbrowser.open(url='https://ayon.ynput.io/docs/addon_nuke_artist')", # noqa - "tooltip": "Open the Ayon Nuke user doc page" - }, - { - "type": "action", - "sourcetype": "python", - "title": "Set Frame Start (Read Node)", - "command": "from openpype.hosts.nuke.startup.frame_setting_for_read_nodes import main;main();", # noqa - "tooltip": "Set frame start for read node(s)" - }, - { - "type": "action", - "sourcetype": "python", - "title": "Set non publish output for Write Node", - "command": "from openpype.hosts.nuke.startup.custom_write_node import main;main();", # noqa - "tooltip": "Open the OpenPype Nuke user doc page" - } - ] -} diff --git a/server_addon/nuke/server/settings/templated_workfile_build.py b/server_addon/nuke/server/settings/templated_workfile_build.py deleted file mode 100644 index 12ebedf570..0000000000 --- a/server_addon/nuke/server/settings/templated_workfile_build.py +++ /dev/null @@ -1,34 +0,0 @@ -from 
diff --git a/server_addon/nuke/server/settings/templated_workfile_build.py b/server_addon/nuke/server/settings/templated_workfile_build.py
deleted file mode 100644
index 12ebedf570..0000000000
--- a/server_addon/nuke/server/settings/templated_workfile_build.py
+++ /dev/null
@@ -1,34 +0,0 @@
-from ayon_server.settings import (
-    BaseSettingsModel,
-    SettingsField,
-    task_types_enum,
-)
-
-
-class TemplatedWorkfileProfileModel(BaseSettingsModel):
-    task_types: list[str] = SettingsField(
-        default_factory=list,
-        title="Task types",
-        enum_resolver=task_types_enum
-    )
-    task_names: list[str] = SettingsField(
-        default_factory=list,
-        title="Task names"
-    )
-    path: str = SettingsField(
-        title="Path to template"
-    )
-    keep_placeholder: bool = SettingsField(
-        False,
-        title="Keep placeholders")
-    create_first_version: bool = SettingsField(
-        True,
-        title="Create first version"
-    )
-
-
-class TemplatedWorkfileBuildModel(BaseSettingsModel):
-    """Settings for templated workfile builder."""
-    profiles: list[TemplatedWorkfileProfileModel] = SettingsField(
-        default_factory=list
-    )
diff --git a/server_addon/nuke/server/settings/workfile_builder.py b/server_addon/nuke/server/settings/workfile_builder.py
deleted file mode 100644
index 97961655f3..0000000000
--- a/server_addon/nuke/server/settings/workfile_builder.py
+++ /dev/null
@@ -1,84 +0,0 @@
-from ayon_server.settings import (
-    BaseSettingsModel,
-    SettingsField,
-    task_types_enum,
-    MultiplatformPathModel,
-)
-
-
-class CustomTemplateModel(BaseSettingsModel):
-    task_types: list[str] = SettingsField(
-        default_factory=list,
-        title="Task types",
-        enum_resolver=task_types_enum
-    )
-    path: MultiplatformPathModel = SettingsField(
-        default_factory=MultiplatformPathModel,
-        title="Template Directory Path"
-    )
-
-
-class BuilderProfileItemModel(BaseSettingsModel):
-    product_name_filters: list[str] = SettingsField(
-        default_factory=list,
-        title="Product name"
-    )
-    product_types: list[str] = SettingsField(
-        default_factory=list,
-        title="Product types"
-    )
-    repre_names: list[str] = SettingsField(
-        default_factory=list,
-        title="Representations"
-    )
-    loaders: list[str] = SettingsField(
-        default_factory=list,
-        title="Loader plugins"
-    )
-
-
-class BuilderProfileModel(BaseSettingsModel):
-    task_types: list[str] = SettingsField(
-        default_factory=list,
-        title="Task types",
-        enum_resolver=task_types_enum
-    )
-    tasks: list[str] = SettingsField(
-        default_factory=list,
-        title="Task names"
-    )
-    current_context: list[BuilderProfileItemModel] = SettingsField(
-        default_factory=list,
-        title="Current context"
-    )
-    linked_assets: list[BuilderProfileItemModel] = SettingsField(
-        default_factory=list,
-        title="Linked assets/shots"
-    )
-
-
-class WorkfileBuilderModel(BaseSettingsModel):
-    """[deprecated] use Template Workfile Build Settings instead.
-    """
-    create_first_version: bool = SettingsField(
-        title="Create first workfile")
-    custom_templates: list[CustomTemplateModel] = SettingsField(
-        default_factory=list,
-        title="Custom templates"
-    )
-    builder_on_start: bool = SettingsField(
-        default=False,
-        title="Run Builder at first workfile"
-    )
-    profiles: list[BuilderProfileModel] = SettingsField(
-        default_factory=list,
-        title="Builder profiles"
-    )
-
-
-DEFAULT_WORKFILE_BUILDER_SETTINGS = {
-    "create_first_version": False,
-    "custom_templates": [],
-    "builder_on_start": False,
-    "profiles": []
-}
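As with the other settings modules above, the defaults dict is expected to validate against its model. A self-contained sketch of that pattern using plain pydantic as a stand-in (ayon_server's BaseSettingsModel and SettingsField are server-side only; the names below are hypothetical):

    from pydantic import BaseModel, Field

    class WorkfileBuilderSketch(BaseModel):
        # Mirrors the shape of WorkfileBuilderModel, without the AYON field types.
        create_first_version: bool = Field(False, title="Create first workfile")
        custom_templates: list = Field(default_factory=list)
        builder_on_start: bool = Field(False, title="Run Builder at first workfile")
        profiles: list = Field(default_factory=list)

    defaults = {
        "create_first_version": False,
        "custom_templates": [],
        "builder_on_start": False,
        "profiles": []
    }
    # Round-trip the defaults through the model, the same way the server addon
    # feeds DEFAULT_VALUES through its settings model in get_default_settings().
    assert WorkfileBuilderSketch(**defaults).builder_on_start is False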